*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
#endif
#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
#include <string.h>
#include <stdlib.h>
#include "gstrtpvrawdepay.h"
+#include "gstrtputils.h"
GST_DEBUG_CATEGORY_STATIC (rtpvrawdepay_debug);
#define GST_CAT_DEFAULT (rtpvrawdepay_debug)
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("application/x-rtp, "
"media = (string) \"video\", "
- "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
- "clock-rate = (int) 90000, " "encoding-name = (string) \"RAW\"")
+ "clock-rate = (int) 90000, "
+ "encoding-name = (string) \"RAW\", "
+ "sampling = (string) { \"RGB\", \"RGBA\", \"BGR\", \"BGRA\", "
+ "\"YCbCr-4:4:4\", \"YCbCr-4:2:2\", \"YCbCr-4:2:0\", "
+ "\"YCbCr-4:1:1\" },"
+      /* we cannot express these integer ranges, since the caps fields
+       * are string-typed:
+       * "width = (string) [1 32767],"
+ * "height = (string) [1 32767],"
+ */
+ "depth = (string) { \"8\", \"10\", \"12\", \"16\" }")
);
#define gst_rtp_vraw_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpVRawDepay, gst_rtp_vraw_depay,
- GST_TYPE_BASE_RTP_DEPAYLOAD);
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
-static gboolean gst_rtp_vraw_depay_setcaps (GstBaseRTPDepayload * depayload,
+static gboolean gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
-static GstBuffer *gst_rtp_vraw_depay_process (GstBaseRTPDepayload * depayload,
- GstBuffer * buf);
+static GstBuffer *gst_rtp_vraw_depay_process_packet (GstRTPBaseDepayload *
+ depay, GstRTPBuffer * rtp);
static GstStateChangeReturn gst_rtp_vraw_depay_change_state (GstElement *
element, GstStateChange transition);
-static gboolean gst_rtp_vraw_depay_handle_event (GstBaseRTPDepayload * filter,
+static gboolean gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter,
GstEvent * event);
static void
gst_rtp_vraw_depay_class_init (GstRtpVRawDepayClass * klass)
{
GstElementClass *gstelement_class;
- GstBaseRTPDepayloadClass *gstbasertpdepayload_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
gstelement_class = (GstElementClass *) klass;
- gstbasertpdepayload_class = (GstBaseRTPDepayloadClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
gstelement_class->change_state = gst_rtp_vraw_depay_change_state;
- gstbasertpdepayload_class->set_caps = gst_rtp_vraw_depay_setcaps;
- gstbasertpdepayload_class->process = gst_rtp_vraw_depay_process;
- gstbasertpdepayload_class->handle_event = gst_rtp_vraw_depay_handle_event;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_vraw_depay_setcaps;
+ gstrtpbasedepayload_class->process_rtp_packet =
+ gst_rtp_vraw_depay_process_packet;
+ gstrtpbasedepayload_class->handle_event = gst_rtp_vraw_depay_handle_event;
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_rtp_vraw_depay_src_template));
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_rtp_vraw_depay_sink_template));
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_vraw_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_vraw_depay_sink_template);
- gst_element_class_set_details_simple (gstelement_class,
+ gst_element_class_set_static_metadata (gstelement_class,
"RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
"Extracts raw video from RTP packets (RFC 4175)",
"Wim Taymans <wim.taymans@gmail.com>");
static void
gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay)
{
-  /* needed because of GST_BOILERPLATE */
+  /* Nothing to initialize here: per-stream state (timestamp, outbuf, pool)
+   * is set up by gst_rtp_vraw_depay_reset() on the READY->PAUSED state
+   * transition. */
}
static void
-gst_rtp_vraw_depay_reset (GstRtpVRawDepay * rtpvrawdepay)
+gst_rtp_vraw_depay_reset (GstRtpVRawDepay * rtpvrawdepay, gboolean full)
{
if (rtpvrawdepay->outbuf) {
+ gst_video_frame_unmap (&rtpvrawdepay->frame);
gst_buffer_unref (rtpvrawdepay->outbuf);
rtpvrawdepay->outbuf = NULL;
}
rtpvrawdepay->timestamp = -1;
- if (rtpvrawdepay->pool) {
+
+ if (full && rtpvrawdepay->pool) {
gst_buffer_pool_set_active (rtpvrawdepay->pool, FALSE);
gst_object_unref (rtpvrawdepay->pool);
rtpvrawdepay->pool = NULL;
{
GstQuery *query;
GstBufferPool *pool = NULL;
- guint size, min, max, prefix, alignment;
+ guint size, min, max;
GstStructure *config;
/* find a pool for the negotiated caps now */
query = gst_query_new_allocation (caps, TRUE);
- if (gst_pad_peer_query (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depay), query)) {
- GST_DEBUG_OBJECT (depay, "got downstream ALLOCATION hints");
+ if (!gst_pad_peer_query (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), query)) {
+ /* not a problem, we use the defaults of query */
+ GST_DEBUG_OBJECT (depay, "could not get downstream ALLOCATION hints");
+ }
+
+ if (gst_query_get_n_allocation_pools (query) > 0) {
/* we got configuration from our peer, parse them */
- gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
- &alignment, &pool);
+ gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
} else {
- GST_DEBUG_OBJECT (depay, "didn't get downstream ALLOCATION hints");
+ GST_DEBUG_OBJECT (depay, "didn't get downstream pool hints");
size = info->size;
min = max = 0;
- prefix = 0;
- alignment = 0;
}
if (pool == NULL) {
/* we did not get a pool, make one ourselves then */
- pool = gst_buffer_pool_new ();
+ pool = gst_video_buffer_pool_new ();
}
if (depay->pool)
depay->pool = pool;
config = gst_buffer_pool_get_config (pool);
- gst_buffer_pool_config_set (config, caps, size, min, max, prefix, alignment);
- /* just set the metadata, if the pool can support it we will transparently use
- * it through the video info API. We could also see if the pool support this
- * metadata and only activate it then. */
- gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
+ gst_buffer_pool_config_set_params (config, caps, size, min, max);
+  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
+    /* just set the metadata, if the pool can support it we will transparently use
+     * it through the video info API. We could also see if the pool supports this
+     * metadata and only activate it then. */
+ gst_buffer_pool_config_add_option (config,
+ GST_BUFFER_POOL_OPTION_VIDEO_META);
+ }
gst_buffer_pool_set_config (pool, config);
/* and activate */
}
static gboolean
-gst_rtp_vraw_depay_setcaps (GstBaseRTPDepayload * depayload, GstCaps * caps)
+gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstStructure *structure;
GstRtpVRawDepay *rtpvrawdepay;
gint clock_rate;
const gchar *str;
- gint format, width, height, pgroup, xinc, yinc;
+ gint format, width, height, depth, pgroup, xinc, yinc;
GstCaps *srccaps;
gboolean res;
GstFlowReturn ret;
goto no_height;
height = atoi (str);
+ if (!(str = gst_structure_get_string (structure, "depth")))
+ goto no_depth;
+ depth = atoi (str);
+
/* optional interlace value but we don't handle interlaced
* formats yet */
if (gst_structure_get_string (structure, "interlace"))
format = GST_VIDEO_FORMAT_AYUV;
pgroup = 3;
} else if (!strcmp (str, "YCbCr-4:2:2")) {
- format = GST_VIDEO_FORMAT_UYVY;
- pgroup = 4;
+ if (depth == 8) {
+ format = GST_VIDEO_FORMAT_UYVY;
+ pgroup = 4;
+ } else if (depth == 10) {
+ format = GST_VIDEO_FORMAT_UYVP;
+ pgroup = 5;
+ } else
+ goto unknown_format;
xinc = 2;
} else if (!strcmp (str, "YCbCr-4:2:0")) {
format = GST_VIDEO_FORMAT_I420;
format = GST_VIDEO_FORMAT_Y41B;
pgroup = 6;
xinc = 4;
- } else
+ } else {
goto unknown_format;
+ }
gst_video_info_init (&rtpvrawdepay->vinfo);
gst_video_info_set_format (&rtpvrawdepay->vinfo, format, width, height);
rtpvrawdepay->yinc = yinc;
srccaps = gst_video_info_to_caps (&rtpvrawdepay->vinfo);
- res = gst_pad_set_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
gst_caps_unref (srccaps);
GST_DEBUG_OBJECT (depayload, "width %d, height %d, format %d", width, height,
xinc, yinc, pgroup);
/* negotiate a bufferpool */
- if ((ret = gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
+ if ((ret = gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, srccaps,
&rtpvrawdepay->vinfo)) != GST_FLOW_OK)
goto no_bufferpool;
GST_ERROR_OBJECT (depayload, "no height specified");
return FALSE;
}
+no_depth:
+ {
+ GST_ERROR_OBJECT (depayload, "no depth specified");
+ return FALSE;
+ }
interlaced:
{
GST_ERROR_OBJECT (depayload, "interlaced formats not supported yet");
}
static GstBuffer *
-gst_rtp_vraw_depay_process (GstBaseRTPDepayload * depayload, GstBuffer * buf)
+gst_rtp_vraw_depay_process_packet (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp)
{
GstRtpVRawDepay *rtpvrawdepay;
- guint8 *payload, *yp, *up, *vp, *headers;
+ guint8 *payload, *p0, *yp, *up, *vp, *headers;
guint32 timestamp;
guint cont, ystride, uvstride, pgroup, payload_len;
gint width, height, xinc, yinc;
- GstRTPBuffer rtp;
- GstVideoFrame frame;
+ GstVideoFrame *frame;
+ gboolean marker;
+ GstBuffer *outbuf = NULL;
rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
- gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
-
- timestamp = gst_rtp_buffer_get_timestamp (&rtp);
+ timestamp = gst_rtp_buffer_get_timestamp (rtp);
if (timestamp != rtpvrawdepay->timestamp || rtpvrawdepay->outbuf == NULL) {
- GstBuffer *outbuf;
+ GstBuffer *new_buffer;
GstFlowReturn ret;
GST_LOG_OBJECT (depayload, "new frame with timestamp %u", timestamp);
/* new timestamp, flush old buffer and create new output buffer */
if (rtpvrawdepay->outbuf) {
- gst_base_rtp_depayload_push_ts (depayload, rtpvrawdepay->timestamp,
- rtpvrawdepay->outbuf);
+ gst_video_frame_unmap (&rtpvrawdepay->frame);
+ gst_rtp_base_depayload_push (depayload, rtpvrawdepay->outbuf);
rtpvrawdepay->outbuf = NULL;
}
- if (gst_pad_check_reconfigure (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload))) {
+ if (gst_pad_check_reconfigure (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload))) {
GstCaps *caps;
caps =
- gst_pad_get_current_caps (GST_BASE_RTP_DEPAYLOAD_SRCPAD (depayload));
+ gst_pad_get_current_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
&rtpvrawdepay->vinfo);
gst_caps_unref (caps);
}
- ret = gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &outbuf, NULL);
+ ret =
+ gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &new_buffer, NULL);
+
if (G_UNLIKELY (ret != GST_FLOW_OK))
goto alloc_failed;
/* clear timestamp from alloc... */
- GST_BUFFER_TIMESTAMP (outbuf) = -1;
+ GST_BUFFER_PTS (new_buffer) = -1;
- rtpvrawdepay->outbuf = outbuf;
+ if (!gst_video_frame_map (&rtpvrawdepay->frame, &rtpvrawdepay->vinfo,
+ new_buffer, GST_MAP_WRITE | GST_VIDEO_FRAME_MAP_FLAG_NO_REF)) {
+ gst_buffer_unref (new_buffer);
+ goto invalid_frame;
+ }
+
+ rtpvrawdepay->outbuf = new_buffer;
rtpvrawdepay->timestamp = timestamp;
}
- if (!gst_video_frame_map (&frame, &rtpvrawdepay->vinfo, rtpvrawdepay->outbuf,
- GST_MAP_WRITE))
- goto invalid_frame;
+ frame = &rtpvrawdepay->frame;
+
+ g_assert (frame->buffer != NULL);
/* get pointer and strides of the planes */
- yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
- up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
- vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);
+ p0 = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ yp = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ up = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
+ vp = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
- ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
- uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
+ ystride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ uvstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1);
pgroup = rtpvrawdepay->pgroup;
width = GST_VIDEO_INFO_WIDTH (&rtpvrawdepay->vinfo);
xinc = rtpvrawdepay->xinc;
yinc = rtpvrawdepay->yinc;
- payload = gst_rtp_buffer_get_payload (&rtp);
- payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
if (payload_len < 3)
goto short_packet;
/* remember header position */
headers = payload;
+ gst_rtp_copy_video_meta (rtpvrawdepay, frame->buffer, rtp->buffer);
+
/* find data start */
do {
if (payload_len < 6)
goto next;
}
- /* calculate the maximim amount of bytes we can use per line */
+ /* calculate the maximum amount of bytes we can use per line */
if (offs + ((length / pgroup) * xinc) > width) {
plen = ((width - offs) * pgroup) / xinc;
GST_WARNING_OBJECT (depayload, "clipping length %d, offset %d, plen %d",
case GST_VIDEO_FORMAT_BGR:
case GST_VIDEO_FORMAT_BGRA:
case GST_VIDEO_FORMAT_UYVY:
+ case GST_VIDEO_FORMAT_UYVP:
/* samples are packed just like gstreamer packs them */
offs /= xinc;
- datap = yp + (line * ystride) + (offs * pgroup);
+ datap = p0 + (line * ystride) + (offs * pgroup);
memcpy (datap, payload, plen);
break;
gint i;
guint8 *p;
- datap = yp + (line * ystride) + (offs * 4);
+ datap = p0 + (line * ystride) + (offs * 4);
p = payload;
/* samples are packed in order Cb-Y-Cr for both interlaced and
payload_len -= length;
}
- gst_video_frame_unmap (&frame);
- gst_rtp_buffer_unmap (&rtp);
+ marker = gst_rtp_buffer_get_marker (rtp);
- if (gst_rtp_buffer_get_marker (&rtp)) {
+ if (marker) {
GST_LOG_OBJECT (depayload, "marker, flushing frame");
- if (rtpvrawdepay->outbuf) {
- gst_base_rtp_depayload_push_ts (depayload, timestamp,
- rtpvrawdepay->outbuf);
- rtpvrawdepay->outbuf = NULL;
- }
+ gst_video_frame_unmap (&rtpvrawdepay->frame);
+ outbuf = rtpvrawdepay->outbuf;
+ rtpvrawdepay->outbuf = NULL;
rtpvrawdepay->timestamp = -1;
}
- return NULL;
+ return outbuf;
/* ERRORS */
unknown_sampling:
{
GST_ELEMENT_ERROR (depayload, STREAM, FORMAT,
(NULL), ("unimplemented sampling"));
- gst_video_frame_unmap (&frame);
- gst_rtp_buffer_unmap (&rtp);
return NULL;
}
alloc_failed:
{
GST_WARNING_OBJECT (depayload, "failed to alloc output buffer");
- gst_rtp_buffer_unmap (&rtp);
return NULL;
}
invalid_frame:
{
GST_ERROR_OBJECT (depayload, "could not map video frame");
- gst_rtp_buffer_unmap (&rtp);
return NULL;
}
wrong_length:
{
GST_WARNING_OBJECT (depayload, "length not multiple of pgroup");
- gst_video_frame_unmap (&frame);
- gst_rtp_buffer_unmap (&rtp);
return NULL;
}
short_packet:
{
GST_WARNING_OBJECT (depayload, "short packet");
- gst_video_frame_unmap (&frame);
- gst_rtp_buffer_unmap (&rtp);
return NULL;
}
}
static gboolean
-gst_rtp_vraw_depay_handle_event (GstBaseRTPDepayload * filter, GstEvent * event)
+gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter, GstEvent * event)
{
gboolean ret;
GstRtpVRawDepay *rtpvrawdepay;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:
- gst_rtp_vraw_depay_reset (rtpvrawdepay);
+ gst_rtp_vraw_depay_reset (rtpvrawdepay, FALSE);
break;
default:
break;
}
ret =
- GST_BASE_RTP_DEPAYLOAD_CLASS (parent_class)->handle_event (filter, event);
+ GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (filter, event);
return ret;
}
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
- gst_rtp_vraw_depay_reset (rtpvrawdepay);
+ gst_rtp_vraw_depay_reset (rtpvrawdepay, TRUE);
break;
default:
break;
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_rtp_vraw_depay_reset (rtpvrawdepay);
+ gst_rtp_vraw_depay_reset (rtpvrawdepay, TRUE);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
break;