X-Git-Url: http://review.tizen.org/git/?a=blobdiff_plain;f=gst%2Frtp%2Fgstrtph263ppay.c;h=69ae80fcff0e0e7dc043d41805aaaf3598c90abd;hb=ff31090671a18900e5266d3d4a8a6995f0b4e208;hp=fcc4d556ad8d0fe725331af281fa20b6226642ac;hpb=f1533c55045bffbb9716ddd5df348ceff7213fbd;p=platform%2Fupstream%2Fgst-plugins-good.git

diff --git a/gst/rtp/gstrtph263ppay.c b/gst/rtp/gstrtph263ppay.c
index fcc4d55..69ae80f 100644
--- a/gst/rtp/gstrtph263ppay.c
+++ b/gst/rtp/gstrtph263ppay.c
@@ -1,5 +1,5 @@
 /* GStreamer
- * Copyright (C) <2005> Wim Taymans
+ * Copyright (C) <2005> Wim Taymans
  *
  * This library is free software; you can redistribute it and/or
  * modify it under the terms of the GNU Library General Public
@@ -22,74 +22,94 @@
 #endif
 
 #include 
+#include 
 #include 
 
 #include "gstrtph263ppay.h"
 
-/* elementfactory information */
-static const GstElementDetails gst_rtp_h263ppay_details =
-GST_ELEMENT_DETAILS ("RTP packet parser",
-    "Codec/Payloader/Network",
-    "Payload-encodes H263+ video in RTP packets (RFC 2429)",
-    "Wim Taymans ");
+#define DEFAULT_FRAGMENTATION_MODE GST_FRAGMENTATION_MODE_NORMAL
+
+enum
+{
+  PROP_0,
+  PROP_FRAGMENTATION_MODE
+};
+
+#define GST_TYPE_FRAGMENTATION_MODE (gst_fragmentation_mode_get_type())
+static GType
+gst_fragmentation_mode_get_type (void)
+{
+  static GType fragmentation_mode_type = 0;
+  static const GEnumValue fragmentation_mode[] = {
+    {GST_FRAGMENTATION_MODE_NORMAL, "Normal", "normal"},
+    {GST_FRAGMENTATION_MODE_SYNC, "Fragment at sync points", "sync"},
+    {0, NULL, NULL},
+  };
+
+  if (!fragmentation_mode_type) {
+    fragmentation_mode_type =
+        g_enum_register_static ("GstFragmentationMode", fragmentation_mode);
+  }
+  return fragmentation_mode_type;
+}
+
+
+GST_DEBUG_CATEGORY_STATIC (rtph263ppay_debug);
+#define GST_CAT_DEFAULT rtph263ppay_debug
 
 static GstStaticPadTemplate gst_rtp_h263p_pay_sink_template =
 GST_STATIC_PAD_TEMPLATE ("sink",
     GST_PAD_SINK,
     GST_PAD_ALWAYS,
-    GST_STATIC_CAPS ("video/x-h263")
+    GST_STATIC_CAPS ("video/x-h263, " "variant = (string) \"itu\"")
     );
 
+/*
+ * We also set all of those as required:
+ *
+ * "annex-f = (boolean) {true, false},"
+ * "annex-i = (boolean) {true, false},"
+ * "annex-j = (boolean) {true, false},"
+ * "annex-l = (boolean) {true, false},"
+ * "annex-t = (boolean) {true, false},"
+ * "annex-v = (boolean) {true, false}")
+ */
+
+
 static GstStaticPadTemplate gst_rtp_h263p_pay_src_template =
-GST_STATIC_PAD_TEMPLATE ("src",
+    GST_STATIC_PAD_TEMPLATE ("src",
     GST_PAD_SRC,
     GST_PAD_ALWAYS,
     GST_STATIC_CAPS ("application/x-rtp, "
         "media = (string) \"video\", "
-        "payload = (int) [ 96, 127 ], "
-        "clock-rate = (int) 90000, " "encoding-name = (string) \"H263-1998\"")
+        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+        "clock-rate = (int) 90000, " "encoding-name = (string) \"H263-1998\"; "
+        "application/x-rtp, "
+        "media = (string) \"video\", "
+        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+        "clock-rate = (int) 90000, " "encoding-name = (string) \"H263-2000\"")
     );
 
-static void gst_rtp_h263p_pay_class_init (GstRtpH263PPayClass * klass);
-static void gst_rtp_h263p_pay_base_init (GstRtpH263PPayClass * klass);
-static void gst_rtp_h263p_pay_init (GstRtpH263PPay * rtph263ppay);
 static void gst_rtp_h263p_pay_finalize (GObject * object);
 
+static void gst_rtp_h263p_pay_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_rtp_h263p_pay_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+
 static gboolean gst_rtp_h263p_pay_setcaps (GstBaseRTPPayload * payload,
     GstCaps * caps);
+static GstCaps *gst_rtp_h263p_pay_sink_getcaps (GstBaseRTPPayload * payload,
+    GstPad * pad);
 
 static GstFlowReturn gst_rtp_h263p_pay_handle_buffer (GstBaseRTPPayload *
     payload, GstBuffer * buffer);
 
-static GstBaseRTPPayloadClass *parent_class = NULL;
-
-static GType
-gst_rtp_h263p_pay_get_type (void)
-{
-  static GType rtph263ppay_type = 0;
-
-  if (!rtph263ppay_type) {
-    static const GTypeInfo rtph263ppay_info = {
-      sizeof (GstRtpH263PPayClass),
-      (GBaseInitFunc) gst_rtp_h263p_pay_base_init,
-      NULL,
-      (GClassInitFunc) gst_rtp_h263p_pay_class_init,
-      NULL,
-      NULL,
-      sizeof (GstRtpH263PPay),
-      0,
-      (GInstanceInitFunc) gst_rtp_h263p_pay_init,
-    };
-
-    rtph263ppay_type =
-        g_type_register_static (GST_TYPE_BASE_RTP_PAYLOAD, "GstRtpH263PPay",
-        &rtph263ppay_info, 0);
-  }
-  return rtph263ppay_type;
-}
+GST_BOILERPLATE (GstRtpH263PPay, gst_rtp_h263p_pay, GstBaseRTPPayload,
+    GST_TYPE_BASE_RTP_PAYLOAD);
 
 static void
-gst_rtp_h263p_pay_base_init (GstRtpH263PPayClass * klass)
+gst_rtp_h263p_pay_base_init (gpointer klass)
 {
   GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
 
@@ -98,32 +118,47 @@ gst_rtp_h263p_pay_base_init (GstRtpH263PPayClass * klass)
   gst_element_class_add_pad_template (element_class,
       gst_static_pad_template_get (&gst_rtp_h263p_pay_sink_template));
 
-  gst_element_class_set_details (element_class, &gst_rtp_h263ppay_details);
+  gst_element_class_set_details_simple (element_class, "RTP H263 payloader",
+      "Codec/Payloader/Network/RTP",
+      "Payload-encodes H263/+/++ video in RTP packets (RFC 4629)",
+      "Wim Taymans ");
 }
 
 static void
 gst_rtp_h263p_pay_class_init (GstRtpH263PPayClass * klass)
 {
   GObjectClass *gobject_class;
-  GstElementClass *gstelement_class;
   GstBaseRTPPayloadClass *gstbasertppayload_class;
 
   gobject_class = (GObjectClass *) klass;
-  gstelement_class = (GstElementClass *) klass;
   gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
 
-  parent_class = g_type_class_peek_parent (klass);
-
   gobject_class->finalize = gst_rtp_h263p_pay_finalize;
+  gobject_class->set_property = gst_rtp_h263p_pay_set_property;
+  gobject_class->get_property = gst_rtp_h263p_pay_get_property;
 
   gstbasertppayload_class->set_caps = gst_rtp_h263p_pay_setcaps;
+  gstbasertppayload_class->get_caps = gst_rtp_h263p_pay_sink_getcaps;
   gstbasertppayload_class->handle_buffer = gst_rtp_h263p_pay_handle_buffer;
+
+  g_object_class_install_property (G_OBJECT_CLASS (klass),
+      PROP_FRAGMENTATION_MODE, g_param_spec_enum ("fragmentation-mode",
+          "Fragmentation Mode",
+          "Packet Fragmentation Mode", GST_TYPE_FRAGMENTATION_MODE,
+          DEFAULT_FRAGMENTATION_MODE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  GST_DEBUG_CATEGORY_INIT (rtph263ppay_debug, "rtph263ppay",
+      0, "rtph263ppay (RFC 4629)");
 }
 
 static void
-gst_rtp_h263p_pay_init (GstRtpH263PPay * rtph263ppay)
+gst_rtp_h263p_pay_init (GstRtpH263PPay * rtph263ppay,
+    GstRtpH263PPayClass * klass)
 {
   rtph263ppay->adapter = gst_adapter_new ();
+
+  rtph263ppay->fragmentation_mode = DEFAULT_FRAGMENTATION_MODE;
 }
 
 static void
@@ -142,12 +177,298 @@ gst_rtp_h263p_pay_finalize (GObject * object)
 static gboolean
 gst_rtp_h263p_pay_setcaps (GstBaseRTPPayload * payload, GstCaps * caps)
 {
-  gst_basertppayload_set_options (payload, "video", TRUE, "H263-1998", 90000);
-  gst_basertppayload_set_outcaps (payload, NULL);
+  gboolean res;
+  GstCaps *peercaps;
+  gchar *encoding_name = NULL;
+
+  g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);
+
+  peercaps = gst_pad_peer_get_caps (GST_BASE_RTP_PAYLOAD_SRCPAD (payload));
+  if (peercaps) {
+    GstCaps *intersect = gst_caps_intersect (peercaps,
+        gst_pad_get_pad_template_caps (GST_BASE_RTP_PAYLOAD_SRCPAD (payload)));
+
+    gst_caps_unref (peercaps);
+    if (!gst_caps_is_empty (intersect)) {
+      GstStructure *s = gst_caps_get_structure (intersect, 0);
+      encoding_name = g_strdup (gst_structure_get_string (s, "encoding-name"));
+    }
+    gst_caps_unref (intersect);
+  }
+
+  if (!encoding_name)
+    encoding_name = g_strdup ("H263-1998");
+
+  gst_basertppayload_set_options (payload, "video", TRUE,
+      (gchar *) encoding_name, 90000);
+  res = gst_basertppayload_set_outcaps (payload, NULL);
+  g_free (encoding_name);
 
-  return TRUE;
+  return res;
 }
 
+static GstCaps *
+gst_rtp_h263p_pay_sink_getcaps (GstBaseRTPPayload * payload, GstPad * pad)
+{
+  GstRtpH263PPay *rtph263ppay;
+  GstCaps *caps = gst_caps_new_empty ();
+  GstCaps *peercaps = NULL;
+  GstCaps *intersect = NULL;
+  guint i;
+
+  rtph263ppay = GST_RTP_H263P_PAY (payload);
+
+  peercaps = gst_pad_peer_get_caps (GST_BASE_RTP_PAYLOAD_SRCPAD (payload));
+  if (!peercaps)
+    return
+        gst_caps_copy (gst_pad_get_pad_template_caps
+        (GST_BASE_RTP_PAYLOAD_SRCPAD (payload)));
+
+  intersect = gst_caps_intersect (peercaps,
+      gst_pad_get_pad_template_caps (GST_BASE_RTP_PAYLOAD_SRCPAD (payload)));
+  gst_caps_unref (peercaps);
+
+  if (gst_caps_is_empty (intersect))
+    return intersect;
+
+  for (i = 0; i < gst_caps_get_size (intersect); i++) {
+    GstStructure *s = gst_caps_get_structure (intersect, i);
+    const gchar *encoding_name = gst_structure_get_string (s, "encoding-name");
+
+    if (!strcmp (encoding_name, "H263-2000")) {
+      const gchar *profile_str = gst_structure_get_string (s, "profile");
+      const gchar *level_str = gst_structure_get_string (s, "level");
+      int profile = 0;
+      int level = 0;
+
+      if (profile_str && level_str) {
+        gboolean i = FALSE, j = FALSE, l = FALSE, t = FALSE, f = FALSE,
+            v = FALSE;
+        GstStructure *new_s = gst_structure_new ("video/x-h263",
+            "variant", G_TYPE_STRING, "itu",
+            NULL);
+
+        profile = atoi (profile_str);
+        level = atoi (level_str);
+
+        /* These profiles are defined in the H.263 Annex X */
+        switch (profile) {
+          case 0:
+            /* The Baseline Profile (Profile 0) */
+            break;
+          case 1:
+            /* H.320 Coding Efficiency Version 2 Backward-Compatibility Profile
+             * (Profile 1)
+             * Baseline + Annexes I, J, L.4 and T
+             */
+            i = j = l = t = TRUE;
+            break;
+          case 2:
+            /* Version 1 Backward-Compatibility Profile (Profile 2)
+             * Baseline + Annex F
+             */
+            i = j = l = t = f = TRUE;
+            break;
+          case 3:
+            /* Version 2 Interactive and Streaming Wireless Profile
+             * Baseline + Annexes I, J, T
+             */
+            i = j = t = TRUE;
+            break;
+          case 4:
+            /* Version 3 Interactive and Streaming Wireless Profile (Profile 4)
+             * Baseline + Annexes I, J, T, V, W.6.3.8,
+             */
+            /* Missing W.6.3.8 */
+            i = j = t = v = TRUE;
+            break;
+          case 5:
+            /* Conversational High Compression Profile (Profile 5)
+             * Baseline + Annexes F, I, J, L.4, T, D, U
+             */
+            /* Missing D, U */
+            f = i = j = l = t = TRUE;
+            break;
+          case 6:
+            /* Conversational Internet Profile (Profile 6)
+             * Baseline + Annexes F, I, J, L.4, T, D, U and
+             * K with arbitrary slice ordering
+             */
+            /* Missing D, U, K with arbitrary slice ordering */
+            f = i = j = l = t = TRUE;
+            break;
+          case 7:
+            /* Conversational Interlace Profile (Profile 7)
+             * Baseline + Annexes F, I, J, L.4, T, D, U, W.6.3.11
+             */
+            /* Missing D, U, W.6.3.11 */
+            f = i = j = l = t = TRUE;
+            break;
+          case 8:
+            /* High Latency Profile (Profile 8)
+             * Baseline + Annexes F, I, J, L.4, T, D, U, P.5, O.1.1 and
+             * K with arbitrary slice ordering
+             */
+            /* Missing D, U, P.5, O.1.1 */
+            f = i = j = l = t = TRUE;
+            break;
+        }
+
+
+        if (f || i || j || t || l || v) {
+          GValue list = { 0 };
+          GValue vstr = { 0 };
+
+          g_value_init (&list, GST_TYPE_LIST);
+          g_value_init (&vstr, G_TYPE_STRING);
+
+          g_value_set_static_string (&vstr, "h263");
+          gst_value_list_append_value (&list, &vstr);
+          g_value_set_static_string (&vstr, "h263p");
+          gst_value_list_append_value (&list, &vstr);
+
+          if (l || v) {
+            g_value_set_static_string (&vstr, "h263pp");
+            gst_value_list_append_value (&list, &vstr);
+          }
+          g_value_unset (&vstr);
+
+          gst_structure_set_value (new_s, "h263version", &list);
+          g_value_unset (&list);
+        } else {
+          gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
+        }
+
+
+        if (!f)
+          gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
+        if (!i)
+          gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
+        if (!j)
+          gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
+        if (!t)
+          gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);
+        if (!l)
+          gst_structure_set (new_s, "annex-l", G_TYPE_BOOLEAN, FALSE, NULL);
+        if (!v)
+          gst_structure_set (new_s, "annex-v", G_TYPE_BOOLEAN, FALSE, NULL);
+
+        /* FIXME:
+         * Ignore the profile for now, gst-ffmpeg needs to accept
+         * height/width/framerates first
+         */
+
+        gst_caps_merge_structure (caps, new_s);
+      } else {
+        GstStructure *new_s = gst_structure_new ("video/x-h263",
+            "variant", G_TYPE_STRING, "itu",
+            "h263version", G_TYPE_STRING, "h263",
+            NULL);
+
+        GST_DEBUG_OBJECT (rtph263ppay, "No profile or level specified"
+            " for H263-2000, defaulting to baseline H263");
+
+        gst_caps_merge_structure (caps, new_s);
+      }
+    } else {
+      gboolean f = FALSE, i = FALSE, j = FALSE, t = FALSE;
+      /* FIXME: ffmpeg supports Annex K too, how do we express it?
+       * guint k;
+       */
+      const gchar *str;
+      GstStructure *new_s = gst_structure_new ("video/x-h263",
+          "variant", G_TYPE_STRING, "itu",
+          NULL);
+
+      str = gst_structure_get_string (s, "f");
+      if (str && !strcmp (str, "1"))
+        f = TRUE;
+
+      str = gst_structure_get_string (s, "i");
+      if (str && !strcmp (str, "1"))
+        i = TRUE;
+
+      str = gst_structure_get_string (s, "j");
+      if (str && !strcmp (str, "1"))
+        j = TRUE;
+
+      str = gst_structure_get_string (s, "t");
+      if (str && !strcmp (str, "1"))
+        t = TRUE;
+
+      if (f || i || j || t) {
+        GValue list = { 0 };
+        GValue vstr = { 0 };
+
+        g_value_init (&list, GST_TYPE_LIST);
+        g_value_init (&vstr, G_TYPE_STRING);
+
+        g_value_set_static_string (&vstr, "h263");
+        gst_value_list_append_value (&list, &vstr);
+        g_value_set_static_string (&vstr, "h263p");
+        gst_value_list_append_value (&list, &vstr);
+        g_value_unset (&vstr);
+
+        gst_structure_set_value (new_s, "h263version", &list);
+        g_value_unset (&list);
+      } else {
+        gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
+      }
+
+      if (!f)
+        gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
+      if (!i)
+        gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
+      if (!j)
+        gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
+      if (!t)
+        gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);
+
+      gst_caps_merge_structure (caps, new_s);
+    }
+  }
+
+  gst_caps_unref (intersect);
+
+  return caps;
+}
+
+
+static void
+gst_rtp_h263p_pay_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRtpH263PPay *rtph263ppay;
+
+  rtph263ppay = GST_RTP_H263P_PAY (object);
+
+  switch (prop_id) {
+    case PROP_FRAGMENTATION_MODE:
+      rtph263ppay->fragmentation_mode = g_value_get_enum (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_rtp_h263p_pay_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRtpH263PPay *rtph263ppay;
+
+  rtph263ppay = GST_RTP_H263P_PAY (object);
+
+  switch (prop_id) {
+    case PROP_FRAGMENTATION_MODE:
+      g_value_set_enum (value, rtph263ppay->fragmentation_mode);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
 
 static GstFlowReturn
 gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
@@ -162,29 +483,64 @@ gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
     return GST_FLOW_OK;
 
   fragmented = FALSE;
-
-  /* This algorithm assumes the H263+ encoder sends complete frames in each
+  /* This algorithm assumes the H263/+/++ encoder sends complete frames in each
    * buffer */
-  /* This algorithm implements the Follow-on packets method for packetization.
-   * This assumes low packet loss network. A more resilient method would be to
-   * separate large frames at synchronisation points (Segments) (See RFC 2429
-   * section 6). It would be interesting to have a property such as network
-   * quality to select between both packetization methods */
-  /* TODO Add VRC supprt (See RFC 2429 section 4.2) */
+  /* With Fragmentation Mode at GST_FRAGMENTATION_MODE_NORMAL:
+   * This algorithm implements the Follow-on packets method for packetization.
+   * This assumes low packet loss network.
+   * With Fragmentation Mode at GST_FRAGMENTATION_MODE_SYNC:
+   * This algorithm separates large frames at synchronisation points (Segments)
+   * (See RFC 4629 section 6). It would be interesting to have a property such as network
+   * quality to select between both packetization methods */
+  /* TODO Add VRC support (See RFC 4629 section 5.2) */
+
+
   while (avail > 0) {
     guint towrite;
     guint8 *payload;
-    guint8 *data;
     guint payload_len;
     gint header_len;
+    guint next_gop = 0;
+    gboolean found_gob = FALSE;
+
+    if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
+      /* start after the 1st possible GOB */
+      guint parsed_len = 3;
+      const guint8 *parse_data = NULL;
+
+      parse_data = gst_adapter_peek (rtph263ppay->adapter, avail);
+
+      /* Check if we have a GOB or EOS, EOSSBS */
+      /* FIXME EOS and EOSSBS packets should never contain any GOBs and vice-versa */
+      if (avail >= 3 && *parse_data == 0 && *(parse_data + 1) == 0
+          && *(parse_data + 2) >= 0x80) {
+        GST_DEBUG_OBJECT (rtph263ppay, " Found GOB header");
+        found_gob = TRUE;
+      }
+      /* Find the next GOB header and cut the packet accordingly */
+      /* TODO we should get as many GOBs as possible until the MTU is reached; this
+       * code seems to just get one GOB per packet */
+      while (parsed_len + 2 < avail) {
+        if (parse_data[parsed_len] == 0 && parse_data[parsed_len + 1] == 0
+            && parse_data[parsed_len + 2] >= 0x80) {
+          next_gop = parsed_len;
+          GST_DEBUG_OBJECT (rtph263ppay, " Next GOB Detected at :  %d",
+              next_gop);
+          break;
+        }
+        parsed_len++;
+      }
+    }
 
     /* for picture start frames (non-fragmented), we need to remove the first
      * two 0x00 bytes and set P=1 */
-    header_len = (fragmented ? 2 : 0);
+    header_len = (fragmented && !found_gob) ? 2 : 0;
 
     towrite = MIN (avail,
         gst_rtp_buffer_calc_payload_len
         (GST_BASE_RTP_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));
 
+    if (next_gop > 0)
+      towrite = MIN (next_gop, towrite);
+
     payload_len = header_len + towrite;
 
     outbuf = gst_rtp_buffer_new_allocate (payload_len, 0, 0);
@@ -193,8 +549,7 @@
 
     payload = gst_rtp_buffer_get_payload (outbuf);
 
-    data = (guint8 *) gst_adapter_peek (rtph263ppay->adapter, towrite);
-    memcpy (&payload[header_len], data, towrite);
+    gst_adapter_copy (rtph263ppay->adapter, &payload[header_len], 0, towrite);
 
     /*  0                   1
      *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
@@ -202,10 +557,13 @@
      * |   RR    |P|V|   PLEN    |PEBIT|
      * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
      */
-    payload[0] = fragmented ? 0x00 : 0x04;
+    /* if fragmented or GOB header, write P bit = 1 */
+    payload[0] = (fragmented && !found_gob) ? 0x00 : 0x04;
    payload[1] = 0;
 
-    GST_BUFFER_TIMESTAMP (outbuf) = rtph263ppay->first_ts;
+    GST_BUFFER_TIMESTAMP (outbuf) = rtph263ppay->first_timestamp;
+    GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
+
     gst_adapter_flush (rtph263ppay->adapter, towrite);
 
     ret = gst_basertppayload_push (GST_BASE_RTP_PAYLOAD (rtph263ppay), outbuf);
@@ -223,12 +581,11 @@
 {
   GstRtpH263PPay *rtph263ppay;
   GstFlowReturn ret;
-  guint size;
 
   rtph263ppay = GST_RTP_H263P_PAY (payload);
 
-  size = GST_BUFFER_SIZE (buffer);
-  rtph263ppay->first_ts = GST_BUFFER_TIMESTAMP (buffer);
+  rtph263ppay->first_timestamp = GST_BUFFER_TIMESTAMP (buffer);
+  rtph263ppay->first_duration = GST_BUFFER_DURATION (buffer);
 
   /* we always encode and flush a full picture */
   gst_adapter_push (rtph263ppay->adapter, buffer);
@@ -241,5 +598,5 @@ gboolean
 gst_rtp_h263p_pay_plugin_init (GstPlugin * plugin)
 {
   return gst_element_register (plugin, "rtph263ppay",
-      GST_RANK_NONE, GST_TYPE_RTP_H263P_PAY);
+      GST_RANK_SECONDARY, GST_TYPE_RTP_H263P_PAY);
 }
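For readers of the patch, the two pieces of bitstream logic it leans on above are the 2-byte H.263-1998 (RFC 4629) payload header, whose P bit stands in for a stripped leading 0x00 0x00 pair, and the start-code test that the sync fragmentation mode uses to find Picture/GOB boundaries. The following standalone C sketch is illustration only -- it is not part of the diff, and the helper names (h263p_make_payload_header, h263p_is_sync_point) are invented for the example; glib is assumed for the typedefs.

/* Illustration only -- not taken from the patch.
 * Build with: gcc example.c $(pkg-config --cflags --libs glib-2.0) */
#include <glib.h>

/* Fill the 2-byte RFC 4629 payload header.  RR, V, PLEN and PEBIT stay 0,
 * as in gst_rtp_h263p_pay_flush () above; only the P bit (the 0x04 bit of
 * the first byte) varies.  P=1 means the payload starts on a Picture/GOB
 * start code whose two leading zero bytes were removed. */
static void
h263p_make_payload_header (guint8 header[2], gboolean p_bit)
{
  header[0] = p_bit ? 0x04 : 0x00;
  header[1] = 0;
}

/* A Picture, GOB, EOS or EOSBS start code begins with at least 16 zero bits
 * followed by a 1 bit, i.e. 0x00 0x00 and then a byte >= 0x80 -- the same
 * test the sync fragmentation mode applies to the adapter data. */
static gboolean
h263p_is_sync_point (const guint8 * data, guint avail)
{
  return avail >= 3 && data[0] == 0 && data[1] == 0 && data[2] >= 0x80;
}

int
main (void)
{
  /* synthetic bytes shaped like a picture start code, for demonstration */
  static const guint8 psc[] = { 0x00, 0x00, 0x80, 0x02 };
  guint8 header[2];

  h263p_make_payload_header (header, h263p_is_sync_point (psc, sizeof (psc)));
  g_print ("P bit set: %s, header: %02x %02x\n",
      header[0] & 0x04 ? "yes" : "no", header[0], header[1]);
  return 0;
}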
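As a usage sketch (assumed, not taken from this patch): in the GStreamer 0.10 series the payloader would typically follow an H.263+ encoder such as gst-ffmpeg's ffenc_h263p, for example gst-launch-0.10 videotestsrc ! ffenc_h263p ! rtph263ppay fragmentation-mode=sync ! udpsink host=127.0.0.1 port=5000, where fragmentation-mode=sync selects the GOB-boundary packetization introduced above and the default normal mode keeps the Follow-on packets behaviour.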