1 /* ex: set tabstop=2 shiftwidth=2 expandtab: */
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
27 #include <gst/rtp/gstrtpbuffer.h>
28 #include <gst/pbutils/pbutils.h>
29 #include <gst/video/video.h>
31 /* Included to not duplicate gst_rtp_h264_add_sps_pps () */
32 #include "gstrtph264depay.h"
34 #include "gstrtpelements.h"
35 #include "gstrtph264pay.h"
36 #include "gstrtputils.h"
37 #include "gstbuffermemory.h"
44 #define STAP_A_TYPE_ID 24
45 #define FU_A_TYPE_ID 28
47 GST_DEBUG_CATEGORY_STATIC (rtph264pay_debug);
48 #define GST_CAT_DEFAULT (rtph264pay_debug)
50 #define GST_TYPE_RTP_H264_AGGREGATE_MODE \
51 (gst_rtp_h264_aggregate_mode_get_type ())
/* Lazily registers and returns the GType for the GstRtpH264AggregateMode
 * enum exposed by the "aggregate-mode" property: none / zero-latency /
 * max-stap. */
55 gst_rtp_h264_aggregate_mode_get_type (void)
   /* registered once; cached for subsequent calls */
57 static GType type = 0;
58 static const GEnumValue values[] = {
59 {GST_RTP_H264_AGGREGATE_NONE, "Do not aggregate NAL units", "none"},
60 {GST_RTP_H264_AGGREGATE_ZERO_LATENCY,
61 "Aggregate NAL units until a VCL unit is included", "zero-latency"},
62 {GST_RTP_H264_AGGREGATE_MAX_STAP,
63 "Aggregate all NAL units with the same timestamp (adds one frame of"
64 " latency)", "max-stap"},
   /* register the enum under a stable name for introspection */
69 type = g_enum_register_static ("GstRtpH264AggregateMode", values);
81 static GstStaticPadTemplate gst_rtp_h264_pay_sink_template =
82 GST_STATIC_PAD_TEMPLATE ("sink",
85 GST_STATIC_CAPS ("video/x-h264, "
86 "stream-format = (string) avc, alignment = (string) au;"
88 "stream-format = (string) byte-stream, alignment = (string) { nal, au }")
91 static GstStaticPadTemplate gst_rtp_h264_pay_src_template =
92 GST_STATIC_PAD_TEMPLATE ("src",
95 GST_STATIC_CAPS ("application/x-rtp, "
96 "media = (string) \"video\", "
97 "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
98 "clock-rate = (int) 90000, " "encoding-name = (string) \"H264\"")
101 #define DEFAULT_SPROP_PARAMETER_SETS NULL
102 #define DEFAULT_CONFIG_INTERVAL 0
103 #define DEFAULT_AGGREGATE_MODE GST_RTP_H264_AGGREGATE_NONE
108 PROP_SPROP_PARAMETER_SETS,
109 PROP_CONFIG_INTERVAL,
113 static void gst_rtp_h264_pay_finalize (GObject * object);
115 static void gst_rtp_h264_pay_set_property (GObject * object, guint prop_id,
116 const GValue * value, GParamSpec * pspec);
117 static void gst_rtp_h264_pay_get_property (GObject * object, guint prop_id,
118 GValue * value, GParamSpec * pspec);
120 static GstCaps *gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload,
121 GstPad * pad, GstCaps * filter);
122 static gboolean gst_rtp_h264_pay_setcaps (GstRTPBasePayload * basepayload,
124 static GstFlowReturn gst_rtp_h264_pay_handle_buffer (GstRTPBasePayload * pad,
126 static gboolean gst_rtp_h264_pay_sink_event (GstRTPBasePayload * payload,
128 static GstStateChangeReturn gst_rtp_h264_pay_change_state (GstElement *
129 element, GstStateChange transition);
130 static gboolean gst_rtp_h264_pay_src_query (GstPad * pad, GstObject * parent,
133 static void gst_rtp_h264_pay_reset_bundle (GstRtpH264Pay * rtph264pay);
135 #define gst_rtp_h264_pay_parent_class parent_class
136 G_DEFINE_TYPE (GstRtpH264Pay, gst_rtp_h264_pay, GST_TYPE_RTP_BASE_PAYLOAD);
137 GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph264pay, "rtph264pay",
138 GST_RANK_SECONDARY, GST_TYPE_RTP_H264_PAY, rtp_element_init (plugin));
/* Class initialisation: installs properties (sprop-parameter-sets,
 * config-interval, aggregate-mode), pad templates, element metadata and
 * the GObject / GstElement / GstRTPBasePayload virtual-method overrides. */
141 gst_rtp_h264_pay_class_init (GstRtpH264PayClass * klass)
143 GObjectClass *gobject_class;
144 GstElementClass *gstelement_class;
145 GstRTPBasePayloadClass *gstrtpbasepayload_class;
147 gobject_class = (GObjectClass *) klass;
148 gstelement_class = (GstElementClass *) klass;
149 gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
151 gobject_class->set_property = gst_rtp_h264_pay_set_property;
152 gobject_class->get_property = gst_rtp_h264_pay_get_property;
    /* Deprecated: manually forcing the sprop-parameter-sets; normally they
     * are extracted from the stream (NULL default). */
154 g_object_class_install_property (G_OBJECT_CLASS (klass),
155 PROP_SPROP_PARAMETER_SETS, g_param_spec_string ("sprop-parameter-sets",
156 "sprop-parameter-sets",
157 "The base64 sprop-parameter-sets to set in out caps (set to NULL to "
158 "extract from stream)",
159 DEFAULT_SPROP_PARAMETER_SETS,
160 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
    /* How often to re-inject SPS/PPS into the stream: 0 = never,
     * -1 = before every IDR frame, otherwise a period in seconds. */
162 g_object_class_install_property (G_OBJECT_CLASS (klass),
163 PROP_CONFIG_INTERVAL,
164 g_param_spec_int ("config-interval",
165 "SPS PPS Send Interval",
166 "Send SPS and PPS Insertion Interval in seconds (sprop parameter sets "
167 "will be multiplexed in the data stream when detected.) "
168 "(0 = disabled, -1 = send with every IDR frame)",
169 -1, 3600, DEFAULT_CONFIG_INTERVAL,
170 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
174 * GstRtpH264Pay:aggregate-mode
176 * Bundle suitable SPS/PPS NAL units into STAP-A aggregate packets.
178 * This can potentially reduce RTP packetization overhead but not all
179 * RTP implementations handle it correctly.
181 * For best compatibility, it is recommended to set this to "none" (the
182 * default) for RTSP and for WebRTC to "zero-latency".
186 g_object_class_install_property (G_OBJECT_CLASS (klass),
188 g_param_spec_enum ("aggregate-mode",
189 "Attempt to use aggregate packets",
190 "Bundle suitable SPS/PPS NAL units into STAP-A "
192 GST_TYPE_RTP_H264_AGGREGATE_MODE,
193 DEFAULT_AGGREGATE_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
196 gobject_class->finalize = gst_rtp_h264_pay_finalize;
198 gst_element_class_add_static_pad_template (gstelement_class,
199 &gst_rtp_h264_pay_src_template);
200 gst_element_class_add_static_pad_template (gstelement_class,
201 &gst_rtp_h264_pay_sink_template);
203 gst_element_class_set_static_metadata (gstelement_class, "RTP H264 payloader",
204 "Codec/Payloader/Network/RTP",
205 "Payload-encode H264 video into RTP packets (RFC 3984)",
206 "Laurent Glayal <spglegle@yahoo.fr>");
208 gstelement_class->change_state =
209 GST_DEBUG_FUNCPTR (gst_rtp_h264_pay_change_state);
    /* base-payloader overrides: caps negotiation, buffer handling, events */
211 gstrtpbasepayload_class->get_caps = gst_rtp_h264_pay_getcaps;
212 gstrtpbasepayload_class->set_caps = gst_rtp_h264_pay_setcaps;
213 gstrtpbasepayload_class->handle_buffer = gst_rtp_h264_pay_handle_buffer;
214 gstrtpbasepayload_class->sink_event = gst_rtp_h264_pay_sink_event;
216 GST_DEBUG_CATEGORY_INIT (rtph264pay_debug, "rtph264pay", 0,
217 "H264 RTP Payloader");
    /* expose the enum type so bindings/docs can see it */
219 gst_type_mark_as_plugin_api (GST_TYPE_RTP_H264_AGGREGATE_MODE, 0);
/* Instance initialisation: allocates the NAL-offset queue, the SPS/PPS
 * caches (buffers are auto-unreffed on removal), the input adapter, and
 * sets property/state defaults. */
223 gst_rtp_h264_pay_init (GstRtpH264Pay * rtph264pay)
225 rtph264pay->queue = g_array_new (FALSE, FALSE, sizeof (guint));
226 rtph264pay->profile = 0;
    /* stored SPS/PPS buffers are released by the arrays' free-func */
227 rtph264pay->sps = g_ptr_array_new_with_free_func (
228 (GDestroyNotify) gst_buffer_unref);
229 rtph264pay->pps = g_ptr_array_new_with_free_func (
230 (GDestroyNotify) gst_buffer_unref);
    /* -1 = no SPS/PPS sent yet (forces a send when needed) */
231 rtph264pay->last_spspps = -1;
232 rtph264pay->spspps_interval = DEFAULT_CONFIG_INTERVAL;
233 rtph264pay->aggregate_mode = DEFAULT_AGGREGATE_MODE;
234 rtph264pay->delta_unit = FALSE;
235 rtph264pay->discont = FALSE;
237 rtph264pay->adapter = gst_adapter_new ();
    /* custom src query handler to account for aggregation latency */
239 gst_pad_set_query_function (GST_RTP_BASE_PAYLOAD_SRCPAD (rtph264pay),
240 gst_rtp_h264_pay_src_query);
/* Drop all cached SPS and PPS buffers; truncating the arrays to zero
 * releases each stored buffer via the arrays' gst_buffer_unref free-func. */
244 gst_rtp_h264_pay_clear_sps_pps (GstRtpH264Pay * rtph264pay)
246 g_ptr_array_set_size (rtph264pay->sps, 0);
247 g_ptr_array_set_size (rtph264pay->pps, 0);
/* GObject finalize: releases the NAL queue, SPS/PPS caches, the
 * (possibly NULL) sprop-parameter-sets string, the adapter and any
 * pending STAP-A bundle, then chains up to the parent class. */
251 gst_rtp_h264_pay_finalize (GObject * object)
253 GstRtpH264Pay *rtph264pay;
255 rtph264pay = GST_RTP_H264_PAY (object);
257 g_array_free (rtph264pay->queue, TRUE);
    /* freeing the arrays unrefs every cached parameter-set buffer */
259 g_ptr_array_free (rtph264pay->sps, TRUE);
260 g_ptr_array_free (rtph264pay->pps, TRUE);
262 g_free (rtph264pay->sprop_parameter_sets);
264 g_object_unref (rtph264pay->adapter);
265 gst_rtp_h264_pay_reset_bundle (rtph264pay);
267 G_OBJECT_CLASS (parent_class)->finalize (object);
270 static const gchar all_levels[][4] = {
/* get_caps vfunc: derives acceptable sink caps from what the RTP peer
 * allows.  For each allowed structure, a 6-hex-digit profile-level-id
 * (RFC 3984 / RFC 6184) is decoded into H.264 profile/level constraints;
 * missing or invalid ids fall back to constrained-baseline.  The result
 * is intersected with the sink template (and the filter, if any). */
290 gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
293 GstCaps *template_caps;
294 GstCaps *allowed_caps;
295 GstCaps *caps, *icaps;
296 gboolean append_unrestricted;
300 gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);
302 if (allowed_caps == NULL)
306 gst_static_pad_template_get_caps (&gst_rtp_h264_pay_sink_template);
    /* ANY downstream -> just our template; EMPTY -> propagate EMPTY */
308 if (gst_caps_is_any (allowed_caps)) {
309 caps = gst_caps_ref (template_caps);
313 if (gst_caps_is_empty (allowed_caps)) {
314 caps = gst_caps_ref (allowed_caps);
318 caps = gst_caps_new_empty ();
320 append_unrestricted = FALSE;
321 for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
322 GstStructure *s = gst_caps_get_structure (allowed_caps, i);
323 GstStructure *new_s = gst_structure_new_empty ("video/x-h264");
324 const gchar *profile_level_id;
326 profile_level_id = gst_structure_get_string (s, "profile-level-id");
328 if (profile_level_id && strlen (profile_level_id) == 6) {
329 const gchar *profile;
    /* rebuild the 3 SPS bytes (profile_idc, constraint flags, level_idc)
     * from the hex string so pbutils can name the profile/level */
334 spsint = strtol (profile_level_id, NULL, 16);
335 sps[0] = spsint >> 16;
336 sps[1] = spsint >> 8;
339 profile = gst_codec_utils_h264_get_profile (sps, 3);
340 level = gst_codec_utils_h264_get_level (sps, 3);
342 if (profile && level) {
343 GST_LOG_OBJECT (payload, "In caps, have profile %s and level %s",
    /* constrained-baseline is a subset of every profile, so always
     * offer it in addition to the signalled profile */
346 if (!strcmp (profile, "constrained-baseline"))
347 gst_structure_set (new_s, "profile", G_TYPE_STRING, profile, NULL);
350 GValue profiles = { 0, };
352 g_value_init (&profiles, GST_TYPE_LIST);
353 g_value_init (&val, G_TYPE_STRING);
355 g_value_set_static_string (&val, profile);
356 gst_value_list_append_value (&profiles, &val);
358 g_value_set_static_string (&val, "constrained-baseline");
359 gst_value_list_append_value (&profiles, &val);
361 gst_structure_take_value (new_s, "profile", &profiles);
    /* level "1" is the minimum; otherwise offer every level up to and
     * including the signalled one (list built in all_levels order) */
364 if (!strcmp (level, "1"))
365 gst_structure_set (new_s, "level", G_TYPE_STRING, level, NULL);
367 GValue levels = { 0, };
371 g_value_init (&levels, GST_TYPE_LIST);
372 g_value_init (&val, G_TYPE_STRING);
374 for (j = 0; j < G_N_ELEMENTS (all_levels); j++) {
375 g_value_set_static_string (&val, all_levels[j]);
376 gst_value_list_prepend_value (&levels, &val);
377 if (!strcmp (level, all_levels[j]))
380 gst_structure_take_value (new_s, "level", &levels);
383 /* Invalid profile-level-id means baseline */
385 gst_structure_set (new_s,
386 "profile", G_TYPE_STRING, "constrained-baseline", NULL);
389 /* No profile-level-id means baseline or unrestricted */
391 gst_structure_set (new_s,
392 "profile", G_TYPE_STRING, "constrained-baseline", NULL);
393 append_unrestricted = TRUE;
396 caps = gst_caps_merge_structure (caps, new_s);
399 if (append_unrestricted) {
401 gst_caps_merge_structure (caps, gst_structure_new ("video/x-h264", NULL,
    /* constrain the result to what this element can actually accept */
405 icaps = gst_caps_intersect (caps, template_caps);
406 gst_caps_unref (caps);
411 GST_DEBUG_OBJECT (payload, "Intersect %" GST_PTR_FORMAT " and filter %"
412 GST_PTR_FORMAT, caps, filter);
413 icaps = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
414 gst_caps_unref (caps);
418 gst_caps_unref (template_caps);
419 gst_caps_unref (allowed_caps);
421 GST_LOG_OBJECT (payload, "returning caps %" GST_PTR_FORMAT, caps);
/* Src pad query handler.  For LATENCY queries, when max-stap aggregation
 * is active on non-AU-aligned input with a known framerate, one frame
 * duration is added to the reported latency (the bundle is held back
 * until the next AU boundary).  Everything else goes to the default
 * handler. */
426 gst_rtp_h264_pay_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
428 GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (parent);
430 if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
433 GstClockTime min_latency, max_latency;
435 retval = gst_pad_query_default (pad, parent, query);
    /* until caps are known we cannot judge the extra latency */
439 if (rtph264pay->stream_format == GST_H264_STREAM_FORMAT_UNKNOWN ||
440 rtph264pay->alignment == GST_H264_ALIGNMENT_UNKNOWN)
443 gst_query_parse_latency (query, &live, &min_latency, &max_latency);
445 if (rtph264pay->aggregate_mode == GST_RTP_H264_AGGREGATE_MAX_STAP &&
446 rtph264pay->alignment != GST_H264_ALIGNMENT_AU && rtph264pay->fps_num) {
447 GstClockTime one_frame = gst_util_uint64_scale_int (GST_SECOND,
448 rtph264pay->fps_denum, rtph264pay->fps_num);
450 min_latency += one_frame;
451 max_latency += one_frame;
452 gst_query_set_latency (query, live, min_latency, max_latency);
457 return gst_pad_query_default (pad, parent, query);
461 /* take the currently configured SPS and PPS lists and set them on the caps as
462 * sprop-parameter-sets */
464 gst_rtp_h264_pay_set_sps_pps (GstRTPBasePayload * basepayload)
466 GstRtpH264Pay *payloader = GST_RTP_H264_PAY (basepayload);
475 sprops = g_string_new ("");
478 /* build the sprop-parameter-sets */
    /* each SPS, then each PPS, base64-encoded and comma-separated */
479 for (i = 0; i < payloader->sps->len; i++) {
481 GST_BUFFER_CAST (g_ptr_array_index (payloader->sps, i));
483 gst_buffer_map (sps_buf, &map, GST_MAP_READ);
484 set = g_base64_encode (map.data, map.size);
485 gst_buffer_unmap (sps_buf, &map);
487 g_string_append_printf (sprops, "%s%s", count ? "," : "", set);
491 for (i = 0; i < payloader->pps->len; i++) {
493 GST_BUFFER_CAST (g_ptr_array_index (payloader->pps, i));
495 gst_buffer_map (pps_buf, &map, GST_MAP_READ);
496 set = g_base64_encode (map.data, map.size);
497 gst_buffer_unmap (pps_buf, &map);
499 g_string_append_printf (sprops, "%s%s", count ? "," : "", set);
    /* only set sprop-parameter-sets if we actually collected some */
504 if (G_LIKELY (count)) {
505 if (payloader->profile != 0) {
506 /* profile is 24 bit. Force it to respect the limit */
507 profile = g_strdup_printf ("%06x", payloader->profile & 0xffffff);
508 /* combine into output caps */
509 res = gst_rtp_base_payload_set_outcaps (basepayload,
510 "packetization-mode", G_TYPE_STRING, "1",
511 "profile-level-id", G_TYPE_STRING, profile,
512 "sprop-parameter-sets", G_TYPE_STRING, sprops->str, NULL);
515 res = gst_rtp_base_payload_set_outcaps (basepayload,
516 "packetization-mode", G_TYPE_STRING, "1",
517 "sprop-parameter-sets", G_TYPE_STRING, sprops->str, NULL);
521 res = gst_rtp_base_payload_set_outcaps (basepayload, NULL);
523 g_string_free (sprops, TRUE);
/* set_caps vfunc: records alignment (au/nal), stream-format
 * (avc/byte-stream) and framerate from the sink caps.  For AVC input the
 * avcC codec_data is parsed: profile-level-id (24 bit), NAL length-field
 * size, and every SPS/PPS, which are added to the caches and pushed into
 * the output caps.  Byte-stream input extracts parameter sets from the
 * stream instead. */
530 gst_rtp_h264_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
532 GstRtpH264Pay *rtph264pay;
539 const gchar *alignment, *stream_format;
541 rtph264pay = GST_RTP_H264_PAY (basepayload);
543 str = gst_caps_get_structure (caps, 0);
545 /* we can only set the output caps when we found the sprops and profile
547 gst_rtp_base_payload_set_options (basepayload, "video", TRUE, "H264", 90000);
549 rtph264pay->alignment = GST_H264_ALIGNMENT_UNKNOWN;
550 alignment = gst_structure_get_string (str, "alignment");
552 if (g_str_equal (alignment, "au"))
553 rtph264pay->alignment = GST_H264_ALIGNMENT_AU;
554 if (g_str_equal (alignment, "nal"))
555 rtph264pay->alignment = GST_H264_ALIGNMENT_NAL;
558 rtph264pay->stream_format = GST_H264_STREAM_FORMAT_UNKNOWN;
559 stream_format = gst_structure_get_string (str, "stream-format");
561 if (g_str_equal (stream_format, "avc"))
562 rtph264pay->stream_format = GST_H264_STREAM_FORMAT_AVC;
563 if (g_str_equal (stream_format, "byte-stream"))
564 rtph264pay->stream_format = GST_H264_STREAM_FORMAT_BYTESTREAM;
    /* framerate is optional; 0/0 means unknown (affects latency query) */
567 if (!gst_structure_get_fraction (str, "framerate", &rtph264pay->fps_num,
568 &rtph264pay->fps_denum))
569 rtph264pay->fps_num = rtph264pay->fps_denum = 0;
571 /* packetized AVC video has a codec_data */
572 if ((value = gst_structure_get_value (str, "codec_data"))) {
573 guint num_sps, num_pps;
576 GST_DEBUG_OBJECT (rtph264pay, "have packetized h264");
578 buffer = gst_value_get_buffer (value);
580 gst_buffer_map (buffer, &map, GST_MAP_READ);
584 /* parse the avcC data */
587 /* parse the version, this must be 1 */
591 /* AVCProfileIndication */
593 /* AVCLevelIndication */
    /* bytes 1-3 of avcC form the 24-bit profile-level-id */
594 rtph264pay->profile = (data[1] << 16) | (data[2] << 8) | data[3];
595 GST_DEBUG_OBJECT (rtph264pay, "profile %06x", rtph264pay->profile);
597 /* 6 bits reserved | 2 bits lengthSizeMinusOne */
598 /* this is the number of bytes in front of the NAL units to mark their
600 rtph264pay->nal_length_size = (data[4] & 0x03) + 1;
601 GST_DEBUG_OBJECT (rtph264pay, "nal length %u", rtph264pay->nal_length_size);
602 /* 3 bits reserved | 5 bits numOfSequenceParameterSets */
603 num_sps = data[5] & 0x1f;
604 GST_DEBUG_OBJECT (rtph264pay, "num SPS %u", num_sps);
609 /* create the sprop-parameter-sets */
610 for (i = 0; i < num_sps; i++) {
    /* each parameter set is prefixed with a 16-bit big-endian size */
616 nal_size = (data[0] << 8) | data[1];
620 GST_LOG_OBJECT (rtph264pay, "SPS %d size %d", i, nal_size);
625 /* make a buffer out of it and add to SPS list */
626 sps_buf = gst_buffer_new_and_alloc (nal_size);
627 gst_buffer_fill (sps_buf, 0, data, nal_size);
628 gst_rtp_h264_add_sps_pps (GST_ELEMENT (rtph264pay), rtph264pay->sps,
629 rtph264pay->pps, sps_buf);
636 /* 8 bits numOfPictureParameterSets */
641 GST_DEBUG_OBJECT (rtph264pay, "num PPS %u", num_pps);
642 for (i = 0; i < num_pps; i++) {
648 nal_size = (data[0] << 8) | data[1];
652 GST_LOG_OBJECT (rtph264pay, "PPS %d size %d", i, nal_size);
657 /* make a buffer out of it and add to PPS list */
658 pps_buf = gst_buffer_new_and_alloc (nal_size);
659 gst_buffer_fill (pps_buf, 0, data, nal_size);
660 gst_rtp_h264_add_sps_pps (GST_ELEMENT (rtph264pay), rtph264pay->sps,
661 rtph264pay->pps, pps_buf);
667 /* and update the caps with the collected data */
668 if (!gst_rtp_h264_pay_set_sps_pps (basepayload))
669 goto set_sps_pps_failed;
671 gst_buffer_unmap (buffer, &map);
673 GST_DEBUG_OBJECT (rtph264pay, "have bytestream h264");
    /* error paths: log, unmap and bail out */
680 GST_ERROR_OBJECT (rtph264pay, "avcC size %" G_GSIZE_FORMAT " < 7", size);
685 GST_ERROR_OBJECT (rtph264pay, "wrong avcC version");
690 GST_ERROR_OBJECT (rtph264pay, "avcC too small ");
695 GST_ERROR_OBJECT (rtph264pay, "failed to set sps/pps");
700 gst_buffer_unmap (buffer, &map);
/* Parse the user-supplied sprop-parameter-sets property: clear the
 * current SPS/PPS caches, split the string on commas, base64-decode each
 * entry into a buffer and add it to the appropriate cache.  Empty
 * decodes are discarded. */
706 gst_rtp_h264_pay_parse_sprop_parameter_sets (GstRtpH264Pay * rtph264pay)
714 ps = rtph264pay->sprop_parameter_sets;
718 gst_rtp_h264_pay_clear_sps_pps (rtph264pay);
720 params = g_strsplit (ps, ",", 0);
721 len = g_strv_length (params);
723 GST_DEBUG_OBJECT (rtph264pay, "we have %d params", len);
725 for (i = 0; params[i]; i++) {
    /* base64 decodes to at most the encoded length; shrink afterwards */
732 nal_len = strlen (params[i]);
733 buf = gst_buffer_new_and_alloc (nal_len);
735 gst_buffer_map (buf, &map, GST_MAP_WRITE);
737 nal_len = g_base64_decode_step (params[i], nal_len, nalp, &state, &save);
738 gst_buffer_unmap (buf, &map);
739 gst_buffer_resize (buf, 0, nal_len);
    /* drop entries that decoded to nothing */
742 gst_buffer_unref (buf);
746 gst_rtp_h264_add_sps_pps (GST_ELEMENT (rtph264pay), rtph264pay->sps,
747 rtph264pay->pps, buf);
/* Scan @data for the next 00 00 01 start-code prefix and return its
 * offset, or @size if none is found.  Uses a simplified Boyer-Moore
 * search over the two-symbol (0/1) alphabet, probing every third byte
 * and scanning backwards on a candidate match. */
753 next_start_code (const guint8 * data, guint size)
755 /* Boyer-Moore string matching algorithm, in a degenerative
756 * sense because our search 'alphabet' is binary - 0 & 1 only.
757 * This allow us to simplify the general BM algorithm to a very
759 /* assume 1 is in the 3th byte */
762 while (offset < size) {
763 if (1 == data[offset]) {
764 unsigned int shift = offset;
766 if (0 == data[--shift]) {
767 if (0 == data[--shift]) {
771 /* The jump is always 3 because of the 1 previously matched.
772 * All the 0's must be after this '1' matched at offset */
774 } else if (0 == data[offset]) {
775 /* maybe next byte is 1? */
778 /* can jump 3 bytes forward */
781 /* at each iteration, we rescan in a backward manner until
782 * we match 0.0.1 in reverse order. Since our search string
783 * has only 2 'alpabets' (i.e. 0 & 1), we know that any
784 * mismatch will force us to shift a fixed number of steps */
786 GST_DEBUG ("Cannot find next NAL start code. returning %u", size);
/* Inspect one NAL unit.  If it is an SPS or PPS, strip trailing zero
 * padding, cache a copy via gst_rtp_h264_add_sps_pps () and record the
 * running time so periodic re-insertion (config-interval) knows when the
 * parameter sets were last seen.  Returns whether the caches were
 * updated (default FALSE). */
792 gst_rtp_h264_pay_decode_nal (GstRtpH264Pay * payloader,
793 const guint8 * data, guint size, GstClockTime dts, GstClockTime pts)
798 /* default is no update */
801 GST_DEBUG ("NAL payload len=%u", size);
    /* low 5 bits of the NAL header are the nal_unit_type */
804 type = header & 0x1f;
806 /* We record the timestamp of the last SPS/PPS so
807 * that we can insert them at regular intervals and when needed. */
808 if (SPS_TYPE_ID == type || PPS_TYPE_ID == type) {
811 /* trailing 0x0 are not part of the SPS/PPS */
812 while (size > 0 && data[size - 1] == 0x0)
815 /* encode the entire SPS NAL in base64 */
816 GST_DEBUG ("Found %s %x %x %x Len=%u", type == SPS_TYPE_ID ? "SPS" : "PPS",
817 (header >> 7), (header >> 5) & 3, type, size);
819 nal = gst_buffer_new_allocate (NULL, size, NULL);
820 gst_buffer_fill (nal, 0, data, size);
822 updated = gst_rtp_h264_add_sps_pps (GST_ELEMENT (payloader),
823 payloader->sps, payloader->pps, nal);
825 /* remember when we last saw SPS */
827 payloader->last_spspps =
828 gst_segment_to_running_time (&GST_RTP_BASE_PAYLOAD_CAST
829 (payloader)->segment, GST_FORMAT_TIME, pts);
831 GST_DEBUG ("NAL: %x %x %x Len = %u", (header >> 7),
832 (header >> 5) & 3, type, size);
839 gst_rtp_h264_pay_payload_nal (GstRTPBasePayload * basepayload,
840 GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
841 gboolean delta_unit, gboolean discont);
844 gst_rtp_h264_pay_payload_nal_single (GstRTPBasePayload * basepayload,
845 GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
846 gboolean delta_unit, gboolean discont);
849 gst_rtp_h264_pay_payload_nal_fragment (GstRTPBasePayload * basepayload,
850 GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
851 gboolean delta_unit, gboolean discont, guint8 nal_header);
854 gst_rtp_h264_pay_payload_nal_bundle (GstRTPBasePayload * basepayload,
855 GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
856 gboolean delta_unit, gboolean discont, guint8 nal_header);
/* Push every cached SPS then every cached PPS as individual NAL payloads
 * with the given timestamps.  Push failures are logged as warnings, not
 * fatal; last_spspps is only updated when all sets were sent and pts is
 * valid, so a failed send triggers a retry on the next opportunity. */
859 gst_rtp_h264_pay_send_sps_pps (GstRTPBasePayload * basepayload,
860 GstClockTime dts, GstClockTime pts, gboolean delta_unit, gboolean discont)
862 GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (basepayload);
863 GstFlowReturn ret = GST_FLOW_OK;
864 gboolean sent_all_sps_pps = TRUE;
867 for (i = 0; i < rtph264pay->sps->len; i++) {
869 GST_BUFFER_CAST (g_ptr_array_index (rtph264pay->sps, i));
871 GST_DEBUG_OBJECT (rtph264pay, "inserting SPS in the stream");
    /* the first pushed packet carries the caller's delta/discont flags */
873 ret = gst_rtp_h264_pay_payload_nal (basepayload, gst_buffer_ref (sps_buf),
874 dts, pts, FALSE, delta_unit, discont);
875 /* Not critical here; but throw a warning */
876 if (ret != GST_FLOW_OK) {
877 sent_all_sps_pps = FALSE;
878 GST_WARNING_OBJECT (basepayload, "Problem pushing SPS");
881 for (i = 0; i < rtph264pay->pps->len; i++) {
883 GST_BUFFER_CAST (g_ptr_array_index (rtph264pay->pps, i));
885 GST_DEBUG_OBJECT (rtph264pay, "inserting PPS in the stream");
    /* PPS packets are always delta units and never discont */
887 ret = gst_rtp_h264_pay_payload_nal (basepayload, gst_buffer_ref (pps_buf),
888 dts, pts, FALSE, TRUE, FALSE);
889 /* Not critical here; but throw a warning */
890 if (ret != GST_FLOW_OK) {
891 sent_all_sps_pps = FALSE;
892 GST_WARNING_OBJECT (basepayload, "Problem pushing PPS");
896 if (pts != -1 && sent_all_sps_pps)
897 rtph264pay->last_spspps =
898 gst_segment_to_running_time (&basepayload->segment, GST_FORMAT_TIME,
904 /* @delta_unit: if %FALSE the first packet sent won't have the
905 * GST_BUFFER_FLAG_DELTA_UNIT flag.
906 * @discont: if %TRUE the first packet sent will have the
907 * GST_BUFFER_FLAG_DISCONT flag.
   *
   * Central dispatch for one NAL unit: rejects reserved NAL types
   * (STAP/MTAP ids used internally), ensures output caps are set, decides
   * whether SPS/PPS must be (re-)sent based on config-interval and IDR
   * frames, then hands the buffer to the bundle or fragment path.
   * Takes ownership of @paybuf.
908 */
910 gst_rtp_h264_pay_payload_nal (GstRTPBasePayload * basepayload,
911 GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
912 gboolean delta_unit, gboolean discont)
914 GstRtpH264Pay *rtph264pay;
915 guint8 nal_header, nal_type;
916 gboolean send_spspps;
919 rtph264pay = GST_RTP_H264_PAY (basepayload);
920 size = gst_buffer_get_size (paybuf);
922 gst_buffer_extract (paybuf, 0, &nal_header, 1);
923 nal_type = nal_header & 0x1f;
925 /* These payload type are reserved for STAP-A, STAP-B, MTAP16, and MTAP24
926 * as internally used NAL types */
932 GST_WARNING_OBJECT (rtph264pay, "Ignoring reserved NAL TYPE=%d",
934 gst_buffer_unref (paybuf);
940 GST_DEBUG_OBJECT (rtph264pay,
941 "payloading NAL Unit: datasize=%u type=%d pts=%" GST_TIME_FORMAT,
942 size, nal_type, GST_TIME_ARGS (pts));
944 /* should set src caps before pushing stuff,
945 * and if we did not see enough SPS/PPS, that may not be the case */
946 if (G_UNLIKELY (!gst_pad_has_current_caps (GST_RTP_BASE_PAYLOAD_SRCPAD
948 gst_rtp_h264_pay_set_sps_pps (basepayload);
952 /* check if we need to emit an SPS/PPS now */
953 if (nal_type == IDR_TYPE_ID && rtph264pay->spspps_interval > 0) {
954 if (rtph264pay->last_spspps != -1) {
956 GstClockTime running_time =
957 gst_segment_to_running_time (&basepayload->segment, GST_FORMAT_TIME,
960 GST_LOG_OBJECT (rtph264pay,
961 "now %" GST_TIME_FORMAT ", last SPS/PPS %" GST_TIME_FORMAT,
962 GST_TIME_ARGS (running_time),
963 GST_TIME_ARGS (rtph264pay->last_spspps));
965 /* calculate diff between last SPS/PPS in milliseconds */
966 if (running_time > rtph264pay->last_spspps)
967 diff = running_time - rtph264pay->last_spspps;
971 GST_DEBUG_OBJECT (rtph264pay,
972 "interval since last SPS/PPS %" GST_TIME_FORMAT,
973 GST_TIME_ARGS (diff));
975 /* bigger than interval, queue SPS/PPS */
976 if (GST_TIME_AS_SECONDS (diff) >= rtph264pay->spspps_interval) {
977 GST_DEBUG_OBJECT (rtph264pay, "time to send SPS/PPS");
981 /* no know previous SPS/PPS time, send now */
982 GST_DEBUG_OBJECT (rtph264pay, "no previous SPS/PPS time, send now");
985 } else if (nal_type == IDR_TYPE_ID && rtph264pay->spspps_interval == -1) {
986 GST_DEBUG_OBJECT (rtph264pay, "sending SPS/PPS before current IDR frame");
987 /* send SPS/PPS before every IDR frame */
991 if (send_spspps || rtph264pay->send_spspps) {
992 /* we need to send SPS/PPS now first. FIXME, don't use the pts for
993 * checking when we need to send SPS/PPS but convert to running_time first. */
996 rtph264pay->send_spspps = FALSE;
998 ret = gst_rtp_h264_pay_send_sps_pps (basepayload, dts, pts, delta_unit,
    /* on failure, release the pending buffer and propagate the error */
1000 if (ret != GST_FLOW_OK) {
1001 gst_buffer_unref (paybuf);
    /* aggregation enabled -> bundle path, else single/FU-A path */
1009 if (rtph264pay->aggregate_mode != GST_RTP_H264_AGGREGATE_NONE)
1010 return gst_rtp_h264_pay_payload_nal_bundle (basepayload, paybuf, dts, pts,
1011 end_of_au, delta_unit, discont, nal_header);
1013 return gst_rtp_h264_pay_payload_nal_fragment (basepayload, paybuf, dts, pts,
1014 end_of_au, delta_unit, discont, nal_header);
/* Send one NAL unit, using FU-A fragmentation (RFC 3984 sect. 5.8) when
 * it does not fit the MTU.  Each fragment carries a 2-byte FU indicator +
 * FU header (S/E bits and original nal_unit_type); payload memory is
 * shared with @paybuf, not copied.  Takes ownership of @paybuf. */
1017 static GstFlowReturn
1018 gst_rtp_h264_pay_payload_nal_fragment (GstRTPBasePayload * basepayload,
1019 GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
1020 gboolean delta_unit, gboolean discont, guint8 nal_header)
1022 GstRtpH264Pay *rtph264pay;
1023 guint mtu, size, max_fragment_size, max_fragments, ii, pos;
1026 GstBufferList *list = NULL;
1027 GstRTPBuffer rtp = { NULL };
1029 rtph264pay = GST_RTP_H264_PAY (basepayload);
1030 mtu = GST_RTP_BASE_PAYLOAD_MTU (rtph264pay);
1031 size = gst_buffer_get_size (paybuf);
1033 if (gst_rtp_buffer_calc_packet_len (size, 0, 0) <= mtu) {
1034 /* We don't need to fragment this packet */
1035 GST_DEBUG_OBJECT (rtph264pay,
1036 "sending NAL Unit: datasize=%u mtu=%u", size, mtu);
1037 return gst_rtp_h264_pay_payload_nal_single (basepayload, paybuf, dts, pts,
1038 end_of_au, delta_unit, discont);
1041 GST_DEBUG_OBJECT (basepayload,
1042 "using FU-A fragmentation for NAL Unit: datasize=%u mtu=%u", size, mtu);
1044 /* We keep 2 bytes for FU indicator and FU Header */
1045 max_fragment_size = gst_rtp_buffer_calc_payload_len (mtu - 2, 0, 0);
1046 max_fragments = (size + max_fragment_size - 2) / max_fragment_size;
1047 list = gst_buffer_list_new_sized (max_fragments);
1049 /* Start at the NALU payload */
     /* pos starts at 1: byte 0 (the NAL header) is re-encoded in the FU
      * indicator/header instead of being copied */
1050 for (pos = 1, ii = 0; pos < size; pos += max_fragment_size, ii++) {
1051 guint remaining, fragment_size;
1052 gboolean first_fragment, last_fragment;
1054 remaining = size - pos;
1055 fragment_size = MIN (remaining, max_fragment_size);
1056 first_fragment = (pos == 1);
1057 last_fragment = (remaining <= max_fragment_size);
1059 GST_DEBUG_OBJECT (basepayload,
1060 "creating FU-A packet %u/%u, size %u",
1061 ii + 1, max_fragments, fragment_size);
1064 * create buffer without payload containing only the RTP header
1065 * (memory block at index 0) */
1066 outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 2, 0, 0);
1068 gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
1070 GST_BUFFER_DTS (outbuf) = dts;
1071 GST_BUFFER_PTS (outbuf) = pts;
1072 payload = gst_rtp_buffer_get_payload (&rtp);
1074 /* If it's the last fragment and the end of this au, mark the end of
1076 gst_rtp_buffer_set_marker (&rtp, last_fragment && end_of_au);
     /* FU indicator: F + NRI bits from the original header, type FU-A */
1079 payload[0] = (nal_header & 0x60) | FU_A_TYPE_ID;
     /* FU header: S (start) / E (end) bits + original nal_unit_type */
1082 payload[1] = (first_fragment << 7) | (last_fragment << 6) |
1083 (nal_header & 0x1f);
1085 gst_rtp_buffer_unmap (&rtp);
1087 /* insert payload memory block */
1088 gst_rtp_copy_video_meta (rtph264pay, outbuf, paybuf);
1089 gst_buffer_copy_into (outbuf, paybuf, GST_BUFFER_COPY_MEMORY, pos,
1093 /* Only the first packet sent should not have the flag */
1096 GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
1099 GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
1100 /* Only the first packet sent should have the flag */
1104 /* add the buffer to the buffer list */
1105 gst_buffer_list_add (list, outbuf);
1108 GST_DEBUG_OBJECT (rtph264pay,
1109 "sending FU-A fragments: n=%u datasize=%u mtu=%u", ii, size, mtu);
1111 gst_buffer_unref (paybuf);
1112 return gst_rtp_base_payload_push_list (basepayload, list);
/* Send one NAL unit as a single RTP packet: allocate a header-only
 * output buffer, set marker/timestamps/flags, then append @paybuf as the
 * payload (zero-copy) and push it.  Takes ownership of @paybuf. */
1115 static GstFlowReturn
1116 gst_rtp_h264_pay_payload_nal_single (GstRTPBasePayload * basepayload,
1117 GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
1118 gboolean delta_unit, gboolean discont)
1120 GstRtpH264Pay *rtph264pay;
1122 GstRTPBuffer rtp = { NULL };
1124 rtph264pay = GST_RTP_H264_PAY (basepayload);
1126 /* create buffer without payload containing only the RTP header
1127 * (memory block at index 0) */
1128 outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);
1130 gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
1132 /* Mark the end of a frame */
1133 gst_rtp_buffer_set_marker (&rtp, end_of_au);
1135 /* timestamp the outbuffer */
1136 GST_BUFFER_PTS (outbuf) = pts;
1137 GST_BUFFER_DTS (outbuf) = dts;
1140 GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
1143 GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
1145 gst_rtp_buffer_unmap (&rtp);
1147 /* insert payload memory block */
1148 gst_rtp_copy_video_meta (rtph264pay, outbuf, paybuf);
1149 outbuf = gst_buffer_append (outbuf, paybuf);
1151 /* push the buffer to the next element */
1152 return gst_rtp_base_payload_push (basepayload, outbuf);
/* Discard any pending STAP-A bundle state: unref the buffer list (and
 * NULL the pointer), zero the accumulated size and clear the
 * contains-VCL flag. */
1156 gst_rtp_h264_pay_reset_bundle (GstRtpH264Pay * rtph264pay)
1158 g_clear_pointer (&rtph264pay->bundle, gst_buffer_list_unref);
1159 rtph264pay->bundle_size = 0;
1160 rtph264pay->bundle_contains_vcl = FALSE;
/* Flush the pending bundle.  A single queued NALU is pushed as-is; two
 * or more are packed into one STAP-A packet (RFC 3984 sect. 5.7.1):
 * a 1-byte STAP-A header (F bit OR-ed, highest NRI of all NALUs)
 * followed by 16-bit big-endian size + NALU data for each unit.
 * Timestamps and delta/discont flags come from the first queued buffer.
 * Resets the bundle state before pushing. */
1163 static GstFlowReturn
1164 gst_rtp_h264_pay_send_bundle (GstRtpH264Pay * rtph264pay, gboolean end_of_au)
1166 GstRTPBasePayload *basepayload;
1167 GstBufferList *bundle;
1168 guint length, bundle_size;
1169 GstBuffer *first, *outbuf;
1170 GstClockTime dts, pts;
1171 gboolean delta, discont;
1173 bundle_size = rtph264pay->bundle_size;
1175 if (bundle_size == 0) {
1176 GST_DEBUG_OBJECT (rtph264pay, "no bundle, nothing to send");
1180 basepayload = GST_RTP_BASE_PAYLOAD (rtph264pay);
1181 bundle = rtph264pay->bundle;
1182 length = gst_buffer_list_length (bundle);
1184 first = gst_buffer_list_get (bundle, 0);
1185 dts = GST_BUFFER_DTS (first);
1186 pts = GST_BUFFER_PTS (first);
1187 delta = GST_BUFFER_FLAG_IS_SET (first, GST_BUFFER_FLAG_DELTA_UNIT);
1188 discont = GST_BUFFER_FLAG_IS_SET (first, GST_BUFFER_FLAG_DISCONT);
1191 /* Push unaggregated NALU */
1192 outbuf = gst_buffer_ref (first);
1194 GST_DEBUG_OBJECT (rtph264pay,
1195 "sending NAL Unit unaggregated: datasize=%u", bundle_size - 2);
     /* aggregate: header-only buffer first, NALUs appended below */
1200 outbuf = gst_buffer_new_allocate (NULL, sizeof stap_header, NULL);
1201 stap_header = STAP_A_TYPE_ID;
1203 for (i = 0; i < length; i++) {
1204 GstBuffer *buf = gst_buffer_list_get (bundle, i);
1206 GstMemory *size_header;
1209 gst_buffer_extract (buf, 0, &nal_header, sizeof nal_header);
1211 /* Propagate F bit */
1212 if ((nal_header & 0x80))
1213 stap_header |= 0x80;
1215 /* Select highest nal_ref_idc */
1216 if ((nal_header & 0x60) > (stap_header & 0x60))
1217 stap_header = (stap_header & 0x9f) | (nal_header & 0x60);
1219 /* append NALU size */
1220 size_header = gst_allocator_alloc (NULL, 2, NULL);
1221 gst_memory_map (size_header, &map, GST_MAP_WRITE);
1222 GST_WRITE_UINT16_BE (map.data, gst_buffer_get_size (buf));
1223 gst_memory_unmap (size_header, &map);
1224 gst_buffer_append_memory (outbuf, size_header);
1226 /* append NALU data */
1227 outbuf = gst_buffer_append (outbuf, gst_buffer_ref (buf));
     /* write the finalised STAP-A header into byte 0 */
1230 gst_buffer_fill (outbuf, 0, &stap_header, sizeof stap_header);
1232 GST_DEBUG_OBJECT (rtph264pay,
1233 "sending STAP-A bundle: n=%u header=%02x datasize=%u",
1234 length, stap_header, bundle_size);
     /* clear state before pushing so re-entrancy cannot resend it */
1237 gst_rtp_h264_pay_reset_bundle (rtph264pay);
1238 return gst_rtp_h264_pay_payload_nal_single (basepayload, outbuf, dts, pts,
1239 end_of_au, delta, discont);
/* Queue one NAL unit into the current STAP-A aggregate, flushing the
 * aggregate first whenever required.
 *
 * Flush triggers visible below: an access-unit delimiter NAL, a DISCONT,
 * or a PTS/DTS change relative to the first queued NAL (start of a new
 * AU); an aggregate that would exceed the MTU is also sent first.  A NAL
 * too large for any bundle is handed to the fragmentation path (FU-A).
 * pay_size is 2 + NAL size: the 16-bit STAP-A size field per member.
 *
 * NOTE(review): this extraction has elided lines (several `if`/`else`
 * headers, `break`/`goto` statements, closing braces); comments describe
 * only the visible code. */
1243 gst_rtp_h264_pay_payload_nal_bundle (GstRTPBasePayload * basepayload,
1244 GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
1245 gboolean delta_unit, gboolean discont, guint8 nal_header)
1247 GstRtpH264Pay *rtph264pay;
1249 guint mtu, pay_size, bundle_size;
1250 GstBufferList *bundle;
1252 gboolean start_of_au;
1254 rtph264pay = GST_RTP_H264_PAY (basepayload);
1255 nal_type = nal_header & 0x1f;
1256 mtu = GST_RTP_BASE_PAYLOAD_MTU (rtph264pay);
/* 2 extra bytes: the per-member 16-bit size header inside a STAP-A. */
1257 pay_size = 2 + gst_buffer_get_size (paybuf);
1258 bundle = rtph264pay->bundle;
1259 start_of_au = FALSE;
/* An existing bundle: detect whether this NAL starts a new access unit. */
1262 GstBuffer *first = gst_buffer_list_get (bundle, 0);
1264 if (nal_type == AUD_TYPE_ID) {
1265 GST_DEBUG_OBJECT (rtph264pay, "found access delimiter");
1267 } else if (discont) {
1268 GST_DEBUG_OBJECT (rtph264pay, "found discont");
1270 } else if (GST_BUFFER_PTS (first) != pts || GST_BUFFER_DTS (first) != dts) {
1271 GST_DEBUG_OBJECT (rtph264pay, "found timestamp mismatch");
/* New AU detected: flush the pending bundle with end_of_au = TRUE. */
1277 GST_DEBUG_OBJECT (rtph264pay, "sending bundle before start of AU");
1279 ret = gst_rtp_h264_pay_send_bundle (rtph264pay, TRUE);
1280 if (ret != GST_FLOW_OK)
/* 1 byte for the STAP-A header + this NAL's contribution. */
1286 bundle_size = 1 + pay_size;
/* NAL alone exceeds the MTU: flush pending data, then fragment (FU-A). */
1288 if (gst_rtp_buffer_calc_packet_len (bundle_size, 0, 0) > mtu) {
1289 GST_DEBUG_OBJECT (rtph264pay, "NAL Unit cannot fit in a bundle");
1291 ret = gst_rtp_h264_pay_send_bundle (rtph264pay, FALSE);
1292 if (ret != GST_FLOW_OK)
1295 return gst_rtp_h264_pay_payload_nal_fragment (basepayload, paybuf, dts, pts,
1296 end_of_au, delta_unit, discont, nal_header);
1299 bundle_size = rtph264pay->bundle_size + pay_size;
/* Adding this NAL would overflow the MTU: flush the current bundle. */
1301 if (gst_rtp_buffer_calc_packet_len (bundle_size, 0, 0) > mtu) {
1302 GST_DEBUG_OBJECT (rtph264pay,
1303 "bundle overflows, sending: bundlesize=%u datasize=2+%u mtu=%u",
1304 rtph264pay->bundle_size, pay_size - 2, mtu);
1306 ret = gst_rtp_h264_pay_send_bundle (rtph264pay, FALSE);
1307 if (ret != GST_FLOW_OK)
/* No bundle in progress: start a fresh one (size 1 = STAP-A header). */
1314 GST_DEBUG_OBJECT (rtph264pay, "creating new STAP-A aggregate");
1315 bundle = rtph264pay->bundle = gst_buffer_list_new ();
1316 bundle_size = rtph264pay->bundle_size = 1;
1317 rtph264pay->bundle_contains_vcl = FALSE;
1320 GST_DEBUG_OBJECT (rtph264pay,
1321 "bundling NAL Unit: bundlesize=%u datasize=2+%u mtu=%u",
1322 rtph264pay->bundle_size, pay_size - 2, mtu);
/* Stamp timestamps/flags on the queued buffer so send_bundle can read
 * them back from the first list entry. */
1324 paybuf = gst_buffer_make_writable (paybuf);
1325 GST_BUFFER_PTS (paybuf) = pts;
1326 GST_BUFFER_DTS (paybuf) = dts;
1329 GST_BUFFER_FLAG_SET (paybuf, GST_BUFFER_FLAG_DELTA_UNIT);
1331 GST_BUFFER_FLAG_UNSET (paybuf, GST_BUFFER_FLAG_DELTA_UNIT);
1334 GST_BUFFER_FLAG_SET (paybuf, GST_BUFFER_FLAG_DISCONT);
1336 GST_BUFFER_FLAG_UNSET (paybuf, GST_BUFFER_FLAG_DISCONT);
1338 gst_buffer_list_add (bundle, gst_buffer_ref (paybuf));
1339 rtph264pay->bundle_size += pay_size;
/* Track whether the bundle holds a VCL NAL (slice types 1-5, plus 14 and
 * 20-23) — used by the zero-latency aggregate mode flush decision. */
1342 if ((nal_type >= 1 && nal_type <= 5) || nal_type == 14 ||
1343 (nal_type >= 20 && nal_type <= 23))
1344 rtph264pay->bundle_contains_vcl = TRUE;
/* End of the access unit: flush immediately with end_of_au = TRUE. */
1347 GST_DEBUG_OBJECT (rtph264pay, "sending bundle at end of AU");
1348 ret = gst_rtp_h264_pay_send_bundle (rtph264pay, TRUE);
/* Ownership: the bundle holds its own ref; drop the caller's paybuf. */
1352 gst_buffer_unref (paybuf);
/* Main chain handler: split the incoming buffer into NAL units and
 * payload each one.
 *
 * Two input formats are handled: AVC (length-prefixed NALs, iterated
 * directly over the buffer's memories) and byte-stream (start-code
 * separated NALs, accumulated in an adapter and scanned with
 * next_start_code()).  Called with buffer == NULL ("draining") to flush
 * the last NAL held back in byte-stream mode (e.g. on EOS).
 *
 * NOTE(review): this extraction has elided lines (branch headers, `goto`
 * labels, closing braces, some declarations); comments describe only the
 * visible code. */
1356 static GstFlowReturn
1357 gst_rtp_h264_pay_handle_buffer (GstRTPBasePayload * basepayload,
1360 GstRtpH264Pay *rtph264pay;
1365 GstClockTime dts, pts;
1368 GstBuffer *paybuf = NULL;
1370 gboolean delayed_not_delta_unit = FALSE;
1371 gboolean delayed_discont = FALSE;
1372 gboolean marker = FALSE;
/* NULL buffer means "drain whatever is pending" (byte-stream mode). */
1373 gboolean draining = (buffer == NULL);
1375 rtph264pay = GST_RTP_H264_PAY (basepayload);
1377 /* the input buffer contains one or more NAL units */
1379 avc = rtph264pay->stream_format == GST_H264_STREAM_FORMAT_AVC;
1382 /* In AVC mode, there is no adapter, so nothing to drain */
/* Byte-stream mode: a keyframe arriving while the adapter still holds
 * older data must not clear delta_unit until the old data is pushed. */
1387 if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
1388 if (gst_adapter_available (rtph264pay->adapter) == 0)
1389 rtph264pay->delta_unit = FALSE;
1391 /* This buffer contains a key frame but the adapter isn't empty. So
1392 * we'll purge it first by sending a first packet and then the second
1393 * one won't have the DELTA_UNIT flag. */
1394 delayed_not_delta_unit = TRUE;
/* Same delayed handling for DISCONT with a non-empty adapter. */
1397 if (GST_BUFFER_IS_DISCONT (buffer)) {
1398 if (gst_adapter_available (rtph264pay->adapter) == 0)
1399 rtph264pay->discont = TRUE;
1401 /* This buffer has the DISCONT flag but the adapter isn't empty. So
1402 * we'll purge it first by sending a first packet and then the second
1403 * one will have the DISCONT flag set. */
1404 delayed_discont = TRUE;
1407 marker = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MARKER);
/* Adapter takes ownership of the input buffer in byte-stream mode. */
1408 gst_adapter_push (rtph264pay->adapter, buffer);
1412 /* We want to use the first TS used to construct the following NAL */
1413 dts = gst_adapter_prev_dts (rtph264pay->adapter, NULL);
1414 pts = gst_adapter_prev_pts (rtph264pay->adapter, NULL);
1416 size = gst_adapter_available (rtph264pay->adapter);
1417 /* Nothing to do here if the adapter is empty, e.g. on EOS */
1420 data = gst_adapter_map (rtph264pay->adapter, size);
1421 GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes", size);
1426 /* now loop over all NAL units and put them in a packet */
/* ---- AVC path: iterate length-prefixed NALs over buffer memories ---- */
1428 GstBufferMemoryMap memory;
1429 gsize remaining_buffer_size;
1430 guint nal_length_size;
1433 gst_buffer_memory_map (buffer, &memory);
1434 remaining_buffer_size = gst_buffer_get_size (buffer);
1436 pts = GST_BUFFER_PTS (buffer);
1437 dts = GST_BUFFER_DTS (buffer);
1438 rtph264pay->delta_unit = GST_BUFFER_FLAG_IS_SET (buffer,
1439 GST_BUFFER_FLAG_DELTA_UNIT);
1440 rtph264pay->discont = GST_BUFFER_IS_DISCONT (buffer);
1441 marker = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MARKER);
1442 GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes",
1443 remaining_buffer_size);
/* nal_length_size comes from codec_data; NAL sizes are big-endian. */
1445 nal_length_size = rtph264pay->nal_length_size;
1447 while (remaining_buffer_size > nal_length_size) {
1449 gboolean end_of_au = FALSE;
/* Read the big-endian NAL length byte by byte. */
1452 for (i = 0; i < nal_length_size; i++) {
1453 nal_len = (nal_len << 8) + *memory.data;
1454 if (!gst_buffer_memory_advance_bytes (&memory, 1))
1458 offset += nal_length_size;
1459 remaining_buffer_size -= nal_length_size;
/* Clamp a declared NAL length that exceeds the remaining data. */
1461 if (remaining_buffer_size >= nal_len) {
1462 GST_DEBUG_OBJECT (basepayload, "got NAL of size %u", nal_len);
1464 nal_len = remaining_buffer_size;
1465 GST_DEBUG_OBJECT (basepayload, "got incomplete NAL of size %u",
1469 /* If we're at the end of the buffer, then we're at the end of the
1472 if (remaining_buffer_size - nal_len <= nal_length_size) {
1473 if (rtph264pay->alignment == GST_H264_ALIGNMENT_AU || marker)
/* Zero-copy sub-buffer referencing this NAL's bytes. */
1477 paybuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset,
1480 gst_rtp_h264_pay_payload_nal (basepayload, paybuf, dts, pts,
1481 end_of_au, rtph264pay->delta_unit, rtph264pay->discont);
1483 if (!rtph264pay->delta_unit)
1484 /* Only the first outgoing packet doesn't have the DELTA_UNIT flag */
1485 rtph264pay->delta_unit = TRUE;
1487 if (rtph264pay->discont)
1488 /* Only the first outgoing packet have the DISCONT flag */
1489 rtph264pay->discont = FALSE;
1491 if (ret != GST_FLOW_OK)
1494 /* Skip current nal. If it is split over multiple GstMemory
1495 * advance_bytes () will switch to the correct GstMemory. The payloader
1496 * does not access those bytes directly but uses gst_buffer_copy_region ()
1497 * to create a sub-buffer referencing the nal instead */
1498 if (!gst_buffer_memory_advance_bytes (&memory, nal_len))
1502 remaining_buffer_size -= nal_len;
1505 gst_buffer_memory_unmap (&memory);
1506 gst_buffer_unref (buffer);
/* ---- byte-stream path: scan adapter data for start codes ---- */
1509 gboolean update = FALSE;
1511 /* get offset of first start code */
1512 next = next_start_code (data, size);
1514 /* skip to start code, if no start code is found, next will be size and we
1515 * will not collect data. */
1518 nal_queue = rtph264pay->queue;
1521 /* array must be empty when we get here */
1522 g_assert (nal_queue->len == 0);
1524 GST_DEBUG_OBJECT (basepayload,
1525 "found first start at %u, bytes left %" G_GSIZE_FORMAT, next, size);
1527 /* first pass to locate NALs and parse SPS/PPS */
1529 /* skip start code */
1533 /* use next_start_code() to scan buffer.
1534 * next_start_code() returns the offset in data,
1535 * starting from zero to the first byte of 0.0.0.1
1536 * If no start code is found, it returns the value of the
1538 * data is unchanged by the call to next_start_code()
1540 next = next_start_code (data, size);
1542 /* nal or au aligned input needs no delaying until next time */
1543 if (next == size && !draining &&
1544 rtph264pay->alignment == GST_H264_ALIGNMENT_UNKNOWN) {
1545 /* Didn't find the start of next NAL and it's not EOS,
1546 * handle it next time */
1550 /* nal length is distance to next start code */
1553 GST_DEBUG_OBJECT (basepayload, "found next start at %u of size %u", next,
/* Explicit sprop-parameter-sets property overrides in-band SPS/PPS for
 * the outgoing caps; caps failure jumps to the warning block at the end. */
1556 if (rtph264pay->sprop_parameter_sets != NULL) {
1557 /* explicitly set profile and sprop, use those */
1558 if (rtph264pay->update_caps) {
1559 if (!gst_rtp_base_payload_set_outcaps (basepayload,
1560 "sprop-parameter-sets", G_TYPE_STRING,
1561 rtph264pay->sprop_parameter_sets, NULL))
1564 /* parse SPS and PPS from provided parameter set (for insertion) */
1565 gst_rtp_h264_pay_parse_sprop_parameter_sets (rtph264pay);
1567 rtph264pay->update_caps = FALSE;
1569 GST_DEBUG ("outcaps update: sprop-parameter-sets=%s",
1570 rtph264pay->sprop_parameter_sets);
1573 /* We know our stream is a valid H264 NAL packet,
1574 * go parse it for SPS/PPS to enrich the caps */
1575 /* order: make sure to check nal */
1577 gst_rtp_h264_pay_decode_nal (rtph264pay, data, nal_len, dts, pts)
1580 /* move to next NAL packet */
/* Remember this NAL's length for the second (payloading) pass. */
1584 g_array_append_val (nal_queue, nal_len);
1587 /* if has new SPS & PPS, update the output caps */
1588 if (G_UNLIKELY (update))
1589 if (!gst_rtp_h264_pay_set_sps_pps (basepayload))
1592 /* second pass to payload and push */
1594 if (nal_queue->len != 0)
1595 gst_adapter_flush (rtph264pay->adapter, skip);
1597 for (i = 0; i < nal_queue->len; i++) {
1599 gboolean end_of_au = FALSE;
1601 nal_len = g_array_index (nal_queue, guint, i);
1602 /* skip start code */
1603 gst_adapter_flush (rtph264pay->adapter, 3);
1605 /* Trim the end unless we're the last NAL in the stream.
1606 * In case we're not at the end of the buffer we know the next block
1607 * starts with 0x000001 so all the 0x00 bytes at the end of this one are
1608 * trailing 0x0 that can be discarded */
1610 data = gst_adapter_map (rtph264pay->adapter, size);
1611 if (i + 1 != nal_queue->len || !draining)
1612 for (; size > 1 && data[size - 1] == 0x0; size--)
1616 /* If it's the last nal unit we have in non-bytestream mode, we can
1617 * assume it's the end of an access-unit
1619 * FIXME: We need to wait until the next packet or EOS to
1620 * actually payload the NAL so we can know if the current NAL is
1621 * the last one of an access unit or not if we are in bytestream mode
1623 if (i == nal_queue->len - 1) {
1624 if (rtph264pay->alignment == GST_H264_ALIGNMENT_AU ||
/* Take the trimmed NAL out of the adapter as its own buffer. */
1628 paybuf = gst_adapter_take_buffer (rtph264pay->adapter, size);
1631 /* put the data in one or more RTP packets */
1633 gst_rtp_h264_pay_payload_nal (basepayload, paybuf, dts, pts,
1634 end_of_au, rtph264pay->delta_unit, rtph264pay->discont);
/* Apply the delayed keyframe/discont flags recorded at the top. */
1636 if (delayed_not_delta_unit) {
1637 rtph264pay->delta_unit = FALSE;
1638 delayed_not_delta_unit = FALSE;
1640 /* Only the first outgoing packet doesn't have the DELTA_UNIT flag */
1641 rtph264pay->delta_unit = TRUE;
1644 if (delayed_discont) {
1645 rtph264pay->discont = TRUE;
1646 delayed_discont = FALSE;
1648 /* Only the first outgoing packet have the DISCONT flag */
1649 rtph264pay->discont = FALSE;
1652 if (ret != GST_FLOW_OK) {
1656 /* move to next NAL packet */
1657 /* Skips the trailing zeros */
1658 gst_adapter_flush (rtph264pay->adapter, nal_len - size);
1660 g_array_set_size (nal_queue, 0);
/* zero-latency aggregation: flush the bundle as soon as it holds a VCL
 * NAL, at the end of the incoming packet. */
1663 if (ret == GST_FLOW_OK && rtph264pay->bundle_size > 0 &&
1664 rtph264pay->aggregate_mode == GST_RTP_H264_AGGREGATE_ZERO_LATENCY &&
1665 rtph264pay->bundle_contains_vcl) {
1666 GST_DEBUG_OBJECT (rtph264pay, "sending bundle at end incoming packet");
1667 ret = gst_rtp_h264_pay_send_bundle (rtph264pay, FALSE);
1673 gst_adapter_unmap (rtph264pay->adapter);
/* Error path: outcaps negotiation failed above. */
1680 GST_WARNING_OBJECT (basepayload, "Could not set outcaps");
1681 g_array_set_size (nal_queue, 0);
1682 ret = GST_FLOW_NOT_NEGOTIATED;
/* Sink-pad event handler.
 *
 * FLUSH_STOP drops adapter contents and any pending bundle;
 * GstForceKeyUnit (custom downstream) with all-headers=TRUE re-arms
 * SPS/PPS insertion; the drain branch flushes the last byte-stream NAL
 * and the pending bundle; STREAM_START clears cached SPS/PPS and flushes
 * the bundle.  The event is then forwarded to the base class.
 *
 * NOTE(review): this extraction has elided lines (`break;` statements,
 * at least one `case` label around the drain branch, closing braces);
 * comments describe only the visible code. */
1688 gst_rtp_h264_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
1691 const GstStructure *s;
1692 GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (payload);
1693 GstFlowReturn ret = GST_FLOW_OK;
1695 switch (GST_EVENT_TYPE (event)) {
1696 case GST_EVENT_FLUSH_STOP:
1697 gst_adapter_clear (rtph264pay->adapter);
1698 gst_rtp_h264_pay_reset_bundle (rtph264pay);
1700 case GST_EVENT_CUSTOM_DOWNSTREAM:
1701 s = gst_event_get_structure (event);
1702 if (gst_structure_has_name (s, "GstForceKeyUnit")) {
1703 gboolean resend_codec_data;
1705 if (gst_structure_get_boolean (s, "all-headers",
1706 &resend_codec_data) && resend_codec_data)
1707 rtph264pay->send_spspps = TRUE;
1712 /* call handle_buffer with NULL to flush last NAL from adapter
1713 * in byte-stream mode
1715 gst_rtp_h264_pay_handle_buffer (payload, NULL);
1716 ret = gst_rtp_h264_pay_send_bundle (rtph264pay, TRUE);
1719 case GST_EVENT_STREAM_START:
1720 GST_DEBUG_OBJECT (rtph264pay, "New stream detected => Clear SPS and PPS");
1721 gst_rtp_h264_pay_clear_sps_pps (rtph264pay);
1722 ret = gst_rtp_h264_pay_send_bundle (rtph264pay, TRUE);
1728 if (ret != GST_FLOW_OK)
/* Forward the event to the base payloader's default handler. */
1731 res = GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
/* Element state-change handler.
 *
 * Going READY->PAUSED resets per-stream state (SPS/PPS re-send flag,
 * adapter, pending bundle) before delegating upward; after the parent's
 * transition, PAUSED->READY clears the SPS/PPS insertion timestamp and
 * the cached parameter sets.
 *
 * NOTE(review): this extraction has elided lines (`break;`/`default:`
 * and closing braces); comments describe only the visible code. */
1736 static GstStateChangeReturn
1737 gst_rtp_h264_pay_change_state (GstElement * element, GstStateChange transition)
1739 GstStateChangeReturn ret;
1740 GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (element);
1742 switch (transition) {
1743 case GST_STATE_CHANGE_READY_TO_PAUSED:
1744 rtph264pay->send_spspps = FALSE;
1745 gst_adapter_clear (rtph264pay->adapter);
1746 gst_rtp_h264_pay_reset_bundle (rtph264pay);
/* Let the parent class perform the actual transition. */
1752 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
1754 switch (transition) {
1755 case GST_STATE_CHANGE_PAUSED_TO_READY:
/* -1 marks "never sent" for the periodic SPS/PPS insertion timer. */
1756 rtph264pay->last_spspps = -1;
1757 gst_rtp_h264_pay_clear_sps_pps (rtph264pay);
/* GObject property setter.
 *
 * sprop-parameter-sets replaces the cached string (freeing the old one)
 * and marks caps for update; config-interval and aggregate-mode map
 * straight to their fields.
 *
 * NOTE(review): this extraction has elided lines (`break;` statements
 * and closing braces); comments describe only the visible code. */
1767 gst_rtp_h264_pay_set_property (GObject * object, guint prop_id,
1768 const GValue * value, GParamSpec * pspec)
1770 GstRtpH264Pay *rtph264pay;
1772 rtph264pay = GST_RTP_H264_PAY (object);
1775 case PROP_SPROP_PARAMETER_SETS:
1776 g_free (rtph264pay->sprop_parameter_sets);
1777 rtph264pay->sprop_parameter_sets = g_value_dup_string (value);
1778 rtph264pay->update_caps = TRUE;
1780 case PROP_CONFIG_INTERVAL:
1781 rtph264pay->spspps_interval = g_value_get_int (value);
1783 case PROP_AGGREGATE_MODE:
1784 rtph264pay->aggregate_mode = g_value_get_enum (value);
1787 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1793 gst_rtp_h264_pay_get_property (GObject * object, guint prop_id,
1794 GValue * value, GParamSpec * pspec)
1796 GstRtpH264Pay *rtph264pay;
1798 rtph264pay = GST_RTP_H264_PAY (object);
1801 case PROP_SPROP_PARAMETER_SETS:
1802 g_value_set_string (value, rtph264pay->sprop_parameter_sets);
1804 case PROP_CONFIG_INTERVAL:
1805 g_value_set_int (value, rtph264pay->spspps_interval);
1807 case PROP_AGGREGATE_MODE:
1808 g_value_set_enum (value, rtph264pay->aggregate_mode);
1811 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);