1 /* GStreamer H.264 Parser
2 * Copyright (C) <2010> Collabora ltd
3 * Copyright (C) <2010> Nokia Corporation
4 * Copyright (C) <2011> Intel Corporation
6 * Copyright (C) <2010> Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
7 * Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
9 * This library is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Library General Public
11 * License as published by the Free Software Foundation; either
12 * version 2 of the License, or (at your option) any later version.
14 * This library is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Library General Public License for more details.
19 * You should have received a copy of the GNU Library General Public
20 * License along with this library; if not, write to the
21 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
22 * Boston, MA 02110-1301, USA.
29 #include <gst/base/base.h>
30 #include <gst/pbutils/pbutils.h>
31 #include <gst/video/video.h>
32 #include "gstvideoparserselements.h"
33 #include "gsth264parse.h"
37 GST_DEBUG_CATEGORY (h264_parse_debug);
38 #define GST_CAT_DEFAULT h264_parse_debug
40 #define DEFAULT_CONFIG_INTERVAL (0)
41 #define DEFAULT_UPDATE_TIMECODE FALSE
/* NOTE(review): this listing is lossy - the "typedef enum {" openers and the
 * "} <Name>;" closers of the enums below were dropped by the extraction; the
 * bare enumerators are kept byte-identical. */
/* Stream format of input/output: raw byte-stream vs. AVC/AVC3 packetized. */
52 GST_H264_PARSE_FORMAT_NONE,
53 GST_H264_PARSE_FORMAT_AVC,
54 GST_H264_PARSE_FORMAT_BYTE,
55 GST_H264_PARSE_FORMAT_AVC3
/* Output alignment: one NAL unit per buffer vs. one complete access unit. */
60 GST_H264_PARSE_ALIGN_NONE = 0,
61 GST_H264_PARSE_ALIGN_NAL,
62 GST_H264_PARSE_ALIGN_AU
/* Parser state bitmask tracking which headers/slices have been seen. */
67 GST_H264_PARSE_STATE_GOT_SPS = 1 << 0,
68 GST_H264_PARSE_STATE_GOT_PPS = 1 << 1,
69 GST_H264_PARSE_STATE_GOT_SLICE = 1 << 2,
/* convenience combinations: headers alone, and headers + a slice */
71 GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS = (GST_H264_PARSE_STATE_GOT_SPS |
72 GST_H264_PARSE_STATE_GOT_PPS),
73 GST_H264_PARSE_STATE_VALID_PICTURE =
74 (GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS |
75 GST_H264_PARSE_STATE_GOT_SLICE)
/* Lifecycle of a remembered SEI message (e.g. mastering display info). */
80 GST_H264_PARSE_SEI_EXPIRED = 0,
81 GST_H264_PARSE_SEI_ACTIVE = 1,
82 GST_H264_PARSE_SEI_PARSED = 2,
/* Evaluates to TRUE iff ALL bits of expected_state are set in parse->state. */
85 #define GST_H264_PARSE_STATE_VALID(parse, expected_state) \
86 (((parse)->state & (expected_state)) == (expected_state))
/* Sink pad accepts any H.264 caps; parsing fills in the details. */
88 static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
91 GST_STATIC_CAPS ("video/x-h264"));
/* Source pad always outputs fully-parsed H.264 with a concrete
 * stream-format and alignment chosen during negotiation. */
93 static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
96 GST_STATIC_CAPS ("video/x-h264, parsed = (boolean) true, "
97 "stream-format=(string) { avc, avc3, byte-stream }, "
98 "alignment=(string) { au, nal }"));
/* GObject boilerplate: type definition deriving from GstBaseParse, and
 * element registration at primary rank (+1 so it beats legacy parsers). */
100 #define parent_class gst_h264_parse_parent_class
101 G_DEFINE_TYPE (GstH264Parse, gst_h264_parse, GST_TYPE_BASE_PARSE);
102 GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (h264parse, "h264parse",
103 GST_RANK_PRIMARY + 1, GST_TYPE_H264_PARSE,
104 videoparsers_element_init (plugin));
/* Forward declarations for GObject and GstBaseParse vfunc implementations.
 * NOTE(review): a few continuation lines of these prototypes were dropped
 * by the lossy extraction (e.g. the second parameter of _get_caps,
 * _src_event and _update_src_caps). */
106 static void gst_h264_parse_finalize (GObject * object);
108 static gboolean gst_h264_parse_start (GstBaseParse * parse);
109 static gboolean gst_h264_parse_stop (GstBaseParse * parse);
110 static GstFlowReturn gst_h264_parse_handle_frame (GstBaseParse * parse,
111 GstBaseParseFrame * frame, gint * skipsize);
112 static GstFlowReturn gst_h264_parse_parse_frame (GstBaseParse * parse,
113 GstBaseParseFrame * frame);
114 static GstFlowReturn gst_h264_parse_pre_push_frame (GstBaseParse * parse,
115 GstBaseParseFrame * frame);
117 static void gst_h264_parse_set_property (GObject * object, guint prop_id,
118 const GValue * value, GParamSpec * pspec);
119 static void gst_h264_parse_get_property (GObject * object, guint prop_id,
120 GValue * value, GParamSpec * pspec);
122 static gboolean gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps);
123 static GstCaps *gst_h264_parse_get_caps (GstBaseParse * parse,
125 static gboolean gst_h264_parse_event (GstBaseParse * parse, GstEvent * event);
126 static gboolean gst_h264_parse_src_event (GstBaseParse * parse,
128 static void gst_h264_parse_update_src_caps (GstH264Parse * h264parse,
/* Class initializer: wires GObject property handling, installs the
 * "config-interval" and "update-timecode" properties, overrides the
 * GstBaseParse vfuncs, and registers pad templates and element metadata. */
132 gst_h264_parse_class_init (GstH264ParseClass * klass)
134 GObjectClass *gobject_class = (GObjectClass *) klass;
135 GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
136 GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
138 GST_DEBUG_CATEGORY_INIT (h264_parse_debug, "h264parse", 0, "h264 parser");
140 gobject_class->finalize = gst_h264_parse_finalize;
141 gobject_class->set_property = gst_h264_parse_set_property;
142 gobject_class->get_property = gst_h264_parse_get_property;
/* config-interval: how often (seconds) to re-insert SPS/PPS into the
 * stream; 0 disables, -1 means with every IDR frame. */
144 g_object_class_install_property (gobject_class, PROP_CONFIG_INTERVAL,
145 g_param_spec_int ("config-interval",
146 "SPS PPS Send Interval",
147 "Send SPS and PPS Insertion Interval in seconds (sprop parameter sets "
148 "will be multiplexed in the data stream when detected.) "
149 "(0 = disabled, -1 = send with every IDR frame)",
150 -1, 3600, DEFAULT_CONFIG_INTERVAL,
151 G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
/* gtk-doc comment for the property below; its opening marker was lost in
 * this lossy listing. */
154 * GstH264Parse:update-timecode:
156 * If the stream contains Picture Timing SEI, update their timecode values
157 * using upstream GstVideoTimeCodeMeta. However, if there are no Picture
158 * Timing SEI in bitstream, this property will not insert the SEI into the
159 * bitstream - it only modifies existing ones.
160 * Moreover, even if both GstVideoTimeCodeMeta and Picture Timing SEI
161 * are present, if pic_struct_present_flag of VUI is equal to zero,
162 * timecode values will not updated as there is not enough information
163 * in the stream to do so.
167 g_object_class_install_property (gobject_class, PROP_UPDATE_TIMECODE,
168 g_param_spec_boolean ("update-timecode",
170 "Update time code values in Picture Timing SEI if GstVideoTimeCodeMeta "
171 "is attached to incoming buffer and also Picture Timing SEI exists "
172 "in the bitstream. To make this property work, SPS must contain "
173 "VUI and pic_struct_present_flag of VUI must be non-zero",
174 DEFAULT_UPDATE_TIMECODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
176 /* Override BaseParse vfuncs */
177 parse_class->start = GST_DEBUG_FUNCPTR (gst_h264_parse_start);
178 parse_class->stop = GST_DEBUG_FUNCPTR (gst_h264_parse_stop);
179 parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_h264_parse_handle_frame);
180 parse_class->pre_push_frame =
181 GST_DEBUG_FUNCPTR (gst_h264_parse_pre_push_frame);
182 parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_set_caps);
183 parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_get_caps);
184 parse_class->sink_event = GST_DEBUG_FUNCPTR (gst_h264_parse_event);
185 parse_class->src_event = GST_DEBUG_FUNCPTR (gst_h264_parse_src_event);
187 gst_element_class_add_static_pad_template (gstelement_class, &srctemplate);
188 gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
190 gst_element_class_set_static_metadata (gstelement_class, "H.264 parser",
191 "Codec/Parser/Converter/Video",
192 "Parses H.264 streams",
193 "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
/* Instance initializer: allocates the AU-assembly adapter, disables
 * baseparse timestamp guessing (the parser computes its own), and relaxes
 * sink-pad caps acceptance so upstream caps only need to intersect the
 * template. */
197 gst_h264_parse_init (GstH264Parse * h264parse)
199 h264parse->frame_out = gst_adapter_new ();
/* we do our own PTS/DTS handling; do not let baseparse interpolate/infer */
200 gst_base_parse_set_pts_interpolation (GST_BASE_PARSE (h264parse), FALSE);
201 gst_base_parse_set_infer_ts (GST_BASE_PARSE (h264parse), FALSE);
202 GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (h264parse));
203 GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (h264parse));
205 h264parse->aud_needed = TRUE;
206 h264parse->aud_insert = TRUE;
207 h264parse->update_timecode = DEFAULT_UPDATE_TIMECODE;
/* GObject finalize: release instance resources and chain up. */
211 gst_h264_parse_finalize (GObject * object)
213 GstH264Parse *h264parse = GST_H264_PARSE (object);
/* drop the AU-assembly adapter created in _init */
215 g_object_unref (h264parse->frame_out);
217 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Reset per-frame parsing state; called after each frame is finished so
 * the next frame starts from a clean slate. Stream-level state (SPS/PPS,
 * caps, format) is NOT touched here - see _reset_stream_info. */
221 gst_h264_parse_reset_frame (GstH264Parse * h264parse)
223 GST_DEBUG_OBJECT (h264parse, "reset frame")
225 /* done parsing; reset state */
226 h264parse->current_off = -1;
228 h264parse->update_caps = FALSE;
/* -1 means "no such NAL seen in this frame yet" for these offsets */
229 h264parse->idr_pos = -1;
230 h264parse->sei_pos = -1;
231 h264parse->pic_timing_sei_pos = -1;
232 h264parse->pic_timing_sei_size = -1;
233 h264parse->keyframe = FALSE;
234 h264parse->predicted = FALSE;
235 h264parse->bidirectional = FALSE;
236 h264parse->header = FALSE;
237 h264parse->frame_start = FALSE;
238 h264parse->have_sps_in_frame = FALSE;
239 h264parse->have_pps_in_frame = FALSE;
240 h264parse->have_aud_in_frame = FALSE;
/* discard any partially assembled output */
241 gst_adapter_clear (h264parse->frame_out);
/* Reset everything derived from the stream itself (dimensions, framerate,
 * PAR, colorimetry, stored SPS/PPS NALs, codec_data, HDR metadata) back to
 * "unknown". Used on (re)start and on caps changes. */
245 gst_h264_parse_reset_stream_info (GstH264Parse * h264parse)
249 h264parse->width = 0;
250 h264parse->height = 0;
251 h264parse->fps_num = 0;
252 h264parse->fps_den = 0;
/* -1 = no upstream pixel-aspect-ratio known; 0 = nothing parsed yet */
253 h264parse->upstream_par_n = -1;
254 h264parse->upstream_par_d = -1;
255 h264parse->parsed_par_n = 0;
256 h264parse->parsed_par_d = 0;
257 h264parse->parsed_colorimetry.range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
258 h264parse->parsed_colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
259 h264parse->parsed_colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
260 h264parse->parsed_colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
262 h264parse->have_pps = FALSE;
263 h264parse->have_sps = FALSE;
265 h264parse->multiview_mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
266 h264parse->multiview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
267 h264parse->first_in_bundle = TRUE;
/* format/alignment will be re-negotiated */
269 h264parse->align = GST_H264_PARSE_ALIGN_NONE;
270 h264parse->format = GST_H264_PARSE_FORMAT_NONE;
272 h264parse->transform = FALSE;
/* AVC length-prefix size defaults to 4 bytes until codec_data says otherwise */
273 h264parse->nal_length_size = 4;
274 h264parse->packetized = FALSE;
275 h264parse->push_codec = FALSE;
276 h264parse->first_frame = TRUE;
278 gst_buffer_replace (&h264parse->codec_data, NULL);
279 gst_buffer_replace (&h264parse->codec_data_in, NULL);
281 gst_h264_parse_reset_frame (h264parse);
/* drop all cached SPS/PPS NAL buffers */
283 for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++)
284 gst_buffer_replace (&h264parse->sps_nals[i], NULL);
285 for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++)
286 gst_buffer_replace (&h264parse->pps_nals[i], NULL);
/* HDR metadata from SEI is stream state too; mark as expired */
288 gst_video_mastering_display_info_init (&h264parse->mastering_display_info);
289 h264parse->mastering_display_info_state = GST_H264_PARSE_SEI_EXPIRED;
291 gst_video_content_light_level_init (&h264parse->content_light_level);
292 h264parse->content_light_level_state = GST_H264_PARSE_SEI_EXPIRED;
/* Full reset: timestamp tracking, pending force-key-unit event, and all
 * stream info. Called from start and stop. */
296 gst_h264_parse_reset (GstH264Parse * h264parse)
298 h264parse->last_report = GST_CLOCK_TIME_NONE;
300 h264parse->dts = GST_CLOCK_TIME_NONE;
/* ts_trn_nb: reference time of the last buffering-period SEI */
301 h264parse->ts_trn_nb = GST_CLOCK_TIME_NONE;
302 h264parse->do_ts = TRUE;
304 h264parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
305 gst_event_replace (&h264parse->force_key_unit_event, NULL);
307 h264parse->discont = FALSE;
308 h264parse->discard_bidirectional = FALSE;
309 h264parse->marker = FALSE;
311 gst_h264_parse_reset_stream_info (h264parse);
/* GstBaseParse::start vfunc - allocate the NAL parser and initialize all
 * per-stream state. NOTE(review): the trailing "return TRUE;" of this
 * function was dropped by the lossy extraction. */
315 gst_h264_parse_start (GstBaseParse * parse)
317 GstH264Parse *h264parse = GST_H264_PARSE (parse);
319 GST_DEBUG_OBJECT (parse, "start");
320 gst_h264_parse_reset (h264parse);
322 h264parse->nalparser = gst_h264_nal_parser_new ();
324 h264parse->state = 0;
325 h264parse->dts = GST_CLOCK_TIME_NONE;
326 h264parse->ts_trn_nb = GST_CLOCK_TIME_NONE;
327 h264parse->sei_pic_struct_pres_flag = FALSE;
328 h264parse->sei_pic_struct = 0;
329 h264parse->field_pic_flag = 0;
330 h264parse->aud_needed = TRUE;
331 h264parse->aud_insert = FALSE;
/* smallest unit we can make a decision on: start code + NAL header */
333 gst_base_parse_set_min_frame_size (parse, 4);
/* GstBaseParse::stop vfunc - reset state and free the NAL parser allocated
 * in start. NOTE(review): the trailing "return TRUE;" was dropped by the
 * lossy extraction. */
339 gst_h264_parse_stop (GstBaseParse * parse)
341 GstH264Parse *h264parse = GST_H264_PARSE (parse);
343 GST_DEBUG_OBJECT (parse, "stop");
344 gst_h264_parse_reset (h264parse);
346 gst_h264_nal_parser_free (h264parse->nalparser);
/* Map a format or alignment enum value to its caps string for logging/caps.
 * When 'format' is TRUE, 'code' is a GstH264ParseFormat; otherwise it is an
 * alignment value. NOTE(review): several "return" lines (e.g. "avc", "avc3",
 * "nal", "au" and the fallback) were dropped by the lossy extraction; only
 * the surviving case labels are shown. */
352 gst_h264_parse_get_string (GstH264Parse * parse, gboolean format, gint code)
356 case GST_H264_PARSE_FORMAT_AVC:
358 case GST_H264_PARSE_FORMAT_BYTE:
359 return "byte-stream";
360 case GST_H264_PARSE_FORMAT_AVC3:
367 case GST_H264_PARSE_ALIGN_NAL:
369 case GST_H264_PARSE_ALIGN_AU:
/* Extract stream-format and alignment from fixed caps into *format and
 * *align. Outputs are first initialized to the NONE values and only
 * overwritten when the corresponding caps field is present and recognized. */
378 gst_h264_parse_format_from_caps (GstCaps * caps, guint * format, guint * align)
381 *format = GST_H264_PARSE_FORMAT_NONE;
384 *align = GST_H264_PARSE_ALIGN_NONE;
/* only fixed caps can be interpreted unambiguously */
386 g_return_if_fail (gst_caps_is_fixed (caps));
388 GST_DEBUG ("parsing caps: %" GST_PTR_FORMAT, caps);
390 if (caps && gst_caps_get_size (caps) > 0) {
391 GstStructure *s = gst_caps_get_structure (caps, 0);
392 const gchar *str = NULL;
395 if ((str = gst_structure_get_string (s, "stream-format"))) {
396 if (strcmp (str, "avc") == 0)
397 *format = GST_H264_PARSE_FORMAT_AVC;
398 else if (strcmp (str, "byte-stream") == 0)
399 *format = GST_H264_PARSE_FORMAT_BYTE;
400 else if (strcmp (str, "avc3") == 0)
401 *format = GST_H264_PARSE_FORMAT_AVC3;
406 if ((str = gst_structure_get_string (s, "alignment"))) {
407 if (strcmp (str, "au") == 0)
408 *align = GST_H264_PARSE_ALIGN_AU;
409 else if (strcmp (str, "nal") == 0)
410 *align = GST_H264_PARSE_ALIGN_NAL;
416 /* check downstream caps to configure format and alignment */
/* Negotiate output stream-format/alignment: prefer passthrough of the
 * upstream caps if downstream accepts them, otherwise fixate downstream's
 * allowed caps; fall back to byte-stream/AU defaults when nothing is
 * expressed. Sets h264parse->format/align/transform/can_passthrough. */
418 gst_h264_parse_negotiate (GstH264Parse * h264parse, gint in_format,
422 guint format = h264parse->format;
423 guint align = h264parse->align;
425 g_return_if_fail ((in_caps == NULL) || gst_caps_is_fixed (in_caps));
427 caps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (h264parse));
428 GST_DEBUG_OBJECT (h264parse, "allowed caps: %" GST_PTR_FORMAT, caps);
430 /* concentrate on leading structure, since decodebin parser
431 * capsfilter always includes parser template caps */
433 caps = gst_caps_truncate (caps);
434 GST_DEBUG_OBJECT (h264parse, "negotiating with caps: %" GST_PTR_FORMAT,
438 h264parse->can_passthrough = FALSE;
440 if (in_caps && caps) {
441 if (gst_caps_can_intersect (in_caps, caps)) {
442 GST_DEBUG_OBJECT (h264parse, "downstream accepts upstream caps");
443 gst_h264_parse_format_from_caps (in_caps, &format, &align);
444 gst_caps_unref (caps);
446 h264parse->can_passthrough = TRUE;
450 /* FIXME We could fail the negotiation immediately if caps are empty */
451 if (caps && !gst_caps_is_empty (caps)) {
452 /* fixate to avoid ambiguity with lists when parsing */
453 caps = gst_caps_fixate (caps);
454 gst_h264_parse_format_from_caps (caps, &format, &align);
/* defaults when downstream expressed no preference */
459 format = GST_H264_PARSE_FORMAT_BYTE;
461 align = GST_H264_PARSE_ALIGN_AU;
463 GST_DEBUG_OBJECT (h264parse, "selected format %s, alignment %s",
464 gst_h264_parse_get_string (h264parse, TRUE, format),
465 gst_h264_parse_get_string (h264parse, FALSE, align));
467 h264parse->format = format;
468 h264parse->align = align;
/* transform = TRUE when the stream must be rewritten (format change or
 * AU alignment requires reassembly) rather than passed through */
470 h264parse->transform = in_format != h264parse->format ||
471 align == GST_H264_PARSE_ALIGN_AU;
474 gst_caps_unref (caps);
/* Wrap a raw NAL payload in a new GstBuffer with the proper prefix for the
 * requested output format: an nl-byte big-endian length field for AVC/AVC3,
 * or the 4-byte 0x00000001 start code for byte-stream. Returns a buffer of
 * nl + size bytes (allocation of 4 + size is trimmed at the end). */
478 gst_h264_parse_wrap_nal (GstH264Parse * h264parse, guint format, guint8 * data,
482 guint nl = h264parse->nal_length_size;
485 GST_DEBUG_OBJECT (h264parse, "nal length %d", size);
487 buf = gst_buffer_new_allocate (NULL, 4 + size, NULL);
488 if (format == GST_H264_PARSE_FORMAT_AVC
489 || format == GST_H264_PARSE_FORMAT_AVC3) {
/* shift so the length occupies the top nl bytes of the 32-bit word */
490 tmp = GUINT32_TO_BE (size << (32 - 8 * nl));
492 /* HACK: nl should always be 4 here, otherwise this won't work.
493 * There are legit cases where nl in avc stream is 2, but byte-stream
494 * SC is still always 4 bytes. */
496 tmp = GUINT32_TO_BE (1);
499 gst_buffer_fill (buf, 0, &tmp, sizeof (guint32));
500 gst_buffer_fill (buf, nl, data, size);
501 gst_buffer_set_size (buf, size + nl);
/* Cache a copy of an SPS/PPS NAL (by its id) so it can be re-inserted into
 * the stream or packed into codec_data later. Out-of-range ids are ignored. */
507 gst_h264_parser_store_nal (GstH264Parse * h264parse, guint id,
508 GstH264NalUnitType naltype, GstH264NalUnit * nalu)
510 GstBuffer *buf, **store;
511 guint size = nalu->size, store_size;
/* pick the right cache array; subset SPS shares the SPS store */
513 if (naltype == GST_H264_NAL_SPS || naltype == GST_H264_NAL_SUBSET_SPS) {
514 store_size = GST_H264_MAX_SPS_COUNT;
515 store = h264parse->sps_nals;
516 GST_DEBUG_OBJECT (h264parse, "storing sps %u", id);
517 } else if (naltype == GST_H264_NAL_PPS) {
518 store_size = GST_H264_MAX_PPS_COUNT;
519 store = h264parse->pps_nals;
520 GST_DEBUG_OBJECT (h264parse, "storing pps %u", id);
524 if (id >= store_size) {
525 GST_DEBUG_OBJECT (h264parse, "unable to store nal, id out-of-range %d", id);
/* copy just the NAL payload (no start code) */
529 buf = gst_buffer_new_allocate (NULL, size, NULL);
530 gst_buffer_fill (buf, 0, nalu->data + nalu->offset, size);
532 /* Indicate that buffer contain a header needed for decoding */
533 if (naltype == GST_H264_NAL_SPS || naltype == GST_H264_NAL_PPS)
534 GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
/* replace any previously stored NAL with the same id */
537 gst_buffer_unref (store[id]);
542 #ifndef GST_DISABLE_GST_DEBUG
/* Human-readable NAL type names, indexed by GstH264NalUnitType, used only
 * for debug logging. NOTE(review): most table entries were dropped by the
 * lossy extraction; the surviving tail entries are kept byte-identical. */
543 static const gchar *nal_names[] = {
560 "Depth Parameter Set",
561 "Reserved", "Reserved",
562 "Slice Aux Unpartitioned",
564 "Slice Depth/3D-AVC Extension"
/* Return the debug name for nal_type (table lookup, bounded by the last
 * known type); the fallback return for unknown types was dropped here. */
568 _nal_name (GstH264NalUnitType nal_type)
570 if (nal_type <= GST_H264_NAL_SLICE_DEPTH)
571 return nal_names[nal_type];
/* Handle an ITU-T T.35 registered user data SEI (e.g. closed captions).
 * Only the US country code is handled; the payload (after the 16-bit
 * provider code) is forwarded to gst_video_parse_user_data(), tagged with
 * the field parity derived from the current pic_struct. */
577 gst_h264_parse_process_sei_user_data (GstH264Parse * h264parse,
578 GstH264RegisteredUserData * rud)
580 guint16 provider_code;
582 GstVideoParseUtilsField field = GST_VIDEO_PARSE_UTILS_FIELD_1;
584 /* only US country code is currently supported */
585 switch (rud->country_code) {
586 case ITU_T_T35_COUNTRY_CODE_US:
589 GST_LOG_OBJECT (h264parse, "Unsupported country code %d",
/* need at least the 2-byte provider code */
594 if (rud->data == NULL || rud->size < 2)
597 gst_byte_reader_init (&br, rud->data, rud->size);
/* safe: size >= 2 was checked above */
599 provider_code = gst_byte_reader_get_uint16_be_unchecked (&br);
601 if (h264parse->sei_pic_struct ==
602 (guint8) GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD)
603 field = GST_VIDEO_PARSE_UTILS_FIELD_2;
604 gst_video_parse_user_data ((GstElement *) h264parse, &h264parse->user_data,
605 &br, field, provider_code);
/* Parse all SEI messages in a NAL and update parser state accordingly:
 * pic timing (pic_struct, timecodes), registered user data, buffering
 * period, recovery point, stereo/frame-packing (multiview caps), and HDR
 * metadata (mastering display info, content light level). */
610 gst_h264_parse_process_sei (GstH264Parse * h264parse, GstH264NalUnit * nalu)
612 GstH264SEIMessage sei;
613 GstH264NalParser *nalparser = h264parse->nalparser;
614 GstH264ParserResult pres;
618 pres = gst_h264_parser_parse_sei (nalparser, nalu, &messages);
619 if (pres != GST_H264_PARSER_OK)
620 GST_WARNING_OBJECT (h264parse, "failed to parse one or more SEI message");
622 /* Even if pres != GST_H264_PARSER_OK, some message could have been parsed and
623 * stored in messages.
625 for (i = 0; i < messages->len; i++) {
626 sei = g_array_index (messages, GstH264SEIMessage, i);
627 switch (sei.payloadType) {
628 case GST_H264_SEI_PIC_TIMING:
631 h264parse->sei_pic_struct_pres_flag =
632 sei.payload.pic_timing.pic_struct_present_flag;
633 h264parse->sei_cpb_removal_delay =
634 sei.payload.pic_timing.cpb_removal_delay;
635 if (h264parse->sei_pic_struct_pres_flag) {
636 h264parse->sei_pic_struct = sei.payload.pic_timing.pic_struct;
/* remember the full pic timing SEI and count its clock timestamps
 * for later timecode rewriting */
639 h264parse->num_clock_timestamp = 0;
640 memcpy (&h264parse->pic_timing_sei, &sei.payload.pic_timing,
641 sizeof (GstH264PicTiming));
643 for (j = 0; j < 3; j++) {
644 if (sei.payload.pic_timing.clock_timestamp_flag[j]) {
645 h264parse->num_clock_timestamp++;
649 if (h264parse->sei_pic_struct_pres_flag && h264parse->update_timecode) {
650 /* FIXME: add support multiple messages in a SEI nalu.
651 * Updating only this SEI message and preserving the others
652 * is a bit complicated */
653 if (messages->len == 1) {
/* record the byte span of this SEI (incl. start code) so the
 * timecode can be rewritten in place on push */
654 h264parse->pic_timing_sei_pos = nalu->sc_offset;
655 h264parse->pic_timing_sei_size =
656 nalu->size + (nalu->offset - nalu->sc_offset);
660 GST_LOG_OBJECT (h264parse, "pic timing updated");
663 case GST_H264_SEI_REGISTERED_USER_DATA:
664 gst_h264_parse_process_sei_user_data (h264parse,
665 &sei.payload.registered_user_data);
667 case GST_H264_SEI_BUF_PERIOD:
/* start of a buffering period: anchor ts_trn_nb on current dts */
668 if (h264parse->ts_trn_nb == GST_CLOCK_TIME_NONE ||
669 h264parse->dts == GST_CLOCK_TIME_NONE)
670 h264parse->ts_trn_nb = 0;
672 h264parse->ts_trn_nb = h264parse->dts;
674 GST_LOG_OBJECT (h264parse,
675 "new buffering period; ts_trn_nb updated: %" GST_TIME_FORMAT,
676 GST_TIME_ARGS (h264parse->ts_trn_nb));
679 /* Additional messages that are not innerly useful to the
680 * element but for debugging purposes */
681 case GST_H264_SEI_RECOVERY_POINT:
682 GST_LOG_OBJECT (h264parse, "recovery point found: %u %u %u %u",
683 sei.payload.recovery_point.recovery_frame_cnt,
684 sei.payload.recovery_point.exact_match_flag,
685 sei.payload.recovery_point.broken_link_flag,
686 sei.payload.recovery_point.changing_slice_group_idc);
/* a recovery point lets decoders resync - treat as keyframe */
687 h264parse->keyframe = TRUE;
690 /* Additional messages that are not innerly useful to the
691 * element but for debugging purposes */
692 case GST_H264_SEI_STEREO_VIDEO_INFO:{
693 GstVideoMultiviewMode mview_mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
694 GstVideoMultiviewFlags mview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
696 GST_LOG_OBJECT (h264parse, "Stereo video information %u %u %u %u %u %u",
697 sei.payload.stereo_video_info.field_views_flag,
698 sei.payload.stereo_video_info.top_field_is_left_view_flag,
699 sei.payload.stereo_video_info.current_frame_is_left_view_flag,
700 sei.payload.stereo_video_info.next_frame_is_second_view_flag,
701 sei.payload.stereo_video_info.left_view_self_contained_flag,
702 sei.payload.stereo_video_info.right_view_self_contained_flag);
/* map stereo info to GstVideoMultiview mode/flags */
704 if (sei.payload.stereo_video_info.field_views_flag) {
705 mview_mode = GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED;
706 if (!sei.payload.stereo_video_info.top_field_is_left_view_flag)
707 mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
709 mview_mode = GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME;
710 if (sei.payload.stereo_video_info.next_frame_is_second_view_flag) {
711 /* Mark current frame as first in bundle */
712 h264parse->first_in_bundle = TRUE;
713 if (!sei.payload.stereo_video_info.current_frame_is_left_view_flag)
714 mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
717 if (mview_mode != h264parse->multiview_mode ||
718 mview_flags != h264parse->multiview_flags) {
719 h264parse->multiview_mode = mview_mode;
720 h264parse->multiview_flags = mview_flags;
721 /* output caps need to be changed */
722 gst_h264_parse_update_src_caps (h264parse, NULL);
726 case GST_H264_SEI_FRAME_PACKING:{
727 GstVideoMultiviewMode mview_mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
728 GstVideoMultiviewFlags mview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
730 GST_LOG_OBJECT (h264parse,
731 "frame packing arrangement message: id %u cancelled %u "
732 "type %u quincunx %u content_interpretation %d flip %u "
733 "right_first %u field_views %u is_frame0 %u "
734 "frame0_self_contained %u frame1_self_contained %u "
735 "frame0_grid (%u, %u) frame1_grid (%u, %u) "
736 "repetition_period %" G_GUINT16_FORMAT,
737 sei.payload.frame_packing.frame_packing_id,
738 sei.payload.frame_packing.frame_packing_cancel_flag,
739 sei.payload.frame_packing.frame_packing_type,
740 sei.payload.frame_packing.quincunx_sampling_flag,
741 sei.payload.frame_packing.content_interpretation_type,
742 sei.payload.frame_packing.spatial_flipping_flag,
743 sei.payload.frame_packing.frame0_flipped_flag,
744 sei.payload.frame_packing.field_views_flag,
745 sei.payload.frame_packing.current_frame_is_frame0_flag,
746 sei.payload.frame_packing.frame0_self_contained_flag,
747 sei.payload.frame_packing.frame1_self_contained_flag,
748 sei.payload.frame_packing.frame0_grid_position_x,
749 sei.payload.frame_packing.frame0_grid_position_y,
750 sei.payload.frame_packing.frame1_grid_position_x,
751 sei.payload.frame_packing.frame1_grid_position_y,
752 sei.payload.frame_packing.frame_packing_repetition_period);
754 /* Only IDs from 0->255 and 512->2^31-1 are valid. Ignore others */
755 if ((sei.payload.frame_packing.frame_packing_id >= 256 &&
756 sei.payload.frame_packing.frame_packing_id < 512) ||
757 (sei.payload.frame_packing.frame_packing_id >= (1U << 31)))
760 if (!sei.payload.frame_packing.frame_packing_cancel_flag) {
761 /* Cancel flag sets things back to no-info */
/* content_interpretation_type == 2 means frame0 is the right view */
763 if (sei.payload.frame_packing.content_interpretation_type == 2)
764 mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
/* NOTE(review): the numeric case labels of this switch (packing
 * types 0..5 per H.264 D.2.26) were dropped by the lossy listing */
766 switch (sei.payload.frame_packing.frame_packing_type) {
768 mview_mode = GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD;
771 mview_mode = GST_VIDEO_MULTIVIEW_MODE_COLUMN_INTERLEAVED;
774 mview_mode = GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED;
777 if (sei.payload.frame_packing.quincunx_sampling_flag)
778 mview_mode = GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE_QUINCUNX;
780 mview_mode = GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE;
781 if (sei.payload.frame_packing.spatial_flipping_flag) {
782 /* One of the views is flopped. */
783 if (sei.payload.frame_packing.frame0_flipped_flag !=
785 GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST))
786 /* the left view is flopped */
787 mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLOPPED;
789 mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLOPPED;
793 mview_mode = GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM;
794 if (sei.payload.frame_packing.spatial_flipping_flag) {
795 /* One of the views is flipped, */
796 if (sei.payload.frame_packing.frame0_flipped_flag !=
798 GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST))
799 /* the left view is flipped */
800 mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLIPPED;
802 mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLIPPED;
806 if (sei.payload.frame_packing.content_interpretation_type == 0)
807 mview_mode = GST_VIDEO_MULTIVIEW_MODE_MULTIVIEW_FRAME_BY_FRAME;
809 mview_mode = GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME;
812 GST_DEBUG_OBJECT (h264parse, "Invalid frame packing type %u",
813 sei.payload.frame_packing.frame_packing_type);
818 if (mview_mode != h264parse->multiview_mode ||
819 mview_flags != h264parse->multiview_flags) {
820 h264parse->multiview_mode = mview_mode;
821 h264parse->multiview_flags = mview_flags;
822 /* output caps need to be changed */
823 gst_h264_parse_update_src_caps (h264parse, NULL);
827 case GST_H264_SEI_MASTERING_DISPLAY_COLOUR_VOLUME:
829 /* Precision defined by spec.
830 * See D.2.29 Mastering display colour volume SEI message semantics */
831 GstVideoMasteringDisplayInfo minfo;
834 /* GstVideoMasteringDisplayInfo::display_primaries is rgb order but
835 * AVC uses gbr order.
836 * See spec D.2.29 display_primaries_x and display_primaries_y
838 for (j = 0, k = 2; j < G_N_ELEMENTS (minfo.display_primaries); j++, k++) {
839 minfo.display_primaries[j].x =
841 mastering_display_colour_volume.display_primaries_x[k % 3];
842 minfo.display_primaries[j].y =
844 mastering_display_colour_volume.display_primaries_y[k % 3];
847 minfo.white_point.x =
848 sei.payload.mastering_display_colour_volume.white_point_x;
849 minfo.white_point.y =
850 sei.payload.mastering_display_colour_volume.white_point_y;
851 minfo.max_display_mastering_luminance =
852 sei.payload.mastering_display_colour_volume.
853 max_display_mastering_luminance;
854 minfo.min_display_mastering_luminance =
855 sei.payload.mastering_display_colour_volume.
856 min_display_mastering_luminance;
858 GST_LOG_OBJECT (h264parse, "mastering display info found: "
864 "min_luminance(%u) ",
865 minfo.display_primaries[0].x, minfo.display_primaries[0].y,
866 minfo.display_primaries[1].x, minfo.display_primaries[1].y,
867 minfo.display_primaries[2].x, minfo.display_primaries[2].y,
868 minfo.white_point.x, minfo.white_point.y,
869 minfo.max_display_mastering_luminance,
870 minfo.min_display_mastering_luminance);
/* trigger a caps update when the info is new or changed */
872 if (h264parse->mastering_display_info_state ==
873 GST_H264_PARSE_SEI_EXPIRED) {
874 h264parse->update_caps = TRUE;
875 } else if (!gst_video_mastering_display_info_is_equal
876 (&h264parse->mastering_display_info, &minfo)) {
877 h264parse->update_caps = TRUE;
880 h264parse->mastering_display_info_state = GST_H264_PARSE_SEI_PARSED;
881 h264parse->mastering_display_info = minfo;
885 case GST_H264_SEI_CONTENT_LIGHT_LEVEL:
887 GstVideoContentLightLevel cll;
889 cll.max_content_light_level =
890 sei.payload.content_light_level.max_content_light_level;
891 cll.max_frame_average_light_level =
892 sei.payload.content_light_level.max_pic_average_light_level;
894 GST_LOG_OBJECT (h264parse, "content light level found: "
895 "maxCLL:(%u), maxFALL:(%u)", cll.max_content_light_level,
896 cll.max_frame_average_light_level);
/* same new-or-changed caps-update logic as mastering display info */
898 if (h264parse->content_light_level_state == GST_H264_PARSE_SEI_EXPIRED) {
899 h264parse->update_caps = TRUE;
900 } else if (cll.max_content_light_level !=
901 h264parse->content_light_level.max_content_light_level ||
902 cll.max_frame_average_light_level !=
903 h264parse->content_light_level.max_frame_average_light_level) {
904 h264parse->update_caps = TRUE;
907 h264parse->content_light_level_state = GST_H264_PARSE_SEI_PARSED;
908 h264parse->content_light_level = cll;
/* default: unwrap the real payload type of unhandled messages for logging */
913 gint payload_type = sei.payloadType;
915 if (payload_type == GST_H264_SEI_UNHANDLED_PAYLOAD) {
916 GstH264SEIUnhandledPayload *unhandled =
917 &sei.payload.unhandled_payload;
918 payload_type = unhandled->payloadType;
921 GST_LOG_OBJECT (h264parse, "Unsupported payload type %d", payload_type);
926 g_array_free (messages, TRUE);
929 /* caller guarantees 2 bytes of nal payload */
931 gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
934 GstH264PPS pps = { 0, };
935 GstH264SPS sps = { 0, };
936 GstH264NalParser *nalparser = h264parse->nalparser;
937 GstH264ParserResult pres;
938 GstH264SliceHdr slice;
940 /* nothing to do for broken input */
941 if (G_UNLIKELY (nalu->size < 2)) {
942 GST_DEBUG_OBJECT (h264parse, "not processing nal size %u", nalu->size);
946 /* we have a peek as well */
947 nal_type = nalu->type;
949 GST_DEBUG_OBJECT (h264parse, "processing nal of type %u %s, size %u",
950 nal_type, _nal_name (nal_type), nalu->size);
953 case GST_H264_NAL_SUBSET_SPS:
954 if (!GST_H264_PARSE_STATE_VALID (h264parse, GST_H264_PARSE_STATE_GOT_SPS))
956 pres = gst_h264_parser_parse_subset_sps (nalparser, nalu, &sps);
959 case GST_H264_NAL_SPS:
960 /* reset state, everything else is obsolete */
961 h264parse->state &= GST_H264_PARSE_STATE_GOT_PPS;
962 pres = gst_h264_parser_parse_sps (nalparser, nalu, &sps);
965 /* arranged for a fallback sps.id, so use that one and only warn */
966 if (pres != GST_H264_PARSER_OK) {
967 GST_WARNING_OBJECT (h264parse, "failed to parse SPS:");
968 h264parse->state |= GST_H264_PARSE_STATE_GOT_SPS;
969 h264parse->header = TRUE;
973 GST_DEBUG_OBJECT (h264parse, "triggering src caps check");
974 h264parse->update_caps = TRUE;
975 h264parse->have_sps = TRUE;
976 h264parse->have_sps_in_frame = TRUE;
977 if (h264parse->push_codec && h264parse->have_pps) {
978 /* SPS and PPS found in stream before the first pre_push_frame, no need
979 * to forcibly push at start */
980 GST_INFO_OBJECT (h264parse, "have SPS/PPS in stream");
981 h264parse->push_codec = FALSE;
982 h264parse->have_sps = FALSE;
983 h264parse->have_pps = FALSE;
986 gst_h264_parser_store_nal (h264parse, sps.id, nal_type, nalu);
987 gst_h264_sps_clear (&sps);
988 h264parse->state |= GST_H264_PARSE_STATE_GOT_SPS;
989 h264parse->header = TRUE;
991 case GST_H264_NAL_PPS:
992 /* expected state: got-sps */
993 h264parse->state &= GST_H264_PARSE_STATE_GOT_SPS;
994 if (!GST_H264_PARSE_STATE_VALID (h264parse, GST_H264_PARSE_STATE_GOT_SPS))
997 pres = gst_h264_parser_parse_pps (nalparser, nalu, &pps);
998 /* arranged for a fallback pps.id, so use that one and only warn */
999 if (pres != GST_H264_PARSER_OK) {
1000 GST_WARNING_OBJECT (h264parse, "failed to parse PPS:");
1001 if (pres != GST_H264_PARSER_BROKEN_LINK)
1005 /* parameters might have changed, force caps check */
1006 if (!h264parse->have_pps) {
1007 GST_DEBUG_OBJECT (h264parse, "triggering src caps check");
1008 h264parse->update_caps = TRUE;
1010 h264parse->have_pps = TRUE;
1011 h264parse->have_pps_in_frame = TRUE;
1012 if (h264parse->push_codec && h264parse->have_sps) {
1013 /* SPS and PPS found in stream before the first pre_push_frame, no need
1014 * to forcibly push at start */
1015 GST_INFO_OBJECT (h264parse, "have SPS/PPS in stream");
1016 h264parse->push_codec = FALSE;
1017 h264parse->have_sps = FALSE;
1018 h264parse->have_pps = FALSE;
1021 gst_h264_parser_store_nal (h264parse, pps.id, nal_type, nalu);
1022 gst_h264_pps_clear (&pps);
1023 h264parse->state |= GST_H264_PARSE_STATE_GOT_PPS;
1024 h264parse->header = TRUE;
1026 case GST_H264_NAL_SEI:
1027 /* expected state: got-sps */
1028 if (!GST_H264_PARSE_STATE_VALID (h264parse, GST_H264_PARSE_STATE_GOT_SPS))
1031 h264parse->header = TRUE;
1032 gst_h264_parse_process_sei (h264parse, nalu);
1034 if (h264parse->sei_pos == -1) {
1035 if (h264parse->transform)
1036 h264parse->sei_pos = gst_adapter_available (h264parse->frame_out);
1038 h264parse->sei_pos = nalu->sc_offset;
1039 GST_DEBUG_OBJECT (h264parse, "marking SEI in frame at offset %d",
1040 h264parse->sei_pos);
1044 case GST_H264_NAL_SLICE:
1045 case GST_H264_NAL_SLICE_DPA:
1046 case GST_H264_NAL_SLICE_DPB:
1047 case GST_H264_NAL_SLICE_DPC:
1048 case GST_H264_NAL_SLICE_IDR:
1049 case GST_H264_NAL_SLICE_EXT:
1050 /* expected state: got-sps|got-pps (valid picture headers) */
1051 h264parse->state &= GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS;
1052 if (!GST_H264_PARSE_STATE_VALID (h264parse,
1053 GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS))
1056 /* This is similar to the GOT_SLICE state, but is only reset when the
1057 * AU is complete. This is used to keep track of AU */
1058 h264parse->picture_start = TRUE;
1060 /* don't need to parse the whole slice (header) here */
1061 if (*(nalu->data + nalu->offset + nalu->header_bytes) & 0x80) {
1062 /* means first_mb_in_slice == 0 */
1063 /* real frame data */
1064 GST_DEBUG_OBJECT (h264parse, "first_mb_in_slice = 0");
1065 h264parse->frame_start = TRUE;
1067 GST_DEBUG_OBJECT (h264parse, "frame start: %i", h264parse->frame_start);
1068 if (nal_type == GST_H264_NAL_SLICE_EXT && !GST_H264_IS_MVC_NALU (nalu))
1071 pres = gst_h264_parser_parse_slice_hdr (nalparser, nalu, &slice,
1073 GST_DEBUG_OBJECT (h264parse,
1074 "parse result %d, first MB: %u, slice type: %u",
1075 pres, slice.first_mb_in_slice, slice.type);
1076 if (pres == GST_H264_PARSER_OK) {
1077 if (GST_H264_IS_I_SLICE (&slice) || GST_H264_IS_SI_SLICE (&slice))
1078 h264parse->keyframe = TRUE;
1079 else if (GST_H264_IS_P_SLICE (&slice)
1080 || GST_H264_IS_SP_SLICE (&slice))
1081 h264parse->predicted = TRUE;
1082 else if (GST_H264_IS_B_SLICE (&slice))
1083 h264parse->bidirectional = TRUE;
1085 h264parse->state |= GST_H264_PARSE_STATE_GOT_SLICE;
1086 h264parse->field_pic_flag = slice.field_pic_flag;
1089 if (G_LIKELY (nal_type != GST_H264_NAL_SLICE_IDR &&
1090 !h264parse->push_codec))
1093 /* if we need to sneak codec NALs into the stream,
1094 * this is a good place, so fake it as IDR
1095 * (which should be at start anyway) */
1096 /* mark where config needs to go if interval expired */
1097 /* mind replacement buffer if applicable */
1098 if (h264parse->idr_pos == -1) {
1099 if (h264parse->transform)
1100 h264parse->idr_pos = gst_adapter_available (h264parse->frame_out);
1102 h264parse->idr_pos = nalu->sc_offset;
1103 GST_DEBUG_OBJECT (h264parse, "marking IDR in frame at offset %d",
1104 h264parse->idr_pos);
1106 /* if SEI precedes (faked) IDR, then we have to insert config there */
1107 if (h264parse->sei_pos >= 0 && h264parse->idr_pos > h264parse->sei_pos) {
1108 h264parse->idr_pos = h264parse->sei_pos;
1109 GST_DEBUG_OBJECT (h264parse, "moved IDR mark to SEI position %d",
1110 h264parse->idr_pos);
1112 /* Reset state only on first IDR slice of CVS D.2.29 */
1113 if (slice.first_mb_in_slice == 0) {
1114 if (h264parse->mastering_display_info_state ==
1115 GST_H264_PARSE_SEI_PARSED)
1116 h264parse->mastering_display_info_state = GST_H264_PARSE_SEI_ACTIVE;
1117 else if (h264parse->mastering_display_info_state ==
1118 GST_H264_PARSE_SEI_ACTIVE)
1119 h264parse->mastering_display_info_state = GST_H264_PARSE_SEI_EXPIRED;
1121 if (h264parse->content_light_level_state == GST_H264_PARSE_SEI_PARSED)
1122 h264parse->content_light_level_state = GST_H264_PARSE_SEI_ACTIVE;
1123 else if (h264parse->content_light_level_state ==
1124 GST_H264_PARSE_SEI_ACTIVE)
1125 h264parse->content_light_level_state = GST_H264_PARSE_SEI_EXPIRED;
1128 case GST_H264_NAL_AU_DELIMITER:
1129 /* Just accumulate AU Delimiter, whether it's before SPS or not */
1130 pres = gst_h264_parser_parse_nal (nalparser, nalu);
1131 if (pres != GST_H264_PARSER_OK)
1133 h264parse->aud_needed = FALSE;
1134 h264parse->have_aud_in_frame = TRUE;
1137 /* drop anything before the initial SPS */
1138 if (!GST_H264_PARSE_STATE_VALID (h264parse, GST_H264_PARSE_STATE_GOT_SPS))
1141 pres = gst_h264_parser_parse_nal (nalparser, nalu);
1142 if (pres != GST_H264_PARSER_OK)
1147 /* if AVC output needed, collect properly prefixed nal in adapter,
1148 * and use that to replace outgoing buffer data later on */
1149 if (h264parse->transform) {
1152 GST_LOG_OBJECT (h264parse, "collecting NAL in AVC frame");
1153 buf = gst_h264_parse_wrap_nal (h264parse, h264parse->format,
1154 nalu->data + nalu->offset, nalu->size);
1155 gst_adapter_push (h264parse->frame_out, buf);
1160 /* caller guarantees at least 2 bytes of nal payload for each nal
1161 * returns TRUE if next_nal indicates that nal terminates an AU */
1162 static inline gboolean
1163 gst_h264_parse_collect_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
1165 GstH264NalUnitType nal_type = nalu->type;
1168 /* determine if AU complete */
1169 GST_LOG_OBJECT (h264parse, "next nal type: %d %s (picture started %i)",
1170 nal_type, _nal_name (nal_type), h264parse->picture_start);
1172 /* consider a coded slices (IDR or not) to start a picture,
1173 * (so ending the previous one) if first_mb_in_slice == 0
1174 * (non-0 is part of previous one) */
1175 /* NOTE this is not entirely according to Access Unit specs in 7.4.1.2.4,
1176 * but in practice it works in sane cases, needs not much parsing,
1177 * and also works with broken frame_num in NAL
1178 * (where spec-wise would fail) */
/* Once a picture has started, any non-VCL NAL in the SEI..AU-delimiter
 * range, or one of types 14..18 (prefix NAL / subset SPS / reserved
 * extension range -- NOTE(review): confirm intended set), ends the AU. */
1179 complete = h264parse->picture_start && ((nal_type >= GST_H264_NAL_SEI &&
1180 nal_type <= GST_H264_NAL_AU_DELIMITER) ||
1181 (nal_type >= 14 && nal_type <= 18));
1183 /* first_mb_in_slice == 0 considered start of frame */
/* first_mb_in_slice is the first ue(v) field of the slice header, so a
 * set MSB in the first payload byte means first_mb_in_slice == 0: this
 * slice starts a new picture, hence the current AU is complete. */
1184 if (nalu->size > nalu->header_bytes)
1185 complete |= h264parse->picture_start && (nal_type == GST_H264_NAL_SLICE
1186 || nal_type == GST_H264_NAL_SLICE_DPA
1187 || nal_type == GST_H264_NAL_SLICE_IDR) &&
1188 (nalu->data[nalu->offset + nalu->header_bytes] & 0x80);
1190 GST_LOG_OBJECT (h264parse, "au complete: %d", complete);
/* an AU boundary resets the picture-tracking flag for the next round */
1193 h264parse->picture_start = FALSE;
/* Pre-built access unit delimiter NAL in byte-stream form, used when an
 * AUD has to be inserted in front of an outgoing access unit. */
1198 static guint8 au_delim[6] = {
1199 0x00, 0x00, 0x00, 0x01, /* nal prefix */
1200 0x09, /* nal unit type = access unit delimiter */
1201 0xf0 /* allow any slice type */
/* Handle one input buffer of packetized (AVC/AVC3) data: split it into
 * length-prefixed NALs, inspect each via gst_h264_parse_process_nal(),
 * then either finish one output frame per NAL (split_packetized) or the
 * whole input buffer as a single frame.
 * Returns a GstFlowReturn; GST_FLOW_NOT_NEGOTIATED if the NAL length
 * size was never configured. */
1204 static GstFlowReturn
1205 gst_h264_parse_handle_frame_packetized (GstBaseParse * parse,
1206 GstBaseParseFrame * frame)
1208 GstH264Parse *h264parse = GST_H264_PARSE (parse);
1209 GstBuffer *buffer = frame->buffer;
1210 GstFlowReturn ret = GST_FLOW_OK;
1211 GstH264ParserResult parse_res;
1212 GstH264NalUnit nalu;
1213 const guint nl = h264parse->nal_length_size;
/* the AVC length-prefix size comes from codec_data and must be 1..4
 * bytes; outside that range we cannot split the input at all */
1217 if (nl < 1 || nl > 4) {
1218 GST_DEBUG_OBJECT (h264parse, "insufficient data to split input");
1219 return GST_FLOW_NOT_NEGOTIATED;
1222 /* need to save buffer from invalidation upon _finish_frame */
1223 if (h264parse->split_packetized)
1224 buffer = gst_buffer_copy (frame->buffer);
1226 gst_buffer_map (buffer, &map, GST_MAP_READ);
1230 GST_LOG_OBJECT (h264parse,
1231 "processing packet buffer of size %" G_GSIZE_FORMAT, map.size);
1233 parse_res = gst_h264_parser_identify_nalu_avc (h264parse->nalparser,
1234 map.data, 0, map.size, nl, &nalu);
1236 /* Always enable AUD insertion per frame here. The pre_push function
1237 * will only add it once, and will only add it for byte-stream output
1238 * if AUD doesn't exist in the current frame */
1239 h264parse->aud_insert = TRUE;
/* walk every length-prefixed NAL found in the mapped buffer */
1241 while (parse_res == GST_H264_PARSER_OK) {
1242 GST_DEBUG_OBJECT (h264parse, "AVC nal offset %d", nalu.offset + nalu.size);
1244 /* either way, have a look at it */
1245 gst_h264_parse_process_nal (h264parse, &nalu);
1247 /* dispatch per NALU if needed */
1248 if (h264parse->split_packetized) {
1249 GstBaseParseFrame tmp_frame;
/* build a per-NAL frame that inherits the input frame's metadata */
1251 gst_base_parse_frame_init (&tmp_frame);
1252 tmp_frame.flags |= frame->flags;
1253 tmp_frame.offset = frame->offset;
1254 tmp_frame.overhead = frame->overhead;
1255 tmp_frame.buffer = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL,
1256 nalu.offset, nalu.size);
1258 /* Set marker on last packet */
1259 if (nl + nalu.size == left) {
1260 if (GST_BUFFER_FLAG_IS_SET (frame->buffer, GST_BUFFER_FLAG_MARKER))
1261 h264parse->marker = TRUE;
1264 /* note we don't need to come up with a sub-buffer, since
1265 * subsequent code only considers input buffer's metadata.
1266 * Real data is either taken from input by baseclass or
1267 * a replacement output buffer is provided anyway. */
1268 gst_h264_parse_parse_frame (parse, &tmp_frame);
1269 ret = gst_base_parse_finish_frame (parse, &tmp_frame, nl + nalu.size);
1270 left -= nl + nalu.size;
1273 parse_res = gst_h264_parser_identify_nalu_avc (h264parse->nalparser,
1274 map.data, nalu.offset + nalu.size, map.size, nl, &nalu);
1277 gst_buffer_unmap (buffer, &map);
/* non-split mode: push the whole input buffer as one output frame */
1279 if (!h264parse->split_packetized) {
1280 h264parse->marker = TRUE;
1281 gst_h264_parse_parse_frame (parse, frame);
1282 ret = gst_base_parse_finish_frame (parse, frame, map.size);
/* split mode: drop our private copy; any trailing bytes that did not
 * form a complete NAL are discarded with a warning */
1284 gst_buffer_unref (buffer);
1285 if (G_UNLIKELY (left)) {
1286 /* should not be happening for nice AVC */
1287 GST_WARNING_OBJECT (parse, "skipping leftover AVC data %d", left);
1288 frame->flags |= GST_BASE_PARSE_FRAME_FLAG_DROP;
1289 ret = gst_base_parse_finish_frame (parse, frame, map.size);
/* a truncated or broken NAL is fatal only when we split per NAL;
 * otherwise just log it and let the data pass through */
1293 if (parse_res == GST_H264_PARSER_NO_NAL_END ||
1294 parse_res == GST_H264_PARSER_BROKEN_DATA) {
1296 if (h264parse->split_packetized) {
1297 GST_ELEMENT_ERROR (h264parse, STREAM, FAILED, (NULL),
1298 ("invalid AVC input data"));
1300 return GST_FLOW_ERROR;
1302 /* do not meddle to much in this case */
1303 GST_DEBUG_OBJECT (h264parse, "parsing packet failed");
/* Main GstBaseParse handle_frame vfunc for byte-stream input: scan for
 * start codes, feed each NAL to process/collect, and decide per the
 * configured input/output alignment (NAL vs AU) when a frame is
 * complete, how much to skip, or whether more data is needed.
 * Packetized input is delegated to handle_frame_packetized() above. */
1310 static GstFlowReturn
1311 gst_h264_parse_handle_frame (GstBaseParse * parse,
1312 GstBaseParseFrame * frame, gint * skipsize)
1314 GstH264Parse *h264parse = GST_H264_PARSE (parse);
1315 GstBuffer *buffer = frame->buffer;
1319 gint current_off = 0;
1320 gboolean drain, nonext;
1321 GstH264NalParser *nalparser = h264parse->nalparser;
1322 GstH264NalUnit nalu;
1323 GstH264ParserResult pres;
1325 #ifdef TIZEN_FEATURE_H264PARSE_MODIFICATION
1326 GstH264NalUnitType last_nal_type = GST_H264_NAL_UNKNOWN;
/* remember discontinuities so downstream buffers can be flagged */
1329 if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (frame->buffer,
1330 GST_BUFFER_FLAG_DISCONT))) {
1331 h264parse->discont = TRUE;
1334 /* delegate in packetized case, no skipping should be needed */
1335 if (h264parse->packetized)
1336 return gst_h264_parse_handle_frame_packetized (parse, frame);
1338 gst_buffer_map (buffer, &map, GST_MAP_READ);
1342 /* expect at least 3 bytes start_code, and 1 bytes NALU header.
1343 * the length of the NALU payload can be zero.
1344 * (e.g. EOS/EOB placed at the end of an AU.) */
1345 if (G_UNLIKELY (size < 4)) {
1346 gst_buffer_unmap (buffer, &map);
1351 /* need to configure aggregation */
1352 if (G_UNLIKELY (h264parse->format == GST_H264_PARSE_FORMAT_NONE))
1353 gst_h264_parse_negotiate (h264parse, GST_H264_PARSE_FORMAT_BYTE, NULL);
1355 /* avoid stale cached parsing state */
1356 if (frame->flags & GST_BASE_PARSE_FRAME_FLAG_NEW_FRAME) {
1357 GST_LOG_OBJECT (h264parse, "parsing new frame");
1358 gst_h264_parse_reset_frame (h264parse);
1360 GST_LOG_OBJECT (h264parse, "resuming frame parsing");
1363 /* Always consume the entire input buffer when in_align == ALIGN_AU */
1364 drain = GST_BASE_PARSE_DRAINING (parse)
1365 || h264parse->in_align == GST_H264_PARSE_ALIGN_AU;
/* resume at the offset cached from the previous (incomplete) round */
1368 current_off = h264parse->current_off;
1369 if (current_off < 0)
1372 /* The parser is being drain, but no new data was added, just prentend this
1374 if (drain && current_off == size) {
1375 GST_DEBUG_OBJECT (h264parse, "draining with no new data");
1377 nalu.offset = current_off;
1381 g_assert (current_off < size);
1382 GST_DEBUG_OBJECT (h264parse, "last parse position %d", current_off);
1384 /* check for initial skip */
/* first pass on a fresh buffer: locate the first start code and have
 * the base class skip any leading garbage before it */
1385 if (h264parse->current_off == -1) {
1387 gst_h264_parser_identify_nalu_unchecked (nalparser, data, current_off,
1390 case GST_H264_PARSER_OK:
1391 if (nalu.sc_offset > 0) {
1392 *skipsize = nalu.sc_offset;
1396 case GST_H264_PARSER_NO_NAL:
1397 /* Start code may have up to 4 bytes */
1398 *skipsize = size - 4;
1402 /* should not really occur either */
1403 GST_ELEMENT_ERROR (h264parse, STREAM, FORMAT,
1404 ("Error parsing H.264 stream"), ("Invalid H.264 stream"));
1405 goto invalid_stream;
1408 /* Ensure we use the TS of the first NAL. This avoids broken timestamp in
1409 * the case of a miss-placed filler byte. */
1410 gst_base_parse_set_ts_at_offset (parse, nalu.offset);
/* main scan: identify the next complete NAL starting at current_off */
1415 gst_h264_parser_identify_nalu (nalparser, data, current_off, size,
1419 case GST_H264_PARSER_OK:
1420 GST_DEBUG_OBJECT (h264parse, "complete nal (offset, size): (%u, %u) ",
1421 nalu.offset, nalu.size);
1422 #ifdef TIZEN_FEATURE_H264PARSE_MODIFICATION
1423 last_nal_type = nalu.type;
1426 case GST_H264_PARSER_NO_NAL_END:
1427 /* In NAL alignment, assume the NAL is complete */
1428 if (h264parse->in_align == GST_H264_PARSE_ALIGN_NAL ||
1429 h264parse->in_align == GST_H264_PARSE_ALIGN_AU) {
1431 nalu.size = size - nalu.offset;
1434 GST_DEBUG_OBJECT (h264parse, "not a complete nal found at offset %u",
1436 /* if draining, accept it as complete nal */
1439 nalu.size = size - nalu.offset;
1440 GST_DEBUG_OBJECT (h264parse, "draining, accepting with size %u",
1442 /* if it's not too short at least */
1447 /* otherwise need more */
1449 case GST_H264_PARSER_BROKEN_LINK:
1450 GST_ELEMENT_ERROR (h264parse, STREAM, FORMAT,
1451 ("Error parsing H.264 stream"),
1452 ("The link to structure needed for the parsing couldn't be found"));
1453 goto invalid_stream;
1454 case GST_H264_PARSER_ERROR:
1455 /* should not really occur either */
1456 GST_ELEMENT_ERROR (h264parse, STREAM, FORMAT,
1457 ("Error parsing H.264 stream"), ("Invalid H.264 stream"));
1458 goto invalid_stream;
1459 case GST_H264_PARSER_NO_NAL:
1460 #ifdef TIZEN_FEATURE_H264PARSE_MODIFICATION
/* Tizen-specific: after a SEQ_END NAL, trailing data without a start
 * code is pushed out as-is instead of erroring */
1461 if (last_nal_type == GST_H264_NAL_SEQ_END) {
1462 GST_WARNING_OBJECT (h264parse, "No H.264 NAL unit found, but last "
1463 "nal type is SEQ_END, So push remain buffer");
1464 gst_buffer_unmap (buffer, &map);
1465 gst_h264_parse_parse_frame (parse, frame);
1466 return gst_base_parse_finish_frame (parse, frame, size);
1469 GST_ELEMENT_ERROR (h264parse, STREAM, FORMAT,
1470 ("Error parsing H.264 stream"), ("No H.264 NAL unit found"));
1471 goto invalid_stream;
1472 case GST_H264_PARSER_BROKEN_DATA:
1473 GST_WARNING_OBJECT (h264parse, "input stream is corrupt; "
1474 "it contains a NAL unit of length %u", nalu.size);
1476 /* broken nal at start -> arrange to skip it,
1477 * otherwise have it terminate current au
1478 * (and so it will be skipped on next frame round) */
1479 GST_ELEMENT_WARNING (h264parse, STREAM, DECODE,
1480 (NULL), ("Broken bit stream"));
1481 if (current_off == 0) {
1482 GST_DEBUG_OBJECT (h264parse, "skipping broken nal");
1483 *skipsize = nalu.offset;
1486 GST_DEBUG_OBJECT (h264parse, "terminating au");
1488 nalu.offset = nalu.sc_offset;
1493 g_assert_not_reached ();
1497 GST_DEBUG_OBJECT (h264parse, "%p complete nal found. Off: %u, Size: %u",
1498 data, nalu.offset, nalu.size);
/* collect_nal() returning TRUE means this NAL starts a new AU, so any
 * data accumulated before it forms a complete frame */
1500 if (gst_h264_parse_collect_nal (h264parse, &nalu)) {
1501 h264parse->aud_needed = TRUE;
1502 /* complete current frame, if it exist */
1503 if (current_off > 0) {
1505 nalu.offset = nalu.sc_offset;
1506 h264parse->marker = TRUE;
1511 if (!gst_h264_parse_process_nal (h264parse, &nalu)) {
1512 GST_WARNING_OBJECT (h264parse,
1513 "broken/invalid nal Type: %d %s, Size: %u will be dropped",
1514 nalu.type, _nal_name (nalu.type), nalu.size);
1515 *skipsize = nalu.size;
1519 /* Make sure the next buffer will contain an AUD */
1520 if (h264parse->aud_needed) {
1521 h264parse->aud_insert = TRUE;
1522 h264parse->aud_needed = FALSE;
1525 /* Do not push immediately if we don't have all headers. This ensure that
1526 * our caps are complete, avoiding a renegotiation */
1527 if (h264parse->align == GST_H264_PARSE_ALIGN_NAL &&
1528 !GST_H264_PARSE_STATE_VALID (h264parse,
1529 GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS))
1530 frame->flags |= GST_BASE_PARSE_FRAME_FLAG_QUEUE;
1532 /* if no next nal, we reached the end of this buffer */
1534 /* If there is a marker flag, or input is AU, we know this is complete */
1535 if (GST_BUFFER_FLAG_IS_SET (frame->buffer, GST_BUFFER_FLAG_MARKER) ||
1536 h264parse->in_align == GST_H264_PARSE_ALIGN_AU) {
1537 h264parse->marker = TRUE;
1541 /* or if we are draining */
1542 if (drain || h264parse->align == GST_H264_PARSE_ALIGN_NAL)
1545 current_off = nalu.offset + nalu.size;
1549 /* If the output is NAL, we are done */
1550 if (h264parse->align == GST_H264_PARSE_ALIGN_NAL)
1553 GST_DEBUG_OBJECT (h264parse, "Looking for more");
1554 current_off = nalu.offset + nalu.size;
1556 /* expect at least 3 bytes start_code, and 1 bytes NALU header.
1557 * the length of the NALU payload can be zero.
1558 * (e.g. EOS/EOB placed at the end of an AU.) */
1559 if (size - current_off < 4) {
1560 /* Finish the frame if there is no more data in the stream */
/* AU (or accepted NAL) complete: hand exactly framesize bytes to the
 * base class as one output frame */
1569 framesize = nalu.offset + nalu.size;
1571 gst_buffer_unmap (buffer, &map);
1573 gst_h264_parse_parse_frame (parse, frame);
1575 return gst_base_parse_finish_frame (parse, frame, framesize);
1580 /* Restart parsing from here next time */
1581 if (current_off > 0)
1582 h264parse->current_off = current_off;
1586 gst_buffer_unmap (buffer, &map);
/* skip path (reached via *skipsize above -- label elided from view) */
1590 GST_DEBUG_OBJECT (h264parse, "skipping %d", *skipsize);
1591 /* If we are collecting access units, we need to preserve the initial
1592 * config headers (SPS, PPS et al.) and only reset the frame if another
1593 * slice NAL was received. This means that broken pictures are discarded */
1594 if (h264parse->align != GST_H264_PARSE_ALIGN_AU ||
1595 !(h264parse->state & GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS) ||
1596 (h264parse->state & GST_H264_PARSE_STATE_GOT_SLICE))
1597 gst_h264_parse_reset_frame (h264parse);
/* invalid_stream error path: unmap and abort the stream */
1601 gst_buffer_unmap (buffer, &map);
1602 return GST_FLOW_ERROR;
1605 /* byte together avc codec data based on collected pps and sps so far */
/* Builds an AVCDecoderConfigurationRecord (ISO/IEC 14496-15) from the
 * SPS/PPS NALs cached in h264parse->sps_nals / pps_nals; for AVC3 the
 * parameter-set arrays are left empty since they travel in-band.
 * Caller owns the returned buffer. */
1607 gst_h264_parse_make_codec_data (GstH264Parse * h264parse)
1609 GstBuffer *buf, *nal;
1610 gint i, sps_size = 0, pps_size = 0, num_sps = 0, num_pps = 0;
1611 guint8 profile_idc = 0, profile_comp = 0, level_idc = 0;
1612 gboolean found = FALSE;
1617 /* only nal payload in stored nals */
/* first pass: total SPS bytes (each entry carries a 2-byte length
 * prefix) and capture profile/compat/level from an SPS payload */
1619 for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
1620 if ((nal = h264parse->sps_nals[i])) {
1621 gsize size = gst_buffer_get_size (nal);
1623 /* size bytes also count */
1624 sps_size += size + 2;
/* bytes 1..3 of the SPS NAL are profile_idc, constraint flags and
 * level_idc, copied verbatim into the config record header */
1628 gst_buffer_extract (nal, 1, tmp, 3);
1629 profile_idc = tmp[0];
1630 profile_comp = tmp[1];
1635 for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
1636 if ((nal = h264parse->pps_nals[i])) {
1638 /* size bytes also count */
1639 pps_size += gst_buffer_get_size (nal) + 2;
1643 /* AVC3 has SPS/PPS inside the stream, not in the codec_data */
1644 if (h264parse->format == GST_H264_PARSE_FORMAT_AVC3) {
1645 num_sps = sps_size = 0;
1646 num_pps = pps_size = 0;
1649 GST_DEBUG_OBJECT (h264parse,
1650 "constructing codec_data: num_sps=%d, num_pps=%d", num_sps, num_pps);
/* bail out when we have no SPS, or no PPS for plain AVC */
1652 if (!found || (0 == num_pps
1653 && GST_H264_PARSE_FORMAT_AVC3 != h264parse->format))
/* 5 fixed header bytes + numSPS byte + SPS data + numPPS byte + PPS */
1656 buf = gst_buffer_new_allocate (NULL, 5 + 1 + sps_size + 1 + pps_size, NULL);
1657 gst_buffer_map (buf, &map, GST_MAP_WRITE);
1659 nl = h264parse->nal_length_size;
1661 data[0] = 1; /* AVC Decoder Configuration Record ver. 1 */
1662 data[1] = profile_idc; /* profile_idc */
1663 data[2] = profile_comp; /* profile_compability */
1664 data[3] = level_idc; /* level_idc */
1665 data[4] = 0xfc | (nl - 1); /* nal_length_size_minus1 */
1666 data[5] = 0xe0 | num_sps; /* number of SPSs */
/* append each SPS as 16-bit big-endian length + raw NAL payload */
1669 if (h264parse->format != GST_H264_PARSE_FORMAT_AVC3) {
1670 for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
1671 if ((nal = h264parse->sps_nals[i])) {
1672 gsize nal_size = gst_buffer_get_size (nal);
1673 GST_WRITE_UINT16_BE (data, nal_size);
1674 gst_buffer_extract (nal, 0, data + 2, nal_size);
1675 data += 2 + nal_size;
/* PPS array in the same 2-byte-length-prefixed layout */
1682 if (h264parse->format != GST_H264_PARSE_FORMAT_AVC3) {
1683 for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
1684 if ((nal = h264parse->pps_nals[i])) {
1685 gsize nal_size = gst_buffer_get_size (nal);
1686 GST_WRITE_UINT16_BE (data, nal_size);
1687 gst_buffer_extract (nal, 0, data + 2, nal_size);
1688 data += 2 + nal_size;
1693 gst_buffer_unmap (buf, &map);
/* Report the pixel-aspect-ratio to advertise in caps: an upstream-set
 * PAR (both fields != -1) takes precedence over the PAR parsed from the
 * SPS VUI. Writes the numerator/denominator through num/den. */
1699 gst_h264_parse_get_par (GstH264Parse * h264parse, gint * num, gint * den)
1701 if (h264parse->upstream_par_n != -1 && h264parse->upstream_par_d != -1) {
1702 *num = h264parse->upstream_par_n;
1703 *den = h264parse->upstream_par_d;
1705 *num = h264parse->parsed_par_n;
1706 *den = h264parse->parsed_par_d;
/* Build video/x-h264 caps listing every profile a decoder could use for
 * a stream with this SPS, based on profile_idc plus the constraint_set
 * flags (H.264 spec Annex A compatibility rules, cited inline).
 * Returns NULL when no compatible profile list applies. */
1711 get_compatible_profile_caps (GstH264SPS * sps)
1713 GstCaps *caps = NULL;
1714 const gchar **profiles = NULL;
1716 GValue compat_profiles = G_VALUE_INIT;
1717 g_value_init (&compat_profiles, GST_TYPE_LIST);
1719 switch (sps->profile_idc) {
1720 case GST_H264_PROFILE_EXTENDED:
1721 if (sps->constraint_set0_flag) { /* A.2.1 */
1722 if (sps->constraint_set1_flag) {
1723 static const gchar *profile_array[] =
1724 { "constrained-baseline", "baseline", "main", "high",
1725 "high-10", "high-4:2:2", "high-4:4:4", NULL
1727 profiles = profile_array;
1729 static const gchar *profile_array[] = { "baseline", NULL };
1730 profiles = profile_array;
1732 } else if (sps->constraint_set1_flag) { /* A.2.2 */
1733 static const gchar *profile_array[] =
1734 { "main", "high", "high-10", "high-4:2:2", "high-4:4:4", NULL };
1735 profiles = profile_array;
1738 case GST_H264_PROFILE_BASELINE:
1739 if (sps->constraint_set1_flag) { /* A.2.1 */
1740 static const gchar *profile_array[] =
1741 { "baseline", "main", "high", "high-10", "high-4:2:2",
1744 profiles = profile_array;
1746 static const gchar *profile_array[] = { "extended", NULL };
1747 profiles = profile_array;
1750 case GST_H264_PROFILE_MAIN:
1752 static const gchar *profile_array[] =
1753 { "high", "high-10", "high-4:2:2", "high-4:4:4", NULL };
1754 profiles = profile_array;
1757 case GST_H264_PROFILE_HIGH:
1758 if (sps->constraint_set1_flag) {
1759 static const gchar *profile_array[] =
1760 { "main", "high-10", "high-4:2:2", "high-4:4:4", NULL };
1761 profiles = profile_array;
1763 static const gchar *profile_array[] =
1764 { "high-10", "high-4:2:2", "high-4:4:4", NULL };
1765 profiles = profile_array;
1768 case GST_H264_PROFILE_HIGH10:
1769 if (sps->constraint_set1_flag) {
1770 static const gchar *profile_array[] =
1771 { "main", "high", "high-4:2:2", "high-4:4:4", NULL };
1772 profiles = profile_array;
1774 if (sps->constraint_set3_flag) { /* A.2.8 */
1775 static const gchar *profile_array[] =
1776 { "high-10", "high-4:2:2", "high-4:4:4", "high-4:2:2-intra",
1777 "high-4:4:4-intra", NULL
1779 profiles = profile_array;
1781 static const gchar *profile_array[] =
1782 { "high-4:2:2", "high-4:4:4", NULL };
1783 profiles = profile_array;
1787 case GST_H264_PROFILE_HIGH_422:
1788 if (sps->constraint_set1_flag) {
1789 static const gchar *profile_array[] =
1790 { "main", "high", "high-10", "high-4:4:4", NULL };
1791 profiles = profile_array;
1793 if (sps->constraint_set3_flag) { /* A.2.9 */
1794 static const gchar *profile_array[] =
1795 { "high-4:2:2", "high-4:4:4", "high-4:2:2-intra",
1796 "high-4:4:4-intra", NULL
1798 profiles = profile_array;
1800 static const gchar *profile_array[] =
1801 { "high-4:2:2", "high-4:4:4", NULL };
1802 profiles = profile_array;
1806 case GST_H264_PROFILE_HIGH_444:
1807 if (sps->constraint_set1_flag) {
1808 static const gchar *profile_array[] =
1809 { "main", "high", "high-10", "high-4:2:2", NULL };
1810 profiles = profile_array;
1811 } else if (sps->constraint_set3_flag) { /* A.2.10 */
1812 static const gchar *profile_array[] = { "high-4:4:4", NULL };
1813 profiles = profile_array;
1816 case GST_H264_PROFILE_MULTIVIEW_HIGH:
/* exactly two views makes the stream also stereo-high compatible */
1817 if (sps->extension_type == GST_H264_NAL_EXTENSION_MVC
1818 && sps->extension.mvc.num_views_minus1 == 1) {
1819 static const gchar *profile_array[] =
1820 { "stereo-high", "multiview-high", NULL };
1821 profiles = profile_array;
1823 static const gchar *profile_array[] = { "multiview-high", NULL };
1824 profiles = profile_array;
/* wrap the selected profile strings into a GstCaps "profile" list */
1832 GValue value = G_VALUE_INIT;
1833 caps = gst_caps_new_empty_simple ("video/x-h264");
1834 for (i = 0; profiles[i]; i++) {
1835 g_value_init (&value, G_TYPE_STRING);
1836 g_value_set_string (&value, profiles[i]);
1837 gst_value_list_append_value (&compat_profiles, &value);
1838 g_value_unset (&value);
1840 gst_caps_set_value (caps, "profile", &compat_profiles);
1841 g_value_unset (&compat_profiles);
1847 /* if downstream didn't support the exact profile indicated in sps header,
1848 * check for the compatible profiles also */
/* Mutates 'caps' in place: when the peer cannot intersect the exact
 * profile, pick a compatible profile (via get_compatible_profile_caps)
 * that the peer does accept and set it on 'caps'. */
1850 ensure_caps_profile (GstH264Parse * h264parse, GstCaps * caps, GstH264SPS * sps)
1852 GstCaps *peer_caps, *compat_caps;
1854 peer_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (h264parse));
/* no fixed caps yet, or they don't match: query the peer with a
 * generic video/x-h264 filter to learn what it would accept */
1855 if (!peer_caps || !gst_caps_can_intersect (caps, peer_caps)) {
1856 GstCaps *filter_caps = gst_caps_new_empty_simple ("video/x-h264");
1859 gst_caps_unref (peer_caps);
1861 gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (h264parse),
1864 gst_caps_unref (filter_caps);
1867 if (peer_caps && !gst_caps_can_intersect (caps, peer_caps)) {
1868 GstStructure *structure;
1870 compat_caps = get_compatible_profile_caps (sps);
1871 if (compat_caps != NULL) {
1872 GstCaps *res_caps = NULL;
1874 res_caps = gst_caps_intersect (peer_caps, compat_caps);
1876 if (res_caps && !gst_caps_is_empty (res_caps)) {
1877 const gchar *profile_str = NULL;
/* fixate to a single profile string and apply it to our caps */
1879 res_caps = gst_caps_fixate (res_caps);
1880 structure = gst_caps_get_structure (res_caps, 0);
1881 profile_str = gst_structure_get_string (structure, "profile");
1883 gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile_str,
1885 GST_DEBUG_OBJECT (h264parse,
1886 "Setting compatible profile %s to the caps", profile_str);
1890 gst_caps_unref (res_caps);
1891 gst_caps_unref (compat_caps);
1895 gst_caps_unref (peer_caps);
/* Map a single decimal digit (0..9) to a static string constant;
 * values >= 10 are handled past the visible guard (branch elided). */
1898 static const gchar *
1899 digit_to_string (guint digit)
1901 static const char itoa[][2] = {
1902 "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"
1905 if (G_LIKELY (digit < 10))
/* Map SPS profile_idc (refined by the constraint_set flags) to the
 * profile string used in video/x-h264 caps. Returns NULL-initialised
 * 'profile' when the idc is unknown (case labels elided from view). */
1911 static const gchar *
1912 get_profile_string (GstH264SPS * sps)
1914 const gchar *profile = NULL;
1916 switch (sps->profile_idc) {
1918 if (sps->constraint_set1_flag)
1919 profile = "constrained-baseline";
1921 profile = "baseline";
1927 profile = "extended";
1930 if (sps->constraint_set4_flag) {
1931 if (sps->constraint_set5_flag)
1932 profile = "constrained-high";
1934 profile = "progressive-high";
1939 if (sps->constraint_set3_flag)
1940 profile = "high-10-intra";
1941 else if (sps->constraint_set4_flag)
1942 profile = "progressive-high-10";
1944 profile = "high-10";
1947 if (sps->constraint_set3_flag)
1948 profile = "high-4:2:2-intra";
1950 profile = "high-4:2:2";
1953 if (sps->constraint_set3_flag)
1954 profile = "high-4:4:4-intra";
1956 profile = "high-4:4:4";
1959 profile = "cavlc-4:4:4-intra";
1962 profile = "multiview-high";
1965 profile = "stereo-high";
1968 if (sps->constraint_set5_flag)
1969 profile = "scalable-constrained-baseline";
1971 profile = "scalable-baseline";
1974 if (sps->constraint_set3_flag)
1975 profile = "scalable-high-intra";
1976 else if (sps->constraint_set5_flag)
1977 profile = "scalable-constrained-high";
1979 profile = "scalable-high";
/* Map SPS level_idc to the caps level string: level 1b is signalled as
 * level_idc 11 + constraint_set3_flag (or level_idc 9); multiples of 10
 * map to a single digit; remaining levels go through the switch. */
1988 static const gchar *
1989 get_level_string (GstH264SPS * sps)
1991 if (sps->level_idc == 0)
1993 else if ((sps->level_idc == 11 && sps->constraint_set3_flag)
1994 || sps->level_idc == 9)
1996 else if (sps->level_idc % 10 == 0)
1997 return digit_to_string (sps->level_idc / 10);
1999 switch (sps->level_idc) {
2033 gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps)
2036 GstCaps *sink_caps, *src_caps;
2037 gboolean modified = FALSE;
2038 GstBuffer *buf = NULL;
2039 GstStructure *s = NULL;
2041 if (G_UNLIKELY (!gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD
2044 else if (G_UNLIKELY (!h264parse->update_caps))
2047 /* if this is being called from the first _setcaps call, caps on the sinkpad
2048 * aren't set yet and so they need to be passed as an argument */
2050 sink_caps = gst_caps_ref (caps);
2052 sink_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (h264parse));
2054 /* carry over input caps as much as possible; override with our own stuff */
2056 sink_caps = gst_caps_new_empty_simple ("video/x-h264");
2058 s = gst_caps_get_structure (sink_caps, 0);
2060 sps = h264parse->nalparser->last_sps;
2061 GST_DEBUG_OBJECT (h264parse, "sps: %p", sps);
2063 /* only codec-data for nice-and-clean au aligned packetized avc format */
2064 if ((h264parse->format == GST_H264_PARSE_FORMAT_AVC
2065 || h264parse->format == GST_H264_PARSE_FORMAT_AVC3)
2066 && h264parse->align == GST_H264_PARSE_ALIGN_AU) {
2067 buf = gst_h264_parse_make_codec_data (h264parse);
2068 if (buf && h264parse->codec_data) {
2071 gst_buffer_map (buf, &map, GST_MAP_READ);
2072 if (map.size != gst_buffer_get_size (h264parse->codec_data) ||
2073 gst_buffer_memcmp (h264parse->codec_data, 0, map.data, map.size))
2076 gst_buffer_unmap (buf, &map);
2078 if (!buf && h264parse->codec_data_in)
2079 buf = gst_buffer_ref (h264parse->codec_data_in);
2085 if (G_UNLIKELY (!sps)) {
2086 caps = gst_caps_copy (sink_caps);
2088 gint crop_width, crop_height;
2089 gint fps_num, fps_den;
2091 GstH264VUIParams *vui = &sps->vui_parameters;
2092 gchar *colorimetry = NULL;
2094 if (sps->frame_cropping_flag) {
2095 crop_width = sps->crop_rect_width;
2096 crop_height = sps->crop_rect_height;
2098 crop_width = sps->width;
2099 crop_height = sps->height;
2102 if (G_UNLIKELY (h264parse->width != crop_width ||
2103 h264parse->height != crop_height)) {
2104 GST_INFO_OBJECT (h264parse, "resolution changed %dx%d",
2105 crop_width, crop_height);
2106 h264parse->width = crop_width;
2107 h264parse->height = crop_height;
2111 /* 0/1 is set as the default in the codec parser, we will set
2112 * it in case we have no info */
2113 gst_h264_video_calculate_framerate (sps, h264parse->field_pic_flag,
2114 h264parse->sei_pic_struct, &fps_num, &fps_den);
2115 if (G_UNLIKELY (h264parse->fps_num != fps_num
2116 || h264parse->fps_den != fps_den)) {
2117 GST_DEBUG_OBJECT (h264parse, "framerate changed %d/%d", fps_num, fps_den);
2118 h264parse->fps_num = fps_num;
2119 h264parse->fps_den = fps_den;
2123 if (vui->aspect_ratio_info_present_flag) {
2124 if (G_UNLIKELY ((h264parse->parsed_par_n != vui->par_n)
2125 || (h264parse->parsed_par_d != vui->par_d))) {
2126 h264parse->parsed_par_n = vui->par_n;
2127 h264parse->parsed_par_d = vui->par_d;
2128 GST_INFO_OBJECT (h264parse, "pixel aspect ratio has been changed %d/%d",
2129 h264parse->parsed_par_n, h264parse->parsed_par_d);
2134 if (vui->video_signal_type_present_flag &&
2135 vui->colour_description_present_flag) {
2136 GstVideoColorimetry ci = { 0, };
2137 gchar *old_colorimetry = NULL;
2139 if (vui->video_full_range_flag)
2140 ci.range = GST_VIDEO_COLOR_RANGE_0_255;
2142 ci.range = GST_VIDEO_COLOR_RANGE_16_235;
2144 ci.matrix = gst_video_color_matrix_from_iso (vui->matrix_coefficients);
2146 gst_video_transfer_function_from_iso (vui->transfer_characteristics);
2147 ci.primaries = gst_video_color_primaries_from_iso (vui->colour_primaries);
2150 gst_video_colorimetry_to_string (&h264parse->parsed_colorimetry);
2151 colorimetry = gst_video_colorimetry_to_string (&ci);
2153 if (colorimetry && g_strcmp0 (old_colorimetry, colorimetry)) {
2154 GST_INFO_OBJECT (h264parse,
2155 "colorimetry has been changed from %s to %s",
2156 GST_STR_NULL (old_colorimetry), colorimetry);
2157 h264parse->parsed_colorimetry = ci;
2161 g_free (old_colorimetry);
2164 if (G_UNLIKELY (modified || h264parse->update_caps)) {
2166 GstClockTime latency = 0;
2168 const gchar *caps_mview_mode = NULL;
2169 GstVideoMultiviewMode mview_mode = h264parse->multiview_mode;
2170 GstVideoMultiviewFlags mview_flags = h264parse->multiview_flags;
2171 const gchar *chroma_format = NULL;
2172 guint bit_depth_chroma;
2174 fps_num = h264parse->fps_num;
2175 fps_den = h264parse->fps_den;
2177 caps = gst_caps_copy (sink_caps);
2179 /* sps should give this but upstream overrides */
2180 if (s && gst_structure_has_field (s, "width"))
2181 gst_structure_get_int (s, "width", &width);
2183 width = h264parse->width;
2185 if (s && gst_structure_has_field (s, "height"))
2186 gst_structure_get_int (s, "height", &height);
2188 height = h264parse->height;
2191 !gst_structure_get_fraction (s, "pixel-aspect-ratio", &par_n,
2193 gst_h264_parse_get_par (h264parse, &par_n, &par_d);
2194 if (par_n != 0 && par_d != 0) {
2195 GST_INFO_OBJECT (h264parse, "PAR %d/%d", par_n, par_d);
2196 gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
2197 par_n, par_d, NULL);
2199 /* Assume par_n/par_d of 1/1 for calcs below, but don't set into caps */
2204 /* Pass through or set output stereo/multiview config */
2205 if (s && gst_structure_has_field (s, "multiview-mode")) {
2206 caps_mview_mode = gst_structure_get_string (s, "multiview-mode");
2207 gst_structure_get_flagset (s, "multiview-flags",
2208 (guint *) & mview_flags, NULL);
2209 } else if (mview_mode != GST_VIDEO_MULTIVIEW_MODE_NONE) {
2210 if (gst_video_multiview_guess_half_aspect (mview_mode,
2211 width, height, par_n, par_d)) {
2212 mview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT;
2215 caps_mview_mode = gst_video_multiview_mode_to_caps_string (mview_mode);
2216 gst_caps_set_simple (caps, "multiview-mode", G_TYPE_STRING,
2217 caps_mview_mode, "multiview-flags",
2218 GST_TYPE_VIDEO_MULTIVIEW_FLAGSET, mview_flags,
2219 GST_FLAG_SET_MASK_EXACT, NULL);
2222 gst_caps_set_simple (caps, "width", G_TYPE_INT, width,
2223 "height", G_TYPE_INT, height, NULL);
2225 /* upstream overrides */
2226 if (s && gst_structure_has_field (s, "framerate")) {
2227 gst_structure_get_fraction (s, "framerate", &fps_num, &fps_den);
2230 /* but not necessarily or reliably this */
2233 gst_caps_set_simple (caps, "framerate",
2234 GST_TYPE_FRACTION, fps_num, fps_den, NULL);
2235 s2 = gst_caps_get_structure (caps, 0);
2236 gst_structure_get_fraction (s2, "framerate", &h264parse->parsed_fps_n,
2237 &h264parse->parsed_fps_d);
2239 /* If we know the frame duration, and if we are not in one of the zero
2240 * latency pattern, add one frame of latency */
2241 if (fps_num > 0 && h264parse->in_align != GST_H264_PARSE_ALIGN_AU &&
2242 !(h264parse->in_align == GST_H264_PARSE_ALIGN_NAL &&
2243 h264parse->align == GST_H264_PARSE_ALIGN_NAL)) {
2244 latency = gst_util_uint64_scale (GST_SECOND, fps_den, fps_num);
2247 gst_base_parse_set_latency (GST_BASE_PARSE (h264parse), latency,
2251 bit_depth_chroma = sps->bit_depth_chroma_minus8 + 8;
2253 switch (sps->chroma_format_idc) {
2255 chroma_format = "4:0:0";
2256 bit_depth_chroma = 0;
2259 chroma_format = "4:2:0";
2262 chroma_format = "4:2:2";
2265 chroma_format = "4:4:4";
2272 gst_caps_set_simple (caps,
2273 "chroma-format", G_TYPE_STRING, chroma_format,
2274 "bit-depth-luma", G_TYPE_UINT, sps->bit_depth_luma_minus8 + 8,
2275 "bit-depth-chroma", G_TYPE_UINT, bit_depth_chroma, NULL);
2277 if (colorimetry && (!s || !gst_structure_has_field (s, "colorimetry"))) {
2278 gst_caps_set_simple (caps, "colorimetry", G_TYPE_STRING, colorimetry,
2283 g_free (colorimetry);
2287 const gchar *mdi_str = NULL;
2288 const gchar *cll_str = NULL;
2289 gboolean codec_data_modified = FALSE;
2291 gst_caps_set_simple (caps, "parsed", G_TYPE_BOOLEAN, TRUE,
2292 "stream-format", G_TYPE_STRING,
2293 gst_h264_parse_get_string (h264parse, TRUE, h264parse->format),
2294 "alignment", G_TYPE_STRING,
2295 gst_h264_parse_get_string (h264parse, FALSE, h264parse->align), NULL);
2297 /* set profile and level in caps */
2299 const gchar *profile, *level;
2301 profile = get_profile_string (sps);
2302 if (profile != NULL)
2303 gst_caps_set_simple (caps, "profile", G_TYPE_STRING, profile, NULL);
2305 level = get_level_string (sps);
2307 gst_caps_set_simple (caps, "level", G_TYPE_STRING, level, NULL);
2309 /* relax the profile constraint to find a suitable decoder */
2310 ensure_caps_profile (h264parse, caps, sps);
2314 mdi_str = gst_structure_get_string (s, "mastering-display-info");
2316 gst_caps_set_simple (caps, "mastering-display-info", G_TYPE_STRING,
2318 } else if (h264parse->mastering_display_info_state !=
2319 GST_H264_PARSE_SEI_EXPIRED &&
2320 !gst_video_mastering_display_info_add_to_caps
2321 (&h264parse->mastering_display_info, caps)) {
2322 GST_WARNING_OBJECT (h264parse,
2323 "Couldn't set mastering display info to caps");
2327 cll_str = gst_structure_get_string (s, "content-light-level");
2329 gst_caps_set_simple (caps, "content-light-level", G_TYPE_STRING, cll_str,
2331 } else if (h264parse->content_light_level_state !=
2332 GST_H264_PARSE_SEI_EXPIRED &&
2333 !gst_video_content_light_level_add_to_caps
2334 (&h264parse->content_light_level, caps)) {
2335 GST_WARNING_OBJECT (h264parse,
2336 "Couldn't set content light level to caps");
2339 src_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (h264parse));
2342 GstStructure *src_caps_str = gst_caps_get_structure (src_caps, 0);
2344 /* use codec data from old caps for comparison if we have pushed frame for now.
2345 * we don't want to resend caps if everything is same except codec data.
2346 * However, if the updated sps/pps is not in bitstream, we should put
2347 * it on bitstream */
2348 if (gst_structure_has_field (src_caps_str, "codec_data")) {
2349 const GValue *codec_data_value =
2350 gst_structure_get_value (src_caps_str, "codec_data");
2352 if (!GST_VALUE_HOLDS_BUFFER (codec_data_value)) {
2353 GST_WARNING_OBJECT (h264parse, "codec_data does not hold buffer");
2354 } else if (!h264parse->first_frame) {
2355 /* If there is no pushed frame before, we can update caps without worry.
2356 * But updating codec_data in the middle of frames
2357 * (especially on non-keyframe) might make downstream be confused.
2358 * Therefore we are setting old codec data
2359 * (i.e., was pushed to downstream previously) to new caps candidate
2360 * here for gst_caps_is_strictly_equal() to be returned TRUE if only
2361 * the codec_data is different, and to avoid re-sending caps it
2364 gst_caps_set_value (caps, "codec_data", codec_data_value);
2366 /* check for codec_data update to re-send sps/pps inband data if
2367 * current frame has no sps/pps but upstream codec_data was updated */
2368 if ((!h264parse->have_sps_in_frame || !h264parse->have_pps_in_frame)
2370 GstBuffer *codec_data_buf = gst_value_get_buffer (codec_data_value);
2373 gst_buffer_map (buf, &map, GST_MAP_READ);
2374 if (map.size != gst_buffer_get_size (codec_data_buf) ||
2375 gst_buffer_memcmp (codec_data_buf, 0, map.data, map.size)) {
2376 codec_data_modified = TRUE;
2379 gst_buffer_unmap (buf, &map);
2384 /* remove any left-over codec-data hanging around */
2385 s = gst_caps_get_structure (caps, 0);
2386 gst_structure_remove_field (s, "codec_data");
2390 if (!(src_caps && gst_caps_is_strictly_equal (src_caps, caps))) {
2391 /* update codec data to new value */
2393 gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, buf, NULL);
2394 gst_buffer_replace (&h264parse->codec_data, buf);
2395 gst_buffer_unref (buf);
2399 /* remove any left-over codec-data hanging around */
2400 s = gst_caps_get_structure (caps, 0);
2401 gst_structure_remove_field (s, "codec_data");
2402 gst_buffer_replace (&h264parse->codec_data, NULL);
2405 gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (h264parse), caps);
2406 } else if (codec_data_modified) {
2407 GST_DEBUG_OBJECT (h264parse,
2408 "Only codec_data is different, need inband sps/pps update");
2410 /* this will insert updated codec_data with next idr */
2411 h264parse->push_codec = TRUE;
2415 gst_caps_unref (src_caps);
2416 gst_caps_unref (caps);
2419 gst_caps_unref (sink_caps);
2421 gst_buffer_unref (buf);
/* gst_h264_parse_get_duration:
 * Derive a best-effort duration for the current frame (or field, when
 * @frame is FALSE).  Preference order visible below:
 *   1) SPS VUI timing info (num_units_in_tick / time_scale), scaled by a
 *      tick count taken from the SEI pic_struct when present;
 *   2) fall back to the parsed framerate (parsed_fps_n/parsed_fps_d).
 * Returns GST_CLOCK_TIME_NONE when no timing information is usable.
 * NOTE(review): this extraction has interior lines missing (early-out
 * branches/braces) — confirm control flow against the full file.
 */
2425 gst_h264_parse_get_duration (GstH264Parse * h264parse, gboolean frame)
2427 GstClockTime ret = GST_CLOCK_TIME_NONE;
2428 GstH264SPS *sps = h264parse->nalparser->last_sps;
/* Without frame data there is nothing to time. */
2432 GST_LOG_OBJECT (h264parse, "no frame data -> 0 duration");
/* Guard chain: all VUI-based timing prerequisites must hold. */
2438 GST_DEBUG_OBJECT (h264parse, "referred SPS invalid");
2440 } else if (!sps->vui_parameters_present_flag) {
2441 GST_DEBUG_OBJECT (h264parse, "unable to compute duration: VUI not present");
2443 } else if (!sps->vui_parameters.timing_info_present_flag) {
2444 GST_DEBUG_OBJECT (h264parse,
2445 "unable to compute duration: timing info not present");
2447 } else if (sps->vui_parameters.time_scale == 0) {
2448 GST_DEBUG_OBJECT (h264parse,
2449 "unable to compute duration: time_scale = 0 "
2450 "(this is forbidden in spec; bitstream probably contains error)");
/* SEI pic_struct, when signalled and not the "unspecified" sentinel,
 * determines how many clock ticks this picture spans. */
2454 if (h264parse->sei_pic_struct_pres_flag &&
2455 h264parse->sei_pic_struct != (guint8) - 1) {
2456 /* Note that when h264parse->sei_pic_struct == -1 (unspecified), there
2457 * are ways to infer its value. This is related to computing the
2458 * TopFieldOrderCnt and BottomFieldOrderCnt, which looks
2459 * complicated and thus not implemented for the time being. Yet
2460 * the value we have here is correct for many applications
2462 switch (h264parse->sei_pic_struct) {
2463 case GST_H264_SEI_PIC_STRUCT_TOP_FIELD:
2464 case GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD:
2467 case GST_H264_SEI_PIC_STRUCT_FRAME:
2468 case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM:
2469 case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP:
2472 case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
2473 case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
2476 case GST_H264_SEI_PIC_STRUCT_FRAME_DOUBLING:
2479 case GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING:
2483 GST_DEBUG_OBJECT (h264parse,
2484 "h264parse->sei_pic_struct of unknown value %d. Not parsed",
2485 h264parse->sei_pic_struct);
/* No usable pic_struct: a field picture is one tick, a frame two. */
2489 duration = h264parse->field_pic_flag ? 1 : 2;
2492 GST_LOG_OBJECT (h264parse, "frame tick duration %d", duration);
/* duration ticks * (num_units_in_tick / time_scale) seconds. */
2494 ret = gst_util_uint64_scale (duration * GST_SECOND,
2495 sps->vui_parameters.num_units_in_tick, sps->vui_parameters.time_scale);
/* Sub-millisecond results are treated as bogus and discarded. */
2497 if (ret < GST_MSECOND) {
2498 GST_DEBUG_OBJECT (h264parse, "discarding dur %" GST_TIME_FORMAT,
2499 GST_TIME_ARGS (ret));
/* Fallback: one frame at the framerate parsed into the src caps. */
2507 if (h264parse->parsed_fps_d > 0 && h264parse->parsed_fps_n > 0)
2509 gst_util_uint64_scale (GST_SECOND, h264parse->parsed_fps_d,
2510 h264parse->parsed_fps_n);
/* gst_h264_parse_get_timestamp:
 * Fill @out_ts (DTS) and @out_dur for the current frame/field.
 * Mirrors gst_h264_parse_get_duration()'s VUI guard chain, then:
 *   - when a buffering period was seen (ts_trn_nb valid), derive the
 *     coded-picture removal time from sei_cpb_removal_delay (H.264
 *     Annex C.1.2, see formula comment below);
 *   - otherwise track the upstream timestamp and supply a best-guess
 *     per-frame duration.
 * Updates h264parse->dts as a running decode-time accumulator.
 * NOTE(review): interior lines are missing from this extraction —
 * confirm early-exit paths against the full file.
 */
2515 gst_h264_parse_get_timestamp (GstH264Parse * h264parse,
2516 GstClockTime * out_ts, GstClockTime * out_dur, gboolean frame)
2518 GstH264SPS *sps = h264parse->nalparser->last_sps;
2519 GstClockTime upstream;
2522 g_return_if_fail (out_dur != NULL);
2523 g_return_if_fail (out_ts != NULL);
2526 GST_LOG_OBJECT (h264parse, "Upstream ts %" GST_TIME_FORMAT,
2527 GST_TIME_ARGS (upstream));
2530 GST_LOG_OBJECT (h264parse, "no frame data -> 0 duration");
/* Same VUI prerequisites as duration computation. */
2536 GST_DEBUG_OBJECT (h264parse, "referred SPS invalid");
2538 } else if (!sps->vui_parameters_present_flag) {
2539 GST_DEBUG_OBJECT (h264parse,
2540 "unable to compute timestamp: VUI not present");
2542 } else if (!sps->vui_parameters.timing_info_present_flag) {
2543 GST_DEBUG_OBJECT (h264parse,
2544 "unable to compute timestamp: timing info not present");
2546 } else if (sps->vui_parameters.time_scale == 0) {
2547 GST_DEBUG_OBJECT (h264parse,
2548 "unable to compute timestamp: time_scale = 0 "
2549 "(this is forbidden in spec; bitstream probably contains error)");
/* SEI pic_struct selects the number of clock ticks (same mapping as in
 * gst_h264_parse_get_duration()). */
2553 if (h264parse->sei_pic_struct_pres_flag &&
2554 h264parse->sei_pic_struct != (guint8) - 1) {
2555 /* Note that when h264parse->sei_pic_struct == -1 (unspecified), there
2556 * are ways to infer its value. This is related to computing the
2557 * TopFieldOrderCnt and BottomFieldOrderCnt, which looks
2558 * complicated and thus not implemented for the time being. Yet
2559 * the value we have here is correct for many applications
2561 switch (h264parse->sei_pic_struct) {
2562 case GST_H264_SEI_PIC_STRUCT_TOP_FIELD:
2563 case GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD:
2566 case GST_H264_SEI_PIC_STRUCT_FRAME:
2567 case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM:
2568 case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP:
2571 case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
2572 case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
2575 case GST_H264_SEI_PIC_STRUCT_FRAME_DOUBLING:
2578 case GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING:
2582 GST_DEBUG_OBJECT (h264parse,
2583 "h264parse->sei_pic_struct of unknown value %d. Not parsed",
2584 h264parse->sei_pic_struct);
2588 duration = h264parse->field_pic_flag ? 1 : 2;
2591 GST_LOG_OBJECT (h264parse, "frame tick duration %d", duration);
2594 * h264parse.264 C.1.2 Timing of coded picture removal (equivalent to DTS):
2595 * Tr,n(0) = initial_cpb_removal_delay[ SchedSelIdx ] / 90000
2596 * Tr,n(n) = Tr,n(nb) + Tc * cpb_removal_delay(n)
2598 * Tc = num_units_in_tick / time_scale
/* Buffering-period reference (ts_trn_nb) valid: derive DTS from the CPB
 * removal delay. */
2601 if (h264parse->ts_trn_nb != GST_CLOCK_TIME_NONE) {
2602 GST_LOG_OBJECT (h264parse, "buffering based ts");
2603 /* buffering period is present */
2604 if (upstream != GST_CLOCK_TIME_NONE) {
2605 /* If upstream timestamp is valid, we respect it and adjust current
2606 * reference point */
2607 h264parse->ts_trn_nb = upstream -
2608 (GstClockTime) gst_util_uint64_scale
2609 (h264parse->sei_cpb_removal_delay * GST_SECOND,
2610 sps->vui_parameters.num_units_in_tick,
2611 sps->vui_parameters.time_scale);
2613 /* If no upstream timestamp is given, we write in new timestamp */
2614 upstream = h264parse->dts = h264parse->ts_trn_nb +
2615 (GstClockTime) gst_util_uint64_scale
2616 (h264parse->sei_cpb_removal_delay * GST_SECOND,
2617 sps->vui_parameters.num_units_in_tick,
2618 sps->vui_parameters.time_scale);
2623 GST_LOG_OBJECT (h264parse, "duration based ts");
2624 /* naive method: no removal delay specified
2625 * track upstream timestamp and provide best guess frame duration */
2626 dur = gst_util_uint64_scale (duration * GST_SECOND,
2627 sps->vui_parameters.num_units_in_tick, sps->vui_parameters.time_scale);
/* Sub-millisecond duration considered bogus, dropped. */
2629 if (dur < GST_MSECOND) {
2630 GST_DEBUG_OBJECT (h264parse, "discarding dur %" GST_TIME_FORMAT,
2631 GST_TIME_ARGS (dur));
/* A valid upstream ts always wins and resets the DTS accumulator. */
2638 if (GST_CLOCK_TIME_IS_VALID (upstream))
2639 *out_ts = h264parse->dts = upstream;
/* Advance the running DTS by this frame's duration for the next call. */
2641 if (GST_CLOCK_TIME_IS_VALID (*out_dur) &&
2642 GST_CLOCK_TIME_IS_VALID (h264parse->dts))
2643 h264parse->dts += *out_dur;
/* gst_h264_parse_parse_frame:
 * GstBaseParse::parse_frame vfunc.  Updates src caps, optionally computes
 * DTS/duration (only when do_ts, i.e. upstream timestamps are not trusted),
 * decorates the buffer with DELTA_UNIT/HEADER/DISCONT/MARKER flags, and —
 * when the frame was re-packed (e.g. bytestream->AVC) — replaces the output
 * buffer with the transformed data collected in the frame_out adapter.
 * The tail (visible below the main flow) drops bidirectional frames when
 * discard_bidirectional is set.
 * NOTE(review): lines are missing from this extraction (labels, returns) —
 * confirm against the full file.
 */
2646 static GstFlowReturn
2647 gst_h264_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
2649 GstH264Parse *h264parse;
2653 h264parse = GST_H264_PARSE (parse);
2654 buffer = frame->buffer;
2656 gst_h264_parse_update_src_caps (h264parse, NULL);
2658 /* don't mess with timestamps if provided by upstream,
2659 * particularly since our ts not that good they handle seeking etc */
2660 if (h264parse->do_ts) {
2661 gst_h264_parse_get_timestamp (h264parse,
2662 &GST_BUFFER_DTS (buffer), &GST_BUFFER_DURATION (buffer),
2663 h264parse->frame_start);
2666 /* We don't want to let baseparse select a duration itself based
2667 * solely on the framerate, as we have more per-frame information
2669 if (!GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buffer))) {
2670 GST_BUFFER_DURATION (buffer) =
2671 gst_h264_parse_get_duration (h264parse, h264parse->frame_start);
/* Keyframes are not delta units; everything else is. */
2674 if (h264parse->keyframe)
2675 GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
2677 GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
2679 if (h264parse->discard_bidirectional && h264parse->bidirectional)
2682 if (h264parse->header)
2683 GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_HEADER);
2685 GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_HEADER);
/* DISCONT is one-shot: propagate then clear. */
2687 if (h264parse->discont) {
2688 GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
2689 h264parse->discont = FALSE;
2692 if (h264parse->marker) {
2693 GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_MARKER);
2694 h264parse->marker = FALSE;
2696 GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_MARKER);
2699 /* replace with transformed AVC output if applicable */
2700 av = gst_adapter_available (h264parse->frame_out);
2704 buf = gst_adapter_take_buffer (h264parse->frame_out, av);
/* Keep timestamps/metadata from the original buffer on the repacked one. */
2705 gst_buffer_copy_into (buf, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
2706 gst_buffer_replace (&frame->out_buffer, buf);
2707 gst_buffer_unref (buf);
/* Discard path: drop bidirectional frames entirely. */
2714 GST_DEBUG_OBJECT (h264parse, "Discarding bidirectional frame");
2715 frame->flags |= GST_BASE_PARSE_FRAME_FLAG_DROP;
2716 gst_h264_parse_reset_frame (h264parse);
2720 /* sends a codec NAL downstream, decorating and transforming as needed.
2721 * No ownership is taken of @nal */
/* Wraps @nal for the configured output stream-format (bytestream start
 * code vs. AVC length prefix), stamps it with @buffer's PTS/DTS and a
 * zero duration (it accompanies the frame rather than adding time), and
 * pushes it on the src pad. */
2722 static GstFlowReturn
2723 gst_h264_parse_push_codec_buffer (GstH264Parse * h264parse,
2724 GstBuffer * nal, GstBuffer * buffer)
2727 GstBuffer *wrapped_nal;
2729 gst_buffer_map (nal, &map, GST_MAP_READ);
2730 wrapped_nal = gst_h264_parse_wrap_nal (h264parse, h264parse->format,
2731 map.data, map.size);
2732 gst_buffer_unmap (nal, &map);
2734 GST_BUFFER_PTS (wrapped_nal) = GST_BUFFER_PTS (buffer);
2735 GST_BUFFER_DTS (wrapped_nal) = GST_BUFFER_DTS (buffer);
2736 GST_BUFFER_DURATION (wrapped_nal) = 0;
2738 return gst_pad_push (GST_BASE_PARSE_SRC_PAD (h264parse), wrapped_nal);
/* check_pending_key_unit_event:
 * Decide whether a pending force-key-unit event should fire for the buffer
 * at @timestamp with buffer @flags.  Returns a new downstream
 * force-key-unit event (seqnum copied from @pending_event) when the
 * buffer's running time has reached @pending_key_unit_ts and the buffer is
 * a keyframe; NULL (via the early-out paths) otherwise.
 * NOTE(review): some early-return lines are missing from this extraction.
 */
2742 check_pending_key_unit_event (GstEvent * pending_event,
2743 GstSegment * segment, GstClockTime timestamp, guint flags,
2744 GstClockTime pending_key_unit_ts)
2746 GstClockTime running_time, stream_time;
2747 gboolean all_headers;
2749 GstEvent *event = NULL;
2751 g_return_val_if_fail (segment != NULL, NULL);
2753 if (pending_event == NULL)
/* A valid target time but no buffer timestamp: cannot compare, bail. */
2756 if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
2757 timestamp == GST_CLOCK_TIME_NONE)
2760 running_time = gst_segment_to_running_time (segment,
2761 GST_FORMAT_TIME, timestamp);
2763 GST_INFO ("now %" GST_TIME_FORMAT " wanted %" GST_TIME_FORMAT,
2764 GST_TIME_ARGS (running_time), GST_TIME_ARGS (pending_key_unit_ts));
/* Target time not reached yet. */
2765 if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
2766 running_time < pending_key_unit_ts)
/* Only fire on a keyframe (non-delta unit). */
2769 if (flags & GST_BUFFER_FLAG_DELTA_UNIT) {
2770 GST_DEBUG ("pending force key unit, waiting for keyframe");
2774 stream_time = gst_segment_to_stream_time (segment,
2775 GST_FORMAT_TIME, timestamp);
/* Extract all_headers/count from whichever direction the pending event
 * was (try upstream form first, fall back to downstream form). */
2777 if (!gst_video_event_parse_upstream_force_key_unit (pending_event,
2778 NULL, &all_headers, &count)) {
2779 gst_video_event_parse_downstream_force_key_unit (pending_event, NULL,
2780 NULL, NULL, &all_headers, &count);
2784 gst_video_event_new_downstream_force_key_unit (timestamp, stream_time,
2785 running_time, all_headers, count);
2786 gst_event_set_seqnum (event, gst_event_get_seqnum (pending_event));
/* gst_h264_parse_prepare_key_unit:
 * Consume a downstream force-key-unit @event: clear the pending state,
 * push the event on the src pad, log (debug builds only) whether stored
 * SPS/PPS NALs are available, and set push_codec so that pre_push_frame
 * re-sends SPS/PPS with the next frame.  Takes ownership of @event
 * (it is pushed downstream). */
2793 gst_h264_parse_prepare_key_unit (GstH264Parse * parse, GstEvent * event)
2795 GstClockTime running_time;
2797 #ifndef GST_DISABLE_GST_DEBUG
2798 gboolean have_sps, have_pps;
/* Reset pending force-key-unit tracking before pushing the event. */
2802 parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
2803 gst_event_replace (&parse->force_key_unit_event, NULL);
2805 gst_video_event_parse_downstream_force_key_unit (event,
2806 NULL, NULL, &running_time, NULL, &count);
2808 GST_INFO_OBJECT (parse, "pushing downstream force-key-unit event %d "
2809 "%" GST_TIME_FORMAT " count %d", gst_event_get_seqnum (event),
2810 GST_TIME_ARGS (running_time), count);
2811 gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (parse), event);
/* Debug-only scan of the stored SPS/PPS NAL tables. */
2813 #ifndef GST_DISABLE_GST_DEBUG
2814 have_sps = have_pps = FALSE;
2815 for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
2816 if (parse->sps_nals[i] != NULL) {
2821 for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
2822 if (parse->pps_nals[i] != NULL) {
2828 GST_INFO_OBJECT (parse, "preparing key unit, have sps %d have pps %d",
2829 have_sps, have_pps);
2832 /* set push_codec to TRUE so that pre_push_frame sends SPS/PPS again */
2833 parse->push_codec = TRUE;
/* gst_h264_parse_handle_sps_pps_nals:
 * Ensure SPS/PPS reach downstream with @buffer.  No-op when the frame
 * already carries them.  With NAL alignment, each stored SPS/PPS is pushed
 * as its own codec buffer; with AU alignment, the stored NALs are spliced
 * into the AU at idr_pos using a GstByteWriter, prefixed either with a
 * 4-byte start code (bytestream) or an nal_length_size-byte length (AVC —
 * the uint32 write followed by the pos rewind by `nls` emits only the low
 * nal_length_size bytes).  Returns whether anything was sent/inserted
 * (send_done) — confirm exact return path in the full file.
 * NOTE(review): interior lines (returns, else keywords) are missing from
 * this extraction. */
2837 gst_h264_parse_handle_sps_pps_nals (GstH264Parse * h264parse,
2838 GstBuffer * buffer, GstBaseParseFrame * frame)
2840 GstBuffer *codec_nal;
2842 gboolean send_done = FALSE;
2844 if (h264parse->have_sps_in_frame && h264parse->have_pps_in_frame) {
2845 GST_DEBUG_OBJECT (h264parse, "SPS/PPS exist in frame, will not insert");
2849 if (h264parse->align == GST_H264_PARSE_ALIGN_NAL) {
2850 /* send separate config NAL buffers */
2851 GST_DEBUG_OBJECT (h264parse, "- sending SPS/PPS");
2852 for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
2853 if ((codec_nal = h264parse->sps_nals[i])) {
2854 GST_DEBUG_OBJECT (h264parse, "sending SPS nal");
2855 gst_h264_parse_push_codec_buffer (h264parse, codec_nal, buffer);
2859 for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
2860 if ((codec_nal = h264parse->pps_nals[i])) {
2861 GST_DEBUG_OBJECT (h264parse, "sending PPS nal");
2862 gst_h264_parse_push_codec_buffer (h264parse, codec_nal, buffer);
2867 /* insert config NALs into AU */
/* bs: bytestream output (start codes); nls: bytes to trim from the
 * 4-byte length word down to nal_length_size. */
2870 const gboolean bs = h264parse->format == GST_H264_PARSE_FORMAT_BYTE;
2871 const gint nls = 4 - h264parse->nal_length_size;
/* Copy the AU up to the IDR position, then splice in the config NALs. */
2874 gst_byte_writer_init_with_size (&bw, gst_buffer_get_size (buffer), FALSE);
2875 ok = gst_byte_writer_put_buffer (&bw, buffer, 0, h264parse->idr_pos);
2876 GST_DEBUG_OBJECT (h264parse, "- inserting SPS/PPS");
2877 for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
2878 if ((codec_nal = h264parse->sps_nals[i])) {
2879 gsize nal_size = gst_buffer_get_size (codec_nal);
2880 GST_DEBUG_OBJECT (h264parse, "inserting SPS nal");
2882 ok &= gst_byte_writer_put_uint32_be (&bw, 1);
/* AVC: shift the size into the high bytes, then back the writer up by
 * nls so only nal_length_size bytes remain written. */
2884 ok &= gst_byte_writer_put_uint32_be (&bw, (nal_size << (nls * 8)));
2885 ok &= gst_byte_writer_set_pos (&bw,
2886 gst_byte_writer_get_pos (&bw) - nls);
2889 ok &= gst_byte_writer_put_buffer (&bw, codec_nal, 0, nal_size);
2893 for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
2894 if ((codec_nal = h264parse->pps_nals[i])) {
2895 gsize nal_size = gst_buffer_get_size (codec_nal);
2896 GST_DEBUG_OBJECT (h264parse, "inserting PPS nal");
2898 ok &= gst_byte_writer_put_uint32_be (&bw, 1);
2900 ok &= gst_byte_writer_put_uint32_be (&bw, (nal_size << (nls * 8)));
2901 ok &= gst_byte_writer_set_pos (&bw,
2902 gst_byte_writer_get_pos (&bw) - nls);
2904 ok &= gst_byte_writer_put_buffer (&bw, codec_nal, 0, nal_size);
/* Append the remainder of the AU after the insertion point. */
2908 ok &= gst_byte_writer_put_buffer (&bw, buffer, h264parse->idr_pos, -1);
2909 /* collect result and push */
2910 new_buf = gst_byte_writer_reset_and_get_buffer (&bw);
2911 gst_buffer_copy_into (new_buf, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
2912 /* should already be keyframe/IDR, but it may not have been,
2913 * so mark it as such to avoid being discarded by picky decoder */
2914 GST_BUFFER_FLAG_UNSET (new_buf, GST_BUFFER_FLAG_DELTA_UNIT);
2915 gst_buffer_replace (&frame->out_buffer, new_buf);
2916 gst_buffer_unref (new_buf);
2917 /* some result checking seems to make some compilers happy */
2918 if (G_UNLIKELY (!ok)) {
2919 GST_ERROR_OBJECT (h264parse, "failed to insert SPS/PPS");
/* gst_h264_parse_create_pic_timing_sei:
 * When update-timecode is enabled and @buffer carries
 * GstVideoTimeCodeMeta, build a replacement Picture Timing SEI whose
 * clock timestamps reflect those metas, and return a new buffer with the
 * in-stream pic-timing SEI (at pic_timing_sei_pos/pic_timing_sei_size)
 * replaced by the freshly created SEI memory.  Returns NULL (via the
 * early-out paths) when nothing needs doing.  Adjusts idr_pos for the
 * size delta so later SPS/PPS insertion still lands correctly.
 * NOTE(review): interior lines (returns, loop headers, some assignments)
 * are missing from this extraction — confirm against the full file. */
2927 gst_h264_parse_create_pic_timing_sei (GstH264Parse * h264parse,
/* pic_struct -> NumClockTS mapping per H.264 Table D-1. */
2931 const guint8 num_clock_ts_table[9] = {
2932 1, 1, 1, 2, 2, 3, 3, 2, 3
2935 GstBuffer *out_buf = NULL;
2939 GstH264SEIMessage sei;
2940 GstH264PicTiming *pic_timing;
2941 GstVideoTimeCodeMeta *tc_meta;
2942 gpointer iter = NULL;
2943 guint8 ct_type = GST_H264_CT_TYPE_PROGRESSIVE;
2945 if (!h264parse->update_timecode)
2948 num_meta = gst_buffer_get_n_meta (buffer, GST_VIDEO_TIME_CODE_META_API_TYPE);
/* Timecode rewrite requires an existing pic-timing SEI location and
 * pic_struct support signalled in the VUI. */
2952 if (!h264parse->sei_pic_struct_pres_flag || h264parse->pic_timing_sei_pos < 0) {
2953 GST_ELEMENT_WARNING (h264parse, STREAM, NOT_IMPLEMENTED, (NULL),
2954 ("timecode update was requested but VUI doesn't support timecode"));
2958 g_assert (h264parse->sei_pic_struct <=
2959 GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING);
2961 num_clock_ts = num_clock_ts_table[h264parse->sei_pic_struct];
/* Cannot carry more timecodes than the pic_struct allows clock TS slots. */
2963 if (num_meta > num_clock_ts) {
2964 GST_LOG_OBJECT (h264parse,
2965 "The number of timecode meta %d is superior to required %d",
2966 num_meta, num_clock_ts);
2971 GST_LOG_OBJECT (h264parse,
2972 "The number of timecode meta %d is compatible", num_meta);
/* Start from the parsed in-stream pic-timing SEI and overwrite its
 * clock timestamps below. */
2974 memset (&sei, 0, sizeof (GstH264SEIMessage));
2975 sei.payloadType = GST_H264_SEI_PIC_TIMING;
2976 memcpy (&sei.payload.pic_timing,
2977 &h264parse->pic_timing_sei, sizeof (GstH264PicTiming));
2979 pic_timing = &sei.payload.pic_timing;
/* ct_type (scan type) derived from pic_struct. */
2981 switch (h264parse->sei_pic_struct) {
2982 case GST_H264_SEI_PIC_STRUCT_FRAME:
2983 case GST_H264_SEI_PIC_STRUCT_FRAME_DOUBLING:
2984 case GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING:
2985 ct_type = GST_H264_CT_TYPE_PROGRESSIVE;
2987 case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM:
2988 case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP:
2989 case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
2990 case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
2991 ct_type = GST_H264_CT_TYPE_INTERLACED;
2994 ct_type = GST_H264_CT_TYPE_UNKNOWN;
/* Fill one clock timestamp per timecode meta on the buffer. */
3000 (GstVideoTimeCodeMeta *) gst_buffer_iterate_meta_filtered (buffer,
3001 &iter, GST_VIDEO_TIME_CODE_META_API_TYPE))) {
3002 GstH264ClockTimestamp *tim = &pic_timing->clock_timestamp[i];
3003 GstVideoTimeCode *tc = &tc_meta->tc;
3005 pic_timing->clock_timestamp_flag[i] = 1;
3006 tim->ct_type = ct_type;
3007 tim->nuit_field_based_flag = 1;
3008 tim->counting_type = 0;
/* Drop-frame timecode maps to counting_type 4 (H.264 Table D-2). */
3010 if ((tc->config.flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
3011 == GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
3012 tim->counting_type = 4;
3014 tim->discontinuity_flag = 0;
3015 tim->cnt_dropped_flag = 0;
3016 tim->n_frames = tc->frames;
3018 tim->hours_value = tc->hours;
3019 tim->minutes_value = tc->minutes;
3020 tim->seconds_value = tc->seconds;
/* Signal only the timecode components that are non-zero. */
3022 tim->full_timestamp_flag =
3023 tim->seconds_flag = tim->minutes_flag = tim->hours_flag = 0;
3026 tim->full_timestamp_flag = 1;
3027 else if (tc->minutes > 0)
3028 tim->seconds_flag = tim->minutes_flag = 1;
3029 else if (tc->seconds > 0)
3030 tim->seconds_flag = 1;
3032 GST_LOG_OBJECT (h264parse,
3033 "New time code value %02u:%02u:%02u:%02u",
3034 tim->hours_value, tim->minutes_value, tim->seconds_value,
/* Clear any remaining (unused) clock timestamp slots. */
3040 for (j = i; j < 3; j++)
3041 pic_timing->clock_timestamp_flag[j] = 0;
/* Serialize the SEI for the configured stream-format. */
3043 msg_array = g_array_new (FALSE, FALSE, sizeof (GstH264SEIMessage));
3044 g_array_set_clear_func (msg_array, (GDestroyNotify) gst_h264_sei_clear);
3046 g_array_append_val (msg_array, sei);
3047 if (h264parse->format == GST_H264_PARSE_FORMAT_BYTE) {
3048 sei_mem = gst_h264_create_sei_memory (3, msg_array);
3050 sei_mem = gst_h264_create_sei_memory_avc (h264parse->nal_length_size,
3053 g_array_unref (msg_array);
3056 GST_WARNING_OBJECT (h264parse, "Cannot create Picture Timing SEI memory");
3060 out_buf = gst_buffer_new ();
3061 gst_buffer_copy_into (out_buf, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
/* NAL alignment: the SEI rides as its own appended memory. */
3063 if (h264parse->align == GST_H264_PARSE_ALIGN_NAL) {
3064 gst_buffer_append_memory (out_buf, sei_mem);
3068 mem_size = gst_memory_get_sizes (sei_mem, NULL, NULL);
3070 /* copy every data except for the SEI */
3071 if (h264parse->pic_timing_sei_pos > 0) {
3072 gst_buffer_copy_into (out_buf, buffer, GST_BUFFER_COPY_MEMORY, 0,
3073 h264parse->pic_timing_sei_pos);
3076 /* insert new SEI */
3077 gst_buffer_append_memory (out_buf, sei_mem);
/* Append whatever followed the old SEI in the original buffer. */
3079 if (gst_buffer_get_size (buffer) >
3080 h264parse->pic_timing_sei_pos + h264parse->pic_timing_sei_size) {
3081 gst_buffer_copy_into (out_buf, buffer, GST_BUFFER_COPY_MEMORY,
3082 h264parse->pic_timing_sei_pos + h264parse->pic_timing_sei_size, -1);
/* Keep idr_pos valid after the SEI size changed. */
3085 if (h264parse->idr_pos >= 0) {
3086 h264parse->idr_pos += mem_size;
3087 h264parse->idr_pos -= h264parse->pic_timing_sei_size;
3094 static GstFlowReturn
3095 gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
3097 GstH264Parse *h264parse;
3101 GstBuffer *parse_buffer = NULL;
3102 gboolean is_interlaced = FALSE;
3104 h264parse = GST_H264_PARSE (parse);
3106 if (h264parse->first_frame) {
3107 GstTagList *taglist;
3111 caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
3113 if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (h264parse))) {
3114 GST_INFO_OBJECT (h264parse, "Src pad is flushing");
3115 return GST_FLOW_FLUSHING;
3117 GST_INFO_OBJECT (h264parse, "Src pad is not negotiated!");
3118 return GST_FLOW_NOT_NEGOTIATED;
3122 taglist = gst_tag_list_new_empty ();
3123 gst_pb_utils_add_codec_description_to_tag_list (taglist,
3124 GST_TAG_VIDEO_CODEC, caps);
3125 gst_caps_unref (caps);
3127 gst_base_parse_merge_tags (parse, taglist, GST_TAG_MERGE_REPLACE);
3128 gst_tag_list_unref (taglist);
3130 /* also signals the end of first-frame processing */
3131 h264parse->first_frame = FALSE;
3134 /* In case of byte-stream, insert au delimiter by default
3135 * if it doesn't exist */
3136 if (h264parse->aud_insert && !h264parse->have_aud_in_frame &&
3137 h264parse->format == GST_H264_PARSE_FORMAT_BYTE) {
3138 GST_DEBUG_OBJECT (h264parse, "Inserting AUD into the stream.");
3139 if (h264parse->align == GST_H264_PARSE_ALIGN_AU) {
3141 gst_memory_new_wrapped (GST_MEMORY_FLAG_READONLY, (guint8 *) au_delim,
3142 sizeof (au_delim), 0, sizeof (au_delim), NULL, NULL);
3144 frame->out_buffer = gst_buffer_copy (frame->buffer);
3145 gst_buffer_prepend_memory (frame->out_buffer, mem);
3146 if (h264parse->idr_pos >= 0)
3147 h264parse->idr_pos += sizeof (au_delim);
3149 buffer = frame->out_buffer;
3151 GstBuffer *aud_buffer = gst_buffer_new_allocate (NULL, 2, NULL);
3152 gst_buffer_fill (aud_buffer, 0, (guint8 *) (au_delim + 4), 2);
3154 buffer = frame->buffer;
3155 gst_h264_parse_push_codec_buffer (h264parse, aud_buffer, buffer);
3156 gst_buffer_unref (aud_buffer);
3159 buffer = frame->buffer;
3161 h264parse->aud_insert = FALSE;
3163 if ((event = check_pending_key_unit_event (h264parse->force_key_unit_event,
3164 &parse->segment, GST_BUFFER_TIMESTAMP (buffer),
3165 GST_BUFFER_FLAGS (buffer), h264parse->pending_key_unit_ts))) {
3166 gst_h264_parse_prepare_key_unit (h264parse, event);
3169 /* handle timecode */
3170 new_buf = gst_h264_parse_create_pic_timing_sei (h264parse, buffer);
3172 if (frame->out_buffer)
3173 gst_buffer_unref (frame->out_buffer);
3175 buffer = frame->out_buffer = new_buf;
3178 /* periodic SPS/PPS sending */
3179 if (h264parse->interval > 0 || h264parse->push_codec) {
3180 GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
3182 gboolean initial_frame = FALSE;
3185 if (!GST_CLOCK_TIME_IS_VALID (h264parse->last_report)) {
3186 h264parse->last_report = timestamp;
3187 initial_frame = TRUE;
3190 if (h264parse->idr_pos >= 0) {
3191 GST_LOG_OBJECT (h264parse, "IDR nal at offset %d", h264parse->idr_pos);
3193 if (timestamp > h264parse->last_report)
3194 diff = timestamp - h264parse->last_report;
3198 GST_LOG_OBJECT (h264parse,
3199 "now %" GST_TIME_FORMAT ", last SPS/PPS %" GST_TIME_FORMAT,
3200 GST_TIME_ARGS (timestamp), GST_TIME_ARGS (h264parse->last_report));
3202 GST_DEBUG_OBJECT (h264parse,
3203 "interval since last SPS/PPS %" GST_TIME_FORMAT,
3204 GST_TIME_ARGS (diff));
3206 if (GST_TIME_AS_SECONDS (diff) >= h264parse->interval ||
3207 initial_frame || h264parse->push_codec) {
3208 GstClockTime new_ts;
3210 /* avoid overwriting a perfectly fine timestamp */
3211 new_ts = GST_CLOCK_TIME_IS_VALID (timestamp) ? timestamp :
3212 h264parse->last_report;
3214 if (gst_h264_parse_handle_sps_pps_nals (h264parse, buffer, frame)) {
3215 h264parse->last_report = new_ts;
3218 /* we pushed whatever we had */
3219 h264parse->push_codec = FALSE;
3220 h264parse->have_sps = FALSE;
3221 h264parse->have_pps = FALSE;
3222 h264parse->state &= GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS;
3224 } else if (h264parse->interval == -1) {
3225 if (h264parse->idr_pos >= 0) {
3226 GST_LOG_OBJECT (h264parse, "IDR nal at offset %d", h264parse->idr_pos);
3228 gst_h264_parse_handle_sps_pps_nals (h264parse, buffer, frame);
3230 /* we pushed whatever we had */
3231 h264parse->push_codec = FALSE;
3232 h264parse->have_sps = FALSE;
3233 h264parse->have_pps = FALSE;
3234 h264parse->state &= GST_H264_PARSE_STATE_VALID_PICTURE_HEADERS;
3238 /* Fixme: setting passthrough mode causing multiple issues:
3239 * For nal aligned multiresoluton streams, passthrough mode make h264parse
3240 * unable to advertise the new resolutions. Also causing issues while
3241 * parsing MVC streams when it has two layers.
3242 * Disabing passthourgh mode for now */
3244 /* If SPS/PPS and a keyframe have been parsed, and we're not converting,
3245 * we might switch to passthrough mode now on the basis that we've seen
3246 * the SEI packets and know optional caps params (such as multiview).
3247 * This is an efficiency optimisation that relies on stream properties
3248 * remaining uniform in practice. */
3249 if (h264parse->can_passthrough) {
3250 if (h264parse->keyframe && h264parse->have_sps && h264parse->have_pps) {
3251 GST_LOG_OBJECT (parse, "Switching to passthrough mode");
3252 gst_base_parse_set_passthrough (parse, TRUE);
3257 if (frame->out_buffer) {
3258 parse_buffer = frame->out_buffer =
3259 gst_buffer_make_writable (frame->out_buffer);
3261 parse_buffer = frame->buffer = gst_buffer_make_writable (frame->buffer);
3264 if (!gst_buffer_get_video_time_code_meta (parse_buffer)) {
3267 for (i = 0; i < 3 && h264parse->num_clock_timestamp; i++) {
3268 GstH264ClockTimestamp *tim =
3269 &h264parse->pic_timing_sei.clock_timestamp[i];
3270 gint field_count = -1;
3272 GstVideoTimeCodeFlags flags = 0;
3274 if (!h264parse->pic_timing_sei.clock_timestamp_flag[i])
3278 switch (h264parse->sei_pic_struct) {
3279 case GST_H264_SEI_PIC_STRUCT_FRAME:
3280 case GST_H264_SEI_PIC_STRUCT_TOP_FIELD:
3281 case GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD:
3282 field_count = h264parse->sei_pic_struct;
3284 case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM:
3285 field_count = i + 1;
3287 case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP:
3288 field_count = 2 - i;
3290 case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
3291 field_count = i % 2 ? 2 : 1;
3293 case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
3294 field_count = i % 2 ? 1 : 2;
3296 case GST_H264_SEI_PIC_STRUCT_FRAME_DOUBLING:
3297 case GST_H264_SEI_PIC_STRUCT_FRAME_TRIPLING:
3302 if (field_count == -1) {
3303 GST_WARNING_OBJECT (parse,
3304 "failed to determine field count for timecode");
3308 /* dropping of the two lowest (value 0 and 1) n_frames
3309 * counts when seconds_value is equal to 0 and
3310 * minutes_value is not an integer multiple of 10 */
3311 if (tim->counting_type == 4)
3312 flags |= GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME;
3314 if (tim->ct_type == GST_H264_CT_TYPE_INTERLACED) {
3315 flags |= GST_VIDEO_TIME_CODE_FLAGS_INTERLACED;
3316 is_interlaced = TRUE;
3320 gst_util_uint64_scale_int (tim->n_frames, 1,
3321 2 - tim->nuit_field_based_flag);
3323 GST_LOG_OBJECT (h264parse,
3324 "Add time code meta %02u:%02u:%02u:%02u",
3325 tim->hours_value, tim->minutes_value, tim->seconds_value, n_frames);
3327 gst_buffer_add_video_time_code_meta_full (parse_buffer,
3328 h264parse->parsed_fps_n,
3329 h264parse->parsed_fps_d,
3332 tim->hours_flag ? tim->hours_value : 0,
3333 tim->minutes_flag ? tim->minutes_value : 0,
3334 tim->seconds_flag ? tim->seconds_value : 0, n_frames, field_count);
3337 h264parse->num_clock_timestamp = 0;
3340 if (is_interlaced) {
3341 GST_BUFFER_FLAG_SET (parse_buffer, GST_VIDEO_BUFFER_FLAG_INTERLACED);
3342 if (h264parse->sei_pic_struct == GST_H264_SEI_PIC_STRUCT_TOP_FIELD)
3343 GST_BUFFER_FLAG_SET (parse_buffer, GST_VIDEO_BUFFER_FLAG_TFF);
3346 gst_video_push_user_data ((GstElement *) h264parse, &h264parse->user_data,
3349 gst_h264_parse_reset_frame (h264parse);
3355 gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps)
3357 GstH264Parse *h264parse;
3359 const GValue *codec_data_value;
3360 GstBuffer *codec_data = NULL;
3362 guint format, align, off;
3363 GstH264NalUnit nalu;
3364 GstH264ParserResult parseres;
3367 h264parse = GST_H264_PARSE (parse);
3370 h264parse->push_codec = FALSE;
3372 old_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (parse));
3374 if (!gst_caps_is_equal (old_caps, caps))
3375 gst_h264_parse_reset_stream_info (h264parse);
3376 gst_caps_unref (old_caps);
3379 str = gst_caps_get_structure (caps, 0);
3381 /* accept upstream info if provided */
3382 gst_structure_get_int (str, "width", &h264parse->width);
3383 gst_structure_get_int (str, "height", &h264parse->height);
3384 gst_structure_get_fraction (str, "framerate", &h264parse->fps_num,
3385 &h264parse->fps_den);
3386 gst_structure_get_fraction (str, "pixel-aspect-ratio",
3387 &h264parse->upstream_par_n, &h264parse->upstream_par_d);
3389 /* get upstream format and align from caps */
3390 gst_h264_parse_format_from_caps (caps, &format, &align);
3392 codec_data_value = gst_structure_get_value (str, "codec_data");
3394 /* fix up caps without stream-format for max. backwards compatibility */
3395 if (format == GST_H264_PARSE_FORMAT_NONE) {
3396 /* codec_data implies avc */
3397 if (codec_data_value != NULL) {
3398 GST_ERROR ("video/x-h264 caps with codec_data but no stream-format=avc");
3399 format = GST_H264_PARSE_FORMAT_AVC;
3401 /* otherwise assume bytestream input */
3402 GST_ERROR ("video/x-h264 caps without codec_data or stream-format");
3403 format = GST_H264_PARSE_FORMAT_BYTE;
3407 /* avc caps sanity checks */
3408 if (format == GST_H264_PARSE_FORMAT_AVC) {
3409 /* AVC requires codec_data, AVC3 might have one and/or SPS/PPS inline */
3410 if (codec_data_value == NULL)
3411 goto avc_caps_codec_data_missing;
3413 /* AVC implies alignment=au, everything else is not allowed */
3414 if (align == GST_H264_PARSE_ALIGN_NONE)
3415 align = GST_H264_PARSE_ALIGN_AU;
3416 else if (align != GST_H264_PARSE_ALIGN_AU)
3417 goto avc_caps_wrong_alignment;
3420 /* bytestream caps sanity checks */
3421 if (format == GST_H264_PARSE_FORMAT_BYTE) {
3422 /* should have SPS/PSS in-band (and/or oob in streamheader field) */
3423 if (codec_data_value != NULL)
3424 goto bytestream_caps_with_codec_data;
3427 /* packetized video has codec_data (required for AVC, optional for AVC3) */
3428 if (codec_data_value != NULL) {
3431 guint num_sps, num_pps;
3432 #ifndef GST_DISABLE_GST_DEBUG
3437 GST_DEBUG_OBJECT (h264parse, "have packetized h264");
3438 /* make note for optional split processing */
3439 h264parse->packetized = TRUE;
3441 /* codec_data field should hold a buffer */
3442 if (!GST_VALUE_HOLDS_BUFFER (codec_data_value))
3443 goto avc_caps_codec_data_wrong_type;
3445 codec_data = gst_value_get_buffer (codec_data_value);
3447 goto avc_caps_codec_data_missing;
3448 gst_buffer_map (codec_data, &map, GST_MAP_READ);
3452 /* parse the avcC data */
3453 if (size < 7) { /* when numSPS==0 and numPPS==0, length is 7 bytes */
3454 gst_buffer_unmap (codec_data, &map);
3455 goto avcc_too_small;
3457 /* parse the version, this must be 1 */
3459 gst_buffer_unmap (codec_data, &map);
3462 #ifndef GST_DISABLE_GST_DEBUG
3463 /* AVCProfileIndication */
3464 /* profile_compat */
3465 /* AVCLevelIndication */
3466 profile = (data[1] << 16) | (data[2] << 8) | data[3];
3467 GST_DEBUG_OBJECT (h264parse, "profile %06x", profile);
3470 /* 6 bits reserved | 2 bits lengthSizeMinusOne */
3471 /* this is the number of bytes in front of the NAL units to mark their
3473 h264parse->nal_length_size = (data[4] & 0x03) + 1;
3474 GST_DEBUG_OBJECT (h264parse, "nal length size %u",
3475 h264parse->nal_length_size);
3477 num_sps = data[5] & 0x1f;
3479 for (i = 0; i < num_sps; i++) {
3480 parseres = gst_h264_parser_identify_nalu_avc (h264parse->nalparser,
3481 data, off, size, 2, &nalu);
3482 if (parseres != GST_H264_PARSER_OK) {
3483 gst_buffer_unmap (codec_data, &map);
3484 goto avcc_too_small;
3487 gst_h264_parse_process_nal (h264parse, &nalu);
3488 off = nalu.offset + nalu.size;
3492 gst_buffer_unmap (codec_data, &map);
3493 goto avcc_too_small;
3495 num_pps = data[off];
3498 for (i = 0; i < num_pps; i++) {
3499 parseres = gst_h264_parser_identify_nalu_avc (h264parse->nalparser,
3500 data, off, size, 2, &nalu);
3501 if (parseres != GST_H264_PARSER_OK) {
3502 gst_buffer_unmap (codec_data, &map);
3503 goto avcc_too_small;
3506 gst_h264_parse_process_nal (h264parse, &nalu);
3507 off = nalu.offset + nalu.size;
3510 gst_buffer_unmap (codec_data, &map);
3512 gst_buffer_replace (&h264parse->codec_data_in, codec_data);
3514 /* don't confuse codec_data with inband sps/pps */
3515 h264parse->have_sps_in_frame = FALSE;
3516 h264parse->have_pps_in_frame = FALSE;
3517 } else if (format == GST_H264_PARSE_FORMAT_BYTE) {
3518 GST_DEBUG_OBJECT (h264parse, "have bytestream h264");
3519 /* nothing to pre-process */
3520 h264parse->packetized = FALSE;
3521 /* we have 4 sync bytes */
3522 h264parse->nal_length_size = 4;
3524 /* probably AVC3 without codec_data field, anything to do here? */
3530 /* prefer input type determined above */
3531 in_caps = gst_caps_new_simple ("video/x-h264",
3532 "parsed", G_TYPE_BOOLEAN, TRUE,
3533 "stream-format", G_TYPE_STRING,
3534 gst_h264_parse_get_string (h264parse, TRUE, format),
3535 "alignment", G_TYPE_STRING,
3536 gst_h264_parse_get_string (h264parse, FALSE, align), NULL);
3537 /* negotiate with downstream, sets ->format and ->align */
3538 gst_h264_parse_negotiate (h264parse, format, in_caps);
3539 gst_caps_unref (in_caps);
3542 if (format == h264parse->format && align == h264parse->align) {
3543 /* we did parse codec-data and might supplement src caps */
3544 gst_h264_parse_update_src_caps (h264parse, caps);
3545 } else if (format == GST_H264_PARSE_FORMAT_AVC
3546 || format == GST_H264_PARSE_FORMAT_AVC3) {
3547 /* if input != output, and input is avc, must split before anything else */
3548 /* arrange to insert codec-data in-stream if needed.
3549 * src caps are only arranged for later on */
3550 h264parse->push_codec = TRUE;
3551 h264parse->have_sps = FALSE;
3552 h264parse->have_pps = FALSE;
3553 if (h264parse->align == GST_H264_PARSE_ALIGN_NAL)
3554 h264parse->split_packetized = TRUE;
3555 h264parse->packetized = TRUE;
3558 h264parse->in_align = align;
3563 avc_caps_codec_data_wrong_type:
3565 GST_WARNING_OBJECT (parse, "H.264 AVC caps, codec_data field not a buffer");
3568 avc_caps_codec_data_missing:
3570 GST_WARNING_OBJECT (parse, "H.264 AVC caps, but no codec_data");
3573 avc_caps_wrong_alignment:
3575 GST_WARNING_OBJECT (parse, "H.264 AVC caps with NAL alignment, must be AU");
3578 bytestream_caps_with_codec_data:
3580 GST_WARNING_OBJECT (parse, "H.264 bytestream caps with codec_data is not "
3581 "expected, send SPS/PPS in-band with data or in streamheader field");
3586 GST_DEBUG_OBJECT (h264parse, "avcC size %" G_GSIZE_FORMAT " < 8", size);
3591 GST_DEBUG_OBJECT (h264parse, "wrong avcC version");
3596 GST_WARNING_OBJECT (h264parse, "refused caps %" GST_PTR_FORMAT, caps);
3602 remove_fields (GstCaps * caps, gboolean all)
3606 n = gst_caps_get_size (caps);
3607 for (i = 0; i < n; i++) {
3608 GstStructure *s = gst_caps_get_structure (caps, i);
3611 gst_structure_remove_field (s, "alignment");
3612 gst_structure_remove_field (s, "stream-format");
3614 gst_structure_remove_field (s, "parsed");
3619 gst_h264_parse_get_caps (GstBaseParse * parse, GstCaps * filter)
3621 GstCaps *peercaps, *templ;
3622 GstCaps *res, *tmp, *pcopy;
3624 templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
3626 GstCaps *fcopy = gst_caps_copy (filter);
3627 /* Remove the fields we convert */
3628 remove_fields (fcopy, TRUE);
3629 peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy);
3630 gst_caps_unref (fcopy);
3632 peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL);
3634 pcopy = gst_caps_copy (peercaps);
3635 remove_fields (pcopy, TRUE);
3637 res = gst_caps_intersect_full (pcopy, templ, GST_CAPS_INTERSECT_FIRST);
3638 gst_caps_unref (pcopy);
3639 gst_caps_unref (templ);
3642 GstCaps *tmp = gst_caps_intersect_full (res, filter,
3643 GST_CAPS_INTERSECT_FIRST);
3644 gst_caps_unref (res);
3648 /* Try if we can put the downstream caps first */
3649 pcopy = gst_caps_copy (peercaps);
3650 remove_fields (pcopy, FALSE);
3651 tmp = gst_caps_intersect_full (pcopy, res, GST_CAPS_INTERSECT_FIRST);
3652 gst_caps_unref (pcopy);
3653 if (!gst_caps_is_empty (tmp))
3654 res = gst_caps_merge (tmp, res);
3656 gst_caps_unref (tmp);
3658 gst_caps_unref (peercaps);
3663 gst_h264_parse_event (GstBaseParse * parse, GstEvent * event)
3666 GstH264Parse *h264parse = GST_H264_PARSE (parse);
3668 switch (GST_EVENT_TYPE (event)) {
3669 case GST_EVENT_CUSTOM_DOWNSTREAM:
3671 GstClockTime timestamp, stream_time, running_time;
3672 gboolean all_headers;
3675 if (gst_video_event_is_force_key_unit (event)) {
3676 gst_video_event_parse_downstream_force_key_unit (event,
3677 ×tamp, &stream_time, &running_time, &all_headers, &count);
3679 GST_INFO_OBJECT (h264parse,
3680 "received downstream force key unit event, "
3681 "seqnum %d running_time %" GST_TIME_FORMAT
3682 " all_headers %d count %d", gst_event_get_seqnum (event),
3683 GST_TIME_ARGS (running_time), all_headers, count);
3684 if (h264parse->force_key_unit_event) {
3685 GST_INFO_OBJECT (h264parse, "ignoring force key unit event "
3686 "as one is already queued");
3688 h264parse->pending_key_unit_ts = running_time;
3689 gst_event_replace (&h264parse->force_key_unit_event, event);
3691 gst_event_unref (event);
3694 res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);
3699 case GST_EVENT_FLUSH_STOP:
3700 case GST_EVENT_SEGMENT_DONE:
3701 h264parse->dts = GST_CLOCK_TIME_NONE;
3702 h264parse->ts_trn_nb = GST_CLOCK_TIME_NONE;
3703 h264parse->push_codec = TRUE;
3705 res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);
3707 case GST_EVENT_SEGMENT:
3709 const GstSegment *segment;
3711 gst_event_parse_segment (event, &segment);
3712 /* don't try to mess with more subtle cases (e.g. seek) */
3713 if (segment->format == GST_FORMAT_TIME &&
3714 (segment->start != 0 || segment->rate != 1.0
3715 || segment->applied_rate != 1.0))
3716 h264parse->do_ts = FALSE;
3718 if (segment->flags & GST_SEEK_FLAG_TRICKMODE_FORWARD_PREDICTED) {
3719 GST_DEBUG_OBJECT (h264parse, "Will discard bidirectional frames");
3720 h264parse->discard_bidirectional = TRUE;
3724 h264parse->last_report = GST_CLOCK_TIME_NONE;
3726 res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);
3730 res = GST_BASE_PARSE_CLASS (parent_class)->sink_event (parse, event);
3737 gst_h264_parse_src_event (GstBaseParse * parse, GstEvent * event)
3740 GstH264Parse *h264parse = GST_H264_PARSE (parse);
3742 switch (GST_EVENT_TYPE (event)) {
3743 case GST_EVENT_CUSTOM_UPSTREAM:
3745 GstClockTime running_time;
3746 gboolean all_headers;
3749 if (gst_video_event_is_force_key_unit (event)) {
3750 gst_video_event_parse_upstream_force_key_unit (event,
3751 &running_time, &all_headers, &count);
3753 GST_INFO_OBJECT (h264parse, "received upstream force-key-unit event, "
3754 "seqnum %d running_time %" GST_TIME_FORMAT
3755 " all_headers %d count %d", gst_event_get_seqnum (event),
3756 GST_TIME_ARGS (running_time), all_headers, count);
3759 h264parse->pending_key_unit_ts = running_time;
3760 gst_event_replace (&h264parse->force_key_unit_event, event);
3763 res = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
3767 res = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
3775 gst_h264_parse_set_property (GObject * object, guint prop_id,
3776 const GValue * value, GParamSpec * pspec)
3778 GstH264Parse *parse;
3780 parse = GST_H264_PARSE (object);
3783 case PROP_CONFIG_INTERVAL:
3784 parse->interval = g_value_get_int (value);
3786 case PROP_UPDATE_TIMECODE:
3787 parse->update_timecode = g_value_get_boolean (value);
3790 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
3796 gst_h264_parse_get_property (GObject * object, guint prop_id,
3797 GValue * value, GParamSpec * pspec)
3799 GstH264Parse *parse;
3801 parse = GST_H264_PARSE (object);
3804 case PROP_CONFIG_INTERVAL:
3805 g_value_set_int (value, parse->interval);
3807 case PROP_UPDATE_TIMECODE:
3808 g_value_set_boolean (value, parse->update_timecode);
3811 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);