2 * Copyright (C) 2019 Seungha Yang <seungha.yang@navercorp.com>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
19 * NOTE: some of implementations are copied/modified from Chromium code
21 * Copyright 2015 The Chromium Authors. All rights reserved.
23 * Redistribution and use in source and binary forms, with or without
24 * modification, are permitted provided that the following conditions are
27 * * Redistributions of source code must retain the above copyright
28 * notice, this list of conditions and the following disclaimer.
29 * * Redistributions in binary form must reproduce the above
30 * copyright notice, this list of conditions and the following disclaimer
31 * in the documentation and/or other materials provided with the
33 * * Neither the name of Google Inc. nor the names of its
34 * contributors may be used to endorse or promote products derived from
35 * this software without specific prior written permission.
37 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
38 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
39 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
40 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
41 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
42 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
43 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
44 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
45 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
46 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
47 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
50 * SECTION:gsth264decoder
51 * @title: GstH264Decoder
52 * @short_description: Base class to implement stateless H.264 decoders
61 #include <gst/base/base.h>
62 #include "gsth264decoder.h"
/* Debug category used by all logging in this file. */
64 GST_DEBUG_CATEGORY (gst_h264_decoder_debug);
65 #define GST_CAT_DEFAULT gst_h264_decoder_debug
/* Input bitstream format: packetized AVC (length-prefixed NALs, from
 * codec_data) vs. Annex-B byte-stream (start codes).
 * NOTE(review): the "typedef enum" opener is not visible in this chunk. */
69 GST_H264_DECODER_FORMAT_NONE,
70 GST_H264_DECODER_FORMAT_AVC,
71 GST_H264_DECODER_FORMAT_BYTE
72 } GstH264DecoderFormat;
/* Buffer alignment of the input: one NAL per buffer vs. one access unit. */
76 GST_H264_DECODER_ALIGN_NONE,
77 GST_H264_DECODER_ALIGN_NAL,
78 GST_H264_DECODER_ALIGN_AU
79 } GstH264DecoderAlign;
/* Instance-private state of the base decoder. Allocated via G_ADD_PRIVATE
 * and reachable as self->priv. NOTE(review): several fields and blank lines
 * of the original struct are not visible in this chunk. */
81 struct _GstH264DecoderPrivate
/* Requested spec-compliance behavior (see GstH264DecoderCompliance);
 * guarded by the object lock in the property accessors. */
83 GstH264DecoderCompliance compliance;
/* Size in bytes of the NAL length prefix in AVC format (from avcC). */
88 guint nal_length_size;
91 GstH264DecoderFormat in_format;
92 GstH264DecoderAlign align;
93 GstH264NalParser *parser;
95 /* Cache last field which can not enter the DPB, should be a non ref */
96 GstH264Picture *last_field;
98 /* used for low-latency vs. high throughput mode decision */
101 /* sps/pps of the current slice */
102 const GstH264SPS *active_sps;
103 const GstH264PPS *active_pps;
105 /* Picture currently being processed/decoded */
106 GstH264Picture *current_picture;
107 GstVideoCodecFrame *current_frame;
109 /* Slice (slice header + nalu) currently being processed/decoded */
110 GstH264Slice current_slice;
114 gint max_long_term_frame_idx;
/* State carried over from the previous picture (POC / frame_num
 * derivation per H.264 spec clause 8.2.1). */
117 gint prev_ref_frame_num;
118 gint prev_frame_num_offset;
119 gboolean prev_has_memmgmnt5;
121 /* Values related to previously decoded reference picture */
122 gboolean prev_ref_has_memmgmnt5;
123 gint prev_ref_top_field_order_cnt;
124 gint prev_ref_pic_order_cnt_msb;
125 gint prev_ref_pic_order_cnt_lsb;
127 GstH264PictureField prev_ref_field;
129 /* PicOrderCount of the previously outputted frame */
130 gint last_output_poc;
132 gboolean process_ref_pic_lists;
133 guint preferred_output_delay;
135 /* Reference picture lists, constructed for each frame */
136 GArray *ref_pic_list_p0;
137 GArray *ref_pic_list_b0;
138 GArray *ref_pic_list_b1;
140 /* Temporary picture list, for reference picture lists in fields,
141 * corresponding to 8.2.4.2.2 refFrameList0ShortTerm, refFrameList0LongTerm
142 * and 8.2.4.2.5 refFrameList1ShortTerm and refFrameListLongTerm */
143 GArray *ref_frame_list_0_short_term;
144 GArray *ref_frame_list_1_short_term;
145 GArray *ref_frame_list_long_term;
147 /* Reference picture lists, constructed for each slice */
148 GArray *ref_pic_list0;
149 GArray *ref_pic_list1;
151 /* For delayed output */
152 GstQueueArray *output_queue;
154 gboolean input_state_changed;
156 /* Latency report params */
157 guint32 max_reorder_count;
158 guint32 last_reorder_frame_number;
/* Element queued on output_queue: pairs the codec frame with its decoded
 * picture until it is pushed downstream. NOTE(review): the struct opener
 * is not visible in this chunk. */
166 GstVideoCodecFrame *frame;
167 GstH264Picture *picture;
/* Back-pointer so the queue's clear func can release the frame. */
169 GstH264Decoder *self;
170 } GstH264DecoderOutputFrame;
/* Store new_ret into *ret, but never overwrite an already-recorded
 * non-OK flow return. NOTE(review): the macro tail (assignment and
 * G_STMT_END) is not visible in this chunk. */
172 #define UPDATE_FLOW_RETURN(ret,new_ret) G_STMT_START { \
173 if (*(ret) == GST_FLOW_OK) \
177 #define parent_class gst_h264_decoder_parent_class
/* Abstract type registration: adds the private struct and initializes
 * the debug category before class_init runs. */
178 G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstH264Decoder, gst_h264_decoder,
179 GST_TYPE_VIDEO_DECODER,
180 G_ADD_PRIVATE (GstH264Decoder);
181 GST_DEBUG_CATEGORY_INIT (gst_h264_decoder_debug, "h264decoder", 0,
182 "H.264 Video Decoder"));
/* GObject / GstVideoDecoder vfunc implementations. */
184 static void gst_h264_decoder_finalize (GObject * object);
186 static gboolean gst_h264_decoder_start (GstVideoDecoder * decoder);
187 static gboolean gst_h264_decoder_stop (GstVideoDecoder * decoder);
188 static gboolean gst_h264_decoder_set_format (GstVideoDecoder * decoder,
189 GstVideoCodecState * state);
190 static gboolean gst_h264_decoder_negotiate (GstVideoDecoder * decoder);
191 static GstFlowReturn gst_h264_decoder_finish (GstVideoDecoder * decoder);
192 static gboolean gst_h264_decoder_flush (GstVideoDecoder * decoder);
193 static GstFlowReturn gst_h264_decoder_drain (GstVideoDecoder * decoder);
194 static GstFlowReturn gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
195 GstVideoCodecFrame * frame);
197 /* codec specific functions */
198 static GstFlowReturn gst_h264_decoder_process_sps (GstH264Decoder * self,
200 static GstFlowReturn gst_h264_decoder_decode_slice (GstH264Decoder * self);
201 static GstFlowReturn gst_h264_decoder_decode_nal (GstH264Decoder * self,
202 GstH264NalUnit * nalu);
203 static gboolean gst_h264_decoder_fill_picture_from_slice (GstH264Decoder * self,
204 const GstH264Slice * slice, GstH264Picture * picture);
205 static gboolean gst_h264_decoder_calculate_poc (GstH264Decoder * self,
206 GstH264Picture * picture);
207 static gboolean gst_h264_decoder_init_gap_picture (GstH264Decoder * self,
208 GstH264Picture * picture, gint frame_num);
209 static GstFlowReturn gst_h264_decoder_drain_internal (GstH264Decoder * self);
210 static void gst_h264_decoder_finish_current_picture (GstH264Decoder * self,
211 GstFlowReturn * ret);
212 static void gst_h264_decoder_finish_picture (GstH264Decoder * self,
213 GstH264Picture * picture, GstFlowReturn * ret);
/* Reference-list construction / marking helpers (H.264 clause 8.2). */
214 static void gst_h264_decoder_prepare_ref_pic_lists (GstH264Decoder * self,
215 GstH264Picture * current_picture);
216 static void gst_h264_decoder_clear_ref_pic_lists (GstH264Decoder * self);
217 static gboolean gst_h264_decoder_modify_ref_pic_lists (GstH264Decoder * self);
219 gst_h264_decoder_sliding_window_picture_marking (GstH264Decoder * self,
220 GstH264Picture * picture);
221 static void gst_h264_decoder_do_output_picture (GstH264Decoder * self,
222 GstH264Picture * picture, GstFlowReturn * ret);
223 static GstH264Picture *gst_h264_decoder_new_field_picture (GstH264Decoder *
224 self, GstH264Picture * picture);
226 gst_h264_decoder_clear_output_frame (GstH264DecoderOutputFrame * output_frame);
235 * gst_h264_decoder_compliance_get_type:
237 * Get the compliance type of the h264 decoder.
/* Registers the GstH264DecoderCompliance enum GType exactly once;
 * thread-safe via the g_once_init_enter/leave pair. */
242 gst_h264_decoder_compliance_get_type (void)
244 static gsize h264_decoder_compliance_type = 0;
/* Enum value table; NOTE(review): the nick strings and terminating
 * {0, NULL, NULL} entry are partially missing from this chunk. */
245 static const GEnumValue compliances[] = {
246 {GST_H264_DECODER_COMPLIANCE_AUTO, "GST_H264_DECODER_COMPLIANCE_AUTO",
248 {GST_H264_DECODER_COMPLIANCE_STRICT, "GST_H264_DECODER_COMPLIANCE_STRICT",
250 {GST_H264_DECODER_COMPLIANCE_NORMAL, "GST_H264_DECODER_COMPLIANCE_NORMAL",
252 {GST_H264_DECODER_COMPLIANCE_FLEXIBLE,
253 "GST_H264_DECODER_COMPLIANCE_FLEXIBLE", "flexible"},
258 if (g_once_init_enter (&h264_decoder_compliance_type)) {
261 _type = g_enum_register_static ("GstH264DecoderCompliance", compliances);
262 g_once_init_leave (&h264_decoder_compliance_type, _type);
265 return (GType) h264_decoder_compliance_type;
/* GObject get_property: reads priv->compliance under the object lock. */
269 gst_h264_decoder_get_property (GObject * object, guint property_id,
270 GValue * value, GParamSpec * pspec)
272 GstH264Decoder *self = GST_H264_DECODER (object);
273 GstH264DecoderPrivate *priv = self->priv;
275 switch (property_id) {
276 case PROP_COMPLIANCE:
277 GST_OBJECT_LOCK (self);
278 g_value_set_enum (value, priv->compliance);
279 GST_OBJECT_UNLOCK (self);
282 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
/* GObject set_property: writes priv->compliance under the object lock. */
288 gst_h264_decoder_set_property (GObject * object, guint property_id,
289 const GValue * value, GParamSpec * pspec)
291 GstH264Decoder *self = GST_H264_DECODER (object);
292 GstH264DecoderPrivate *priv = self->priv;
294 switch (property_id) {
295 case PROP_COMPLIANCE:
296 GST_OBJECT_LOCK (self);
297 priv->compliance = g_value_get_enum (value);
298 GST_OBJECT_UNLOCK (self);
301 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
/* Class init: wires up the GObject property accessors/finalize and the
 * GstVideoDecoder vfuncs, then installs the "compliance" property. */
307 gst_h264_decoder_class_init (GstH264DecoderClass * klass)
309 GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (klass);
310 GObjectClass *object_class = G_OBJECT_CLASS (klass);
312 object_class->finalize = GST_DEBUG_FUNCPTR (gst_h264_decoder_finalize);
313 object_class->get_property = gst_h264_decoder_get_property;
314 object_class->set_property = gst_h264_decoder_set_property;
316 decoder_class->start = GST_DEBUG_FUNCPTR (gst_h264_decoder_start);
317 decoder_class->stop = GST_DEBUG_FUNCPTR (gst_h264_decoder_stop);
318 decoder_class->set_format = GST_DEBUG_FUNCPTR (gst_h264_decoder_set_format);
319 decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_h264_decoder_negotiate);
320 decoder_class->finish = GST_DEBUG_FUNCPTR (gst_h264_decoder_finish);
321 decoder_class->flush = GST_DEBUG_FUNCPTR (gst_h264_decoder_flush);
322 decoder_class->drain = GST_DEBUG_FUNCPTR (gst_h264_decoder_drain);
323 decoder_class->handle_frame =
324 GST_DEBUG_FUNCPTR (gst_h264_decoder_handle_frame);
327 * GstH264Decoder:compliance:
329 * The compliance controls the behavior of the decoder to handle some
330 * subtle cases and contexts, such as the low-latency DPB bumping or
331 * mapping the baseline profile as the constrained-baseline profile,
/* G_PARAM_CONSTRUCT ensures the default (AUTO) is applied at construct
 * time even when the caller never sets the property. */
336 g_object_class_install_property (object_class, PROP_COMPLIANCE,
337 g_param_spec_enum ("compliance", "Decoder Compliance",
338 "The decoder's behavior in compliance with the h264 spec.",
339 GST_TYPE_H264_DECODER_COMPLIANCE, GST_H264_DECODER_COMPLIANCE_AUTO,
340 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT));
/* Instance init: marks the base videodecoder as packetized/needs-format,
 * and pre-allocates all reference-picture arrays (32 slots each, each
 * clearing its GstH264Picture refs via gst_clear_h264_picture) plus the
 * delayed-output queue. */
344 gst_h264_decoder_init (GstH264Decoder * self)
346 GstH264DecoderPrivate *priv;
348 gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), TRUE);
349 gst_video_decoder_set_needs_format (GST_VIDEO_DECODER (self), TRUE);
351 self->priv = priv = gst_h264_decoder_get_instance_private (self);
/* G_MININT32 sentinel: "no picture output yet". */
353 priv->last_output_poc = G_MININT32;
355 priv->ref_pic_list_p0 = g_array_sized_new (FALSE, TRUE,
356 sizeof (GstH264Picture *), 32);
357 g_array_set_clear_func (priv->ref_pic_list_p0,
358 (GDestroyNotify) gst_clear_h264_picture);
360 priv->ref_pic_list_b0 = g_array_sized_new (FALSE, TRUE,
361 sizeof (GstH264Picture *), 32);
362 g_array_set_clear_func (priv->ref_pic_list_b0,
363 (GDestroyNotify) gst_clear_h264_picture);
365 priv->ref_pic_list_b1 = g_array_sized_new (FALSE, TRUE,
366 sizeof (GstH264Picture *), 32);
367 g_array_set_clear_func (priv->ref_pic_list_b1,
368 (GDestroyNotify) gst_clear_h264_picture);
370 priv->ref_frame_list_0_short_term = g_array_sized_new (FALSE, TRUE,
371 sizeof (GstH264Picture *), 32);
372 g_array_set_clear_func (priv->ref_frame_list_0_short_term,
373 (GDestroyNotify) gst_clear_h264_picture);
375 priv->ref_frame_list_1_short_term = g_array_sized_new (FALSE, TRUE,
376 sizeof (GstH264Picture *), 32);
377 g_array_set_clear_func (priv->ref_frame_list_1_short_term,
378 (GDestroyNotify) gst_clear_h264_picture);
380 priv->ref_frame_list_long_term = g_array_sized_new (FALSE, TRUE,
381 sizeof (GstH264Picture *), 32);
382 g_array_set_clear_func (priv->ref_frame_list_long_term,
383 (GDestroyNotify) gst_clear_h264_picture);
/* Per-slice lists hold borrowed pointers, hence no clear func. */
385 priv->ref_pic_list0 = g_array_sized_new (FALSE, TRUE,
386 sizeof (GstH264Picture *), 32);
387 priv->ref_pic_list1 = g_array_sized_new (FALSE, TRUE,
388 sizeof (GstH264Picture *), 32);
/* NOTE(review): the "priv->output_queue =" assignment line is not
 * visible in this chunk; the call below is its right-hand side. */
391 gst_queue_array_new_for_struct (sizeof (GstH264DecoderOutputFrame), 1);
392 gst_queue_array_set_clear_func (priv->output_queue,
393 (GDestroyNotify) gst_h264_decoder_clear_output_frame);
/* Finalize: drops every array and the output queue allocated in init,
 * then chains up to the parent class. */
397 gst_h264_decoder_finalize (GObject * object)
399 GstH264Decoder *self = GST_H264_DECODER (object);
400 GstH264DecoderPrivate *priv = self->priv;
402 g_array_unref (priv->ref_pic_list_p0);
403 g_array_unref (priv->ref_pic_list_b0);
404 g_array_unref (priv->ref_pic_list_b1);
405 g_array_unref (priv->ref_frame_list_0_short_term);
406 g_array_unref (priv->ref_frame_list_1_short_term);
407 g_array_unref (priv->ref_frame_list_long_term);
408 g_array_unref (priv->ref_pic_list0);
409 g_array_unref (priv->ref_pic_list1);
410 gst_queue_array_free (priv->output_queue);
412 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Resets the reorder-tracking counters used for latency reporting. */
416 gst_h264_decoder_reset_latency_infos (GstH264Decoder * self)
418 GstH264DecoderPrivate *priv = self->priv;
420 priv->max_reorder_count = 0;
421 priv->last_reorder_frame_number = 0;
/* Returns the decoder to its pristine state: frees the input state,
 * NAL parser, DPB and any cached field picture, and restores defaults
 * (4-byte NAL length prefix). Called from start() and stop(). */
427 gst_h264_decoder_reset (GstH264Decoder * self)
429 GstH264DecoderPrivate *priv = self->priv;
431 g_clear_pointer (&self->input_state, gst_video_codec_state_unref);
432 g_clear_pointer (&priv->parser, gst_h264_nal_parser_free);
433 g_clear_pointer (&priv->dpb, gst_h264_dpb_free);
434 gst_clear_h264_picture (&priv->last_field);
436 priv->profile_idc = 0;
/* AVC default; overwritten when codec_data is parsed. */
439 priv->nal_length_size = 4;
441 gst_h264_decoder_reset_latency_infos (self);
/* GstVideoDecoder::start — reset, then create a fresh parser and DPB. */
445 gst_h264_decoder_start (GstVideoDecoder * decoder)
447 GstH264Decoder *self = GST_H264_DECODER (decoder);
448 GstH264DecoderPrivate *priv = self->priv;
450 gst_h264_decoder_reset (self);
452 priv->parser = gst_h264_nal_parser_new ();
453 priv->dpb = gst_h264_dpb_new ();
/* GstVideoDecoder::stop — releases all per-stream state via reset. */
459 gst_h264_decoder_stop (GstVideoDecoder * decoder)
461 GstH264Decoder *self = GST_H264_DECODER (decoder);
463 gst_h264_decoder_reset (self);
/* Clear func for the output queue: releases the pending codec frame back
 * to the base class and drops the picture reference. */
469 gst_h264_decoder_clear_output_frame (GstH264DecoderOutputFrame * output_frame)
474 if (output_frame->frame) {
475 gst_video_decoder_release_frame (GST_VIDEO_DECODER (output_frame->self),
476 output_frame->frame);
477 output_frame->frame = NULL;
480 gst_clear_h264_picture (&output_frame->picture);
/* Empties the DPB without outputting pictures. When @flush is FALSE the
 * base class still holds the GstVideoCodecFrames, so each bumped picture's
 * frame is looked up and released manually before the DPB is cleared. */
484 gst_h264_decoder_clear_dpb (GstH264Decoder * self, gboolean flush)
486 GstVideoDecoder *decoder = GST_VIDEO_DECODER (self);
487 GstH264DecoderPrivate *priv = self->priv;
488 GstH264Picture *picture;
490 /* If we are not flushing now, videodecoder baseclass will hold
491 * GstVideoCodecFrame. Release frames manually */
493 while ((picture = gst_h264_dpb_bump (priv->dpb, TRUE)) != NULL) {
494 GstVideoCodecFrame *frame = gst_video_decoder_get_frame (decoder,
495 picture->system_frame_number);
498 gst_video_decoder_release_frame (decoder, frame);
499 gst_h264_picture_unref (picture);
/* Drop everything else: queued outputs, slice ref lists, the cached
 * first field, and finally the DPB contents themselves. */
503 gst_queue_array_clear (priv->output_queue);
504 gst_h264_decoder_clear_ref_pic_lists (self);
505 gst_clear_h264_picture (&priv->last_field);
506 gst_h264_dpb_clear (priv->dpb);
507 priv->last_output_poc = G_MININT32;
/* GstVideoDecoder::flush — discard all DPB contents (flush mode TRUE:
 * the base class no longer holds the frames). */
511 gst_h264_decoder_flush (GstVideoDecoder * decoder)
513 GstH264Decoder *self = GST_H264_DECODER (decoder);
515 gst_h264_decoder_clear_dpb (self, TRUE);
/* GstVideoDecoder::drain — outputs all pending pictures; the DPB ends
 * up empty as a side effect of drain_internal. */
521 gst_h264_decoder_drain (GstVideoDecoder * decoder)
523 GstH264Decoder *self = GST_H264_DECODER (decoder);
525 /* dpb will be cleared by this method */
526 return gst_h264_decoder_drain_internal (self);
/* GstVideoDecoder::finish — identical to drain for this decoder. */
530 gst_h264_decoder_finish (GstVideoDecoder * decoder)
532 return gst_h264_decoder_drain (decoder);
/* GstVideoDecoder::handle_frame — maps the input buffer, walks every NAL
 * in it (length-prefixed AVC or Annex-B byte-stream depending on
 * priv->in_format), feeds each NAL to decode_nal, then finishes the
 * current picture. On decode error the frame is dropped and any partial
 * picture discarded. */
536 gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
537 GstVideoCodecFrame * frame)
539 GstH264Decoder *self = GST_H264_DECODER (decoder);
540 GstH264DecoderPrivate *priv = self->priv;
541 GstBuffer *in_buf = frame->input_buffer;
543 GstH264ParserResult pres;
545 GstFlowReturn decode_ret = GST_FLOW_OK;
547 GST_LOG_OBJECT (self,
548 "handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
549 GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
550 GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
552 priv->current_frame = frame;
554 gst_buffer_map (in_buf, &map, GST_MAP_READ);
555 if (priv->in_format == GST_H264_DECODER_FORMAT_AVC) {
/* AVC: NALs are prefixed with a nal_length_size-byte length field. */
556 pres = gst_h264_parser_identify_nalu_avc (priv->parser,
557 map.data, 0, map.size, priv->nal_length_size, &nalu);
559 while (pres == GST_H264_PARSER_OK && decode_ret == GST_FLOW_OK) {
560 decode_ret = gst_h264_decoder_decode_nal (self, &nalu);
562 pres = gst_h264_parser_identify_nalu_avc (priv->parser,
563 map.data, nalu.offset + nalu.size, map.size, priv->nal_length_size,
/* Byte-stream: scan for start codes; NO_NAL_END just means the last
 * NAL runs to the end of the buffer, which is fine for packetized
 * input, so treat it as OK. */
567 pres = gst_h264_parser_identify_nalu (priv->parser,
568 map.data, 0, map.size, &nalu);
570 if (pres == GST_H264_PARSER_NO_NAL_END)
571 pres = GST_H264_PARSER_OK;
573 while (pres == GST_H264_PARSER_OK && decode_ret == GST_FLOW_OK) {
574 decode_ret = gst_h264_decoder_decode_nal (self, &nalu);
576 pres = gst_h264_parser_identify_nalu (priv->parser,
577 map.data, nalu.offset + nalu.size, map.size, &nalu);
579 if (pres == GST_H264_PARSER_NO_NAL_END)
580 pres = GST_H264_PARSER_OK;
584 gst_buffer_unmap (in_buf, &map);
/* Error path: report (for GST_FLOW_ERROR only), drop the frame and
 * throw away the partially-decoded picture. */
586 if (decode_ret != GST_FLOW_OK) {
587 if (decode_ret == GST_FLOW_ERROR) {
588 GST_VIDEO_DECODER_ERROR (self, 1, STREAM, DECODE,
589 ("Failed to decode data"), (NULL), decode_ret);
592 gst_video_decoder_drop_frame (decoder, frame);
593 gst_clear_h264_picture (&priv->current_picture);
594 priv->current_frame = NULL;
/* Success path: complete the picture (may queue/push output). */
599 gst_h264_decoder_finish_current_picture (self, &decode_ret);
600 gst_video_codec_frame_unref (frame);
601 priv->current_frame = NULL;
603 if (decode_ret == GST_FLOW_ERROR) {
604 GST_VIDEO_DECODER_ERROR (self, 1, STREAM, DECODE,
605 ("Failed to decode data"), (NULL), decode_ret);
/* Parses an SPS NAL, lets process_sps act on it (resolution/profile
 * handling), then stores it in the parser's SPS table. The parsed SPS
 * is always cleared before returning. */
612 gst_h264_decoder_parse_sps (GstH264Decoder * self, GstH264NalUnit * nalu)
614 GstH264DecoderPrivate *priv = self->priv;
616 GstH264ParserResult pres;
619 pres = gst_h264_parse_sps (nalu, &sps);
620 if (pres != GST_H264_PARSER_OK) {
621 GST_WARNING_OBJECT (self, "Failed to parse SPS, result %d", pres);
622 return GST_FLOW_ERROR;
625 GST_LOG_OBJECT (self, "SPS parsed");
627 ret = gst_h264_decoder_process_sps (self, &sps);
628 if (ret != GST_FLOW_OK) {
629 GST_WARNING_OBJECT (self, "Failed to process SPS");
630 } else if (gst_h264_parser_update_sps (priv->parser,
631 &sps) != GST_H264_PARSER_OK) {
632 GST_WARNING_OBJECT (self, "Failed to update SPS");
633 ret = GST_FLOW_ERROR;
636 gst_h264_sps_clear (&sps);
/* Parses a PPS NAL and stores it in the parser's PPS table. Streams
 * using FMO (num_slice_groups_minus1 > 0) are rejected as unsupported.
 * The parsed PPS is always cleared before returning. */
642 gst_h264_decoder_parse_pps (GstH264Decoder * self, GstH264NalUnit * nalu)
644 GstH264DecoderPrivate *priv = self->priv;
646 GstH264ParserResult pres;
647 GstFlowReturn ret = GST_FLOW_OK;
649 pres = gst_h264_parse_pps (priv->parser, nalu, &pps);
650 if (pres != GST_H264_PARSER_OK) {
651 GST_WARNING_OBJECT (self, "Failed to parse PPS, result %d", pres);
652 return GST_FLOW_ERROR;
655 GST_LOG_OBJECT (self, "PPS parsed");
657 if (pps.num_slice_groups_minus1 > 0) {
658 GST_FIXME_OBJECT (self, "FMO is not supported");
659 ret = GST_FLOW_ERROR;
660 } else if (gst_h264_parser_update_pps (priv->parser, &pps)
661 != GST_H264_PARSER_OK) {
662 GST_WARNING_OBJECT (self, "Failed to update PPS");
663 ret = GST_FLOW_ERROR;
666 gst_h264_pps_clear (&pps);
/* Parses avcC-style codec_data: extracts the NAL length-prefix size and
 * feeds every contained SPS and PPS through the regular parse paths.
 * The config record is freed on all exit paths. */
672 gst_h264_decoder_parse_codec_data (GstH264Decoder * self, const guint8 * data,
675 GstH264DecoderPrivate *priv = self->priv;
676 GstH264DecoderConfigRecord *config = NULL;
677 GstFlowReturn ret = GST_FLOW_OK;
678 GstH264NalUnit *nalu;
681 if (gst_h264_parser_parse_decoder_config_record (priv->parser, data, size,
682 &config) != GST_H264_PARSER_OK) {
683 GST_WARNING_OBJECT (self, "Failed to parse codec-data");
684 return GST_FLOW_ERROR;
/* lengthSizeMinusOne + 1 = bytes per NAL length prefix in AVC input. */
687 priv->nal_length_size = config->length_size_minus_one + 1;
688 for (i = 0; i < config->sps->len; i++) {
689 nalu = &g_array_index (config->sps, GstH264NalUnit, i);
691 /* TODO: handle subset sps for SVC/MVC. That would need to be stored in
692 * separate array instead of putting SPS/subset-SPS into a single array */
693 if (nalu->type != GST_H264_NAL_SPS)
696 ret = gst_h264_decoder_parse_sps (self, nalu);
697 if (ret != GST_FLOW_OK) {
698 GST_WARNING_OBJECT (self, "Failed to parse SPS");
703 for (i = 0; i < config->pps->len; i++) {
704 nalu = &g_array_index (config->pps, GstH264NalUnit, i);
705 if (nalu->type != GST_H264_NAL_PPS)
708 ret = gst_h264_decoder_parse_pps (self, nalu);
709 if (ret != GST_FLOW_OK) {
710 GST_WARNING_OBJECT (self, "Failed to parse PPS");
716 gst_h264_decoder_config_record_free (config);
/* Sanity-checks a slice before it is decoded: a slice that starts a new
 * picture (no current_picture yet) must begin at macroblock 0.
 * NOTE(review): the function tail (return paths) is not visible in this
 * chunk. */
721 gst_h264_decoder_preprocess_slice (GstH264Decoder * self, GstH264Slice * slice)
723 GstH264DecoderPrivate *priv = self->priv;
725 if (!priv->current_picture) {
726 if (slice->header.first_mb_in_slice != 0) {
727 GST_ERROR_OBJECT (self, "Invalid stream, first_mb_in_slice %d",
728 slice->header.first_mb_in_slice);
/* Derives pic_num / long_term_pic_num / frame_num_wrap for every
 * reference picture in the DPB relative to the current picture
 * (H.264 spec clause 8.2.4.1). Field pictures get the 2x(+1) mapping;
 * same-parity fields take the +1 variant. */
737 gst_h264_decoder_update_pic_nums (GstH264Decoder * self,
738 GstH264Picture * current_picture, gint frame_num)
740 GstH264DecoderPrivate *priv = self->priv;
741 GArray *dpb = gst_h264_dpb_get_pictures_all (priv->dpb);
744 for (i = 0; i < dpb->len; i++) {
745 GstH264Picture *picture = g_array_index (dpb, GstH264Picture *, i);
/* Non-reference pictures carry no picture numbers. */
747 if (!GST_H264_PICTURE_IS_REF (picture))
750 if (GST_H264_PICTURE_IS_LONG_TERM_REF (picture)) {
751 if (GST_H264_PICTURE_IS_FRAME (current_picture))
752 picture->long_term_pic_num = picture->long_term_frame_idx;
753 else if (current_picture->field == picture->field)
754 picture->long_term_pic_num = 2 * picture->long_term_frame_idx + 1;
756 picture->long_term_pic_num = 2 * picture->long_term_frame_idx;
/* Short-term: wrap frame_num below the current one (8.2.4.1). */
758 if (picture->frame_num > frame_num)
759 picture->frame_num_wrap = picture->frame_num - priv->max_frame_num;
761 picture->frame_num_wrap = picture->frame_num;
763 if (GST_H264_PICTURE_IS_FRAME (current_picture))
764 picture->pic_num = picture->frame_num_wrap;
765 else if (picture->field == current_picture->field)
766 picture->pic_num = 2 * picture->frame_num_wrap + 1;
768 picture->pic_num = 2 * picture->frame_num_wrap;
/* Splits a frame picture into a complementary field pair for an
 * interlaced DPB: @picture becomes the first field (by POC order) and a
 * new second-field picture is created via the subclass. Returns the new
 * other_field (NULL-return path is not visible in this chunk). */
775 static GstH264Picture *
776 gst_h264_decoder_split_frame (GstH264Decoder * self, GstH264Picture * picture)
778 GstH264Picture *other_field;
780 g_assert (GST_H264_PICTURE_IS_FRAME (picture));
782 other_field = gst_h264_decoder_new_field_picture (self, picture);
784 GST_WARNING_OBJECT (self,
785 "Couldn't split frame into complementary field pair");
789 GST_LOG_OBJECT (self, "Split picture %p, poc %d, frame num %d",
790 picture, picture->pic_order_cnt, picture->frame_num);
792 /* FIXME: enhance TFF decision by using picture timing SEI */
/* The field with the smaller order count is the one displayed first. */
793 if (picture->top_field_order_cnt < picture->bottom_field_order_cnt) {
794 picture->field = GST_H264_PICTURE_FIELD_TOP_FIELD;
795 picture->pic_order_cnt = picture->top_field_order_cnt;
797 other_field->field = GST_H264_PICTURE_FIELD_BOTTOM_FIELD;
798 other_field->pic_order_cnt = picture->bottom_field_order_cnt;
800 picture->field = GST_H264_PICTURE_FIELD_BOTTOM_FIELD;
801 picture->pic_order_cnt = picture->bottom_field_order_cnt;
803 other_field->field = GST_H264_PICTURE_FIELD_TOP_FIELD;
804 other_field->pic_order_cnt = picture->top_field_order_cnt;
/* The second field inherits everything else from the source frame. */
807 other_field->top_field_order_cnt = picture->top_field_order_cnt;
808 other_field->bottom_field_order_cnt = picture->bottom_field_order_cnt;
809 other_field->frame_num = picture->frame_num;
810 other_field->ref = picture->ref;
811 other_field->nonexisting = picture->nonexisting;
812 other_field->system_frame_number = picture->system_frame_number;
813 other_field->field_pic_flag = picture->field_pic_flag;
/* Low-latency output path that bypasses the DPB: frames are output
 * immediately; fields are cached in priv->last_field until their pair
 * arrives, then output as one picture. Mismatched or orphaned second
 * fields are treated as stream errors. */
819 output_picture_directly (GstH264Decoder * self, GstH264Picture * picture,
822 GstH264DecoderPrivate *priv = self->priv;
823 GstH264Picture *out_pic = NULL;
824 GstFlowReturn flow_ret = GST_FLOW_OK;
826 g_assert (ret != NULL);
828 if (GST_H264_PICTURE_IS_FRAME (picture)) {
829 g_assert (priv->last_field == NULL);
830 out_pic = g_steal_pointer (&picture);
834 if (priv->last_field == NULL) {
/* A second field with no cached first field is invalid. */
835 if (picture->second_field) {
836 GST_WARNING ("Set the last output %p poc:%d, without first field",
837 picture, picture->pic_order_cnt);
839 flow_ret = GST_FLOW_ERROR;
843 /* Just cache the first field. */
844 priv->last_field = g_steal_pointer (&picture);
/* We have a cached field: the incoming one must be its declared pair. */
846 if (!picture->second_field || !picture->other_field
847 || picture->other_field != priv->last_field) {
848 GST_WARNING ("The last field %p poc:%d is not the pair of the "
849 "current field %p poc:%d",
850 priv->last_field, priv->last_field->pic_order_cnt,
851 picture, picture->pic_order_cnt);
853 gst_clear_h264_picture (&priv->last_field);
854 flow_ret = GST_FLOW_ERROR;
858 GST_TRACE ("Pair the last field %p poc:%d and the current"
860 priv->last_field, priv->last_field->pic_order_cnt,
861 picture, picture->pic_order_cnt);
863 out_pic = priv->last_field;
864 priv->last_field = NULL;
865 /* Link each field. */
866 out_pic->other_field = picture;
/* Record as last output for DPB bookkeeping, then push downstream. */
871 gst_h264_dpb_set_last_output (priv->dpb, out_pic);
872 gst_h264_decoder_do_output_picture (self, out_pic, &flow_ret);
875 gst_clear_h264_picture (&picture);
877 UPDATE_FLOW_RETURN (ret, flow_ret);
/* Inserts @picture into the DPB. For interlaced streams, a cached first
 * field that was waiting in priv->last_field is inserted together with
 * its second field so the pair stays complete. */
881 add_picture_to_dpb (GstH264Decoder * self, GstH264Picture * picture)
883 GstH264DecoderPrivate *priv = self->priv;
885 if (!gst_h264_dpb_get_interlaced (priv->dpb)) {
886 g_assert (priv->last_field == NULL);
887 gst_h264_dpb_add (priv->dpb, picture);
891 /* The first field of the last picture may not be able to enter the
892 DPB if it is a non ref, but if the second field enters the DPB, we
893 need to add both of them. */
894 if (priv->last_field && picture->other_field == priv->last_field) {
895 gst_h264_dpb_add (priv->dpb, priv->last_field);
896 priv->last_field = NULL;
899 gst_h264_dpb_add (priv->dpb, picture);
/* Outputs pictures from the DPB while the DPB reports that bumping is
 * required at the given @bump_level, accumulating the flow return in
 * *@ret (never overwriting an earlier error). */
903 _bump_dpb (GstH264Decoder * self, GstH264DpbBumpMode bump_level,
904 GstH264Picture * current_picture, GstFlowReturn * ret)
906 GstH264DecoderPrivate *priv = self->priv;
908 g_assert (ret != NULL);
910 while (gst_h264_dpb_needs_bump (priv->dpb, current_picture, bump_level)) {
911 GstH264Picture *to_output;
913 to_output = gst_h264_dpb_bump (priv->dpb, FALSE);
/* Needing a bump but having nothing to output indicates DPB
 * inconsistency; warn (exit path not visible in this chunk). */
916 GST_WARNING_OBJECT (self, "Bumping is needed but no picture to output");
920 gst_h264_decoder_do_output_picture (self, to_output, ret);
/* Handles gaps in frame_num (H.264 spec clause 7.4.3 / 8.2.5.2): when
 * frame_num jumps by more than one, synthesizes "non-existing"
 * short-term reference pictures for every skipped frame_num so that
 * reference-list derivation stays consistent. Legitimate non-gap cases
 * (same frame_num, +1 increment, empty DPB) return early. */
925 gst_h264_decoder_handle_frame_num_gap (GstH264Decoder * self, gint frame_num)
927 GstH264DecoderPrivate *priv = self->priv;
928 const GstH264SPS *sps = priv->active_sps;
929 gint unused_short_term_frame_num;
932 GST_ERROR_OBJECT (self, "No active sps");
933 return GST_FLOW_ERROR;
936 if (priv->prev_ref_frame_num == frame_num) {
937 GST_TRACE_OBJECT (self,
938 "frame_num == PrevRefFrameNum (%d), not a gap", frame_num);
942 if (((priv->prev_ref_frame_num + 1) % priv->max_frame_num) == frame_num) {
943 GST_TRACE_OBJECT (self,
944 "frame_num == (PrevRefFrameNum + 1) %% MaxFrameNum (%d), not a gap",
949 if (gst_h264_dpb_get_size (priv->dpb) == 0) {
950 GST_TRACE_OBJECT (self, "DPB is empty, not a gap");
954 if (!sps->gaps_in_frame_num_value_allowed_flag) {
955 /* This is likely the case where some frames were dropped.
956 * then we need to keep decoding without error out */
957 GST_WARNING_OBJECT (self, "Invalid frame num %d, maybe frame drop",
963 GST_DEBUG_OBJECT (self, "Handling frame num gap %d -> %d (MaxFrameNum: %d)",
964 priv->prev_ref_frame_num, frame_num, priv->max_frame_num);
/* Walk each missing frame_num, creating a gap picture for it. */
967 unused_short_term_frame_num =
968 (priv->prev_ref_frame_num + 1) % priv->max_frame_num;
969 while (unused_short_term_frame_num != frame_num) {
970 GstH264Picture *picture = gst_h264_picture_new ();
971 GstFlowReturn ret = GST_FLOW_OK;
973 if (!gst_h264_decoder_init_gap_picture (self, picture,
974 unused_short_term_frame_num))
975 return GST_FLOW_ERROR;
977 gst_h264_decoder_update_pic_nums (self, picture,
978 unused_short_term_frame_num);
/* Gap pictures still participate in sliding-window marking
 * (8.2.5.3) so old references age out of the DPB. */
981 if (!gst_h264_decoder_sliding_window_picture_marking (self, picture)) {
982 GST_ERROR_OBJECT (self,
983 "Couldn't perform sliding window picture marking");
984 return GST_FLOW_ERROR;
987 gst_h264_dpb_delete_unused (priv->dpb);
989 _bump_dpb (self, GST_H264_DPB_BUMP_NORMAL_LATENCY, picture, &ret);
990 if (ret != GST_FLOW_OK)
993 /* the picture is short term ref, add to DPB. */
994 if (gst_h264_dpb_get_interlaced (priv->dpb)) {
995 GstH264Picture *other_field =
996 gst_h264_decoder_split_frame (self, picture);
998 add_picture_to_dpb (self, picture);
999 add_picture_to_dpb (self, other_field);
1001 add_picture_to_dpb (self, picture);
1004 unused_short_term_frame_num++;
1005 unused_short_term_frame_num %= priv->max_frame_num;
/* Initializes priv->current_picture from the current slice: copies the
 * slice-header fields, computes the picture order count, and stashes the
 * dec_ref_pic_marking data if adaptive reference marking must run after
 * this picture is decoded. */
1012 gst_h264_decoder_init_current_picture (GstH264Decoder * self)
1014 GstH264DecoderPrivate *priv = self->priv;
1016 if (!gst_h264_decoder_fill_picture_from_slice (self, &priv->current_slice,
1017 priv->current_picture)) {
1021 if (!gst_h264_decoder_calculate_poc (self, priv->current_picture))
1024 /* If the slice header indicates we will have to perform reference marking
1025 * process after this picture is decoded, store required data for that
1027 if (priv->current_slice.header.
1028 dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag) {
1029 priv->current_picture->dec_ref_pic_marking =
1030 priv->current_slice.header.dec_ref_pic_marking;
/* Per-picture setup before slice decoding: captures a pending input-state
 * change as a discont state on the picture, handles frame_num gaps,
 * fills the picture from the slice, flushes the DPB on IDR (draining or
 * discarding per no_output_of_prior_pics_flag), derives picture numbers,
 * optionally prepares reference lists, then calls the subclass'
 * start_picture vfunc. */
1036 static GstFlowReturn
1037 gst_h264_decoder_start_current_picture (GstH264Decoder * self)
1039 GstH264DecoderClass *klass;
1040 GstH264DecoderPrivate *priv = self->priv;
1041 const GstH264SPS *sps;
1043 GstFlowReturn ret = GST_FLOW_OK;
1044 GstH264Picture *current_picture;
1046 g_assert (priv->current_picture != NULL);
1047 g_assert (priv->active_sps != NULL);
1048 g_assert (priv->active_pps != NULL);
1050 /* If subclass didn't update output state at this point,
1051 * marking this picture as a discont and stores current input state */
1052 if (priv->input_state_changed) {
1053 priv->current_picture->discont_state =
1054 gst_video_codec_state_ref (self->input_state);
1055 priv->input_state_changed = FALSE;
1058 sps = priv->active_sps;
1060 priv->max_frame_num = sps->max_frame_num;
1061 frame_num = priv->current_slice.header.frame_num;
/* Spec 8.2.1: PrevRefFrameNum is 0 for an IDR picture. */
1062 if (priv->current_slice.nalu.idr_pic_flag)
1063 priv->prev_ref_frame_num = 0;
1065 ret = gst_h264_decoder_handle_frame_num_gap (self, frame_num);
1066 if (ret != GST_FLOW_OK)
1069 if (!gst_h264_decoder_init_current_picture (self))
1070 return GST_FLOW_ERROR;
1072 current_picture = priv->current_picture;
1074 /* If the new picture is an IDR, flush DPB */
1075 if (current_picture->idr) {
1076 if (!current_picture->dec_ref_pic_marking.no_output_of_prior_pics_flag) {
1077 ret = gst_h264_decoder_drain_internal (self);
1078 if (ret != GST_FLOW_OK)
1081 /* C.4.4 Removal of pictures from the DPB before possible insertion
1082 * of the current picture
1084 * If decoded picture is IDR and no_output_of_prior_pics_flag is equal to 1
1085 * or is inferred to be equal to 1, all frame buffers in the DPB
1086 * are emptied without output of the pictures they contain,
1087 * and DPB fullness is set to 0.
1089 gst_h264_decoder_clear_dpb (self, FALSE);
1093 gst_h264_decoder_update_pic_nums (self, current_picture, frame_num);
1095 if (priv->process_ref_pic_lists)
1096 gst_h264_decoder_prepare_ref_pic_lists (self, current_picture);
1098 klass = GST_H264_DECODER_GET_CLASS (self);
1099 if (klass->start_picture) {
1100 ret = klass->start_picture (self, priv->current_picture,
1101 &priv->current_slice, priv->dpb);
1103 if (ret != GST_FLOW_OK) {
1104 GST_WARNING_OBJECT (self, "subclass does not want to start picture");
/* Create the second-field picture paired with @picture (the first field).
 * Requires the subclass to implement the new_field_picture() vfunc;
 * otherwise interlaced streams are not supported.
 * NOTE(review): some lines (early returns / closing braces) are elided in
 * this extract; comments only describe the code that is visible. */
1112 static GstH264Picture *
1113 gst_h264_decoder_new_field_picture (GstH264Decoder * self,
1114 GstH264Picture * picture)
1116 GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
1117 GstH264Picture *new_picture;
1119 if (!klass->new_field_picture) {
1120 GST_WARNING_OBJECT (self, "Subclass does not support interlaced stream");
1124 new_picture = gst_h264_picture_new ();
/* Skip the vfunc for fake gap-filling pictures so the subclass never
 * sees a picture that has no backing data */
1125 /* don't confuse subclass by non-existing picture */
1126 if (!picture->nonexisting) {
1129 ret = klass->new_field_picture (self, picture, new_picture);
1130 if (ret != GST_FLOW_OK) {
1131 GST_WARNING_OBJECT (self, "Subclass couldn't handle new field picture");
1132 gst_h264_picture_unref (new_picture);
/* Link the pair: the newly created picture is the second field */
1138 new_picture->other_field = picture;
1139 new_picture->second_field = TRUE;
/* Determine whether @slice is the second field of a previously decoded
 * first-field picture.  On success *first_field holds a new reference on
 * that first field, or stays NULL when the slice starts a frame or a new
 * first field. */
1145 gst_h264_decoder_find_first_field_picture (GstH264Decoder * self,
1146 GstH264Slice * slice, GstH264Picture ** first_field)
1148 GstH264DecoderPrivate *priv = self->priv;
1149 const GstH264SliceHdr *slice_hdr = &slice->header;
1150 GstH264Picture *prev_field;
1153 *first_field = NULL;
/* Only an interlaced DPB can be holding a dangling first field */
1156 if (gst_h264_dpb_get_interlaced (priv->dpb)) {
1157 if (priv->last_field) {
1158 prev_field = priv->last_field;
1160 } else if (gst_h264_dpb_get_size (priv->dpb) > 0) {
1161 GstH264Picture *prev_picture;
1164 pictures = gst_h264_dpb_get_pictures_all (priv->dpb);
1166 g_array_index (pictures, GstH264Picture *, pictures->len - 1);
1167 g_array_unref (pictures); /* prev_picture should be held */
1169 /* Previous picture was a field picture. */
1170 if (!GST_H264_PICTURE_IS_FRAME (prev_picture)
1171 && !prev_picture->other_field) {
1172 prev_field = prev_picture;
1177 g_assert (priv->last_field == NULL);
1180 /* This is not a field picture */
1181 if (!slice_hdr->field_pic_flag) {
1185 GST_WARNING_OBJECT (self, "Previous picture %p (poc %d) is not complete",
1186 prev_field, prev_field->pic_order_cnt);
1190 /* OK, this is the first field. */
/* A second field must share the first field's frame_num (spec 7.4.3) —
 * NOTE(review): assumed from the check below; confirm against spec */
1194 if (prev_field->frame_num != slice_hdr->frame_num) {
1195 GST_WARNING_OBJECT (self, "Previous picture %p (poc %d) is not complete",
1196 prev_field, prev_field->pic_order_cnt);
1199 GstH264PictureField current_field = slice_hdr->bottom_field_flag ?
1200 GST_H264_PICTURE_FIELD_BOTTOM_FIELD : GST_H264_PICTURE_FIELD_TOP_FIELD;
/* Complementary fields must have opposite parity.
 * NOTE(review): "Currnet" typo below is a runtime log string; left as-is */
1202 if (current_field == prev_field->field) {
1203 GST_WARNING_OBJECT (self,
1204 "Currnet picture and previous picture have identical field %d",
1210 *first_field = gst_h264_picture_ref (prev_field);
1215 gst_clear_h264_picture (&priv->last_field);
1217 /* FIXME: implement fill gap field picture if it is already in DPB */
/* Parse a slice NALU, detect picture boundaries (including mid-frame field
 * boundaries), create a new GstH264Picture when needed, and decode the
 * slice.  Updates priv->active_sps / priv->active_pps from the slice
 * header. */
1223 static GstFlowReturn
1224 gst_h264_decoder_parse_slice (GstH264Decoder * self, GstH264NalUnit * nalu)
1226 GstH264DecoderPrivate *priv = self->priv;
1227 GstH264ParserResult pres = GST_H264_PARSER_OK;
1228 GstFlowReturn ret = GST_FLOW_OK;
1230 memset (&priv->current_slice, 0, sizeof (GstH264Slice));
1232 pres = gst_h264_parser_parse_slice_hdr (priv->parser, nalu,
1233 &priv->current_slice.header, TRUE, TRUE);
1235 if (pres != GST_H264_PARSER_OK) {
1236 GST_ERROR_OBJECT (self, "Failed to parse slice header, ret %d", pres);
/* Don't leave a half-parsed slice header behind on failure */
1237 memset (&priv->current_slice, 0, sizeof (GstH264Slice));
1239 return GST_FLOW_ERROR;
1242 priv->current_slice.nalu = *nalu;
1244 if (!gst_h264_decoder_preprocess_slice (self, &priv->current_slice))
1245 return GST_FLOW_ERROR;
1247 priv->active_pps = priv->current_slice.header.pps;
1248 priv->active_sps = priv->active_pps->sequence;
1250 /* Check whether field picture boundary within given codec frame.
1251 * This might happen in case that upstream sent buffer per frame unit,
1252 * not picture unit (i.e., AU unit).
1253 * If AU boundary is detected, then finish first field picture we decoded
1254 * in this chain, we should finish the current picture and
1255 * start new field picture decoding */
1256 if (gst_h264_dpb_get_interlaced (priv->dpb) && priv->current_picture &&
1257 !GST_H264_PICTURE_IS_FRAME (priv->current_picture) &&
1258 !priv->current_picture->second_field) {
1259 GstH264PictureField prev_field = priv->current_picture->field;
1260 GstH264PictureField cur_field = GST_H264_PICTURE_FIELD_FRAME;
1261 if (priv->current_slice.header.field_pic_flag)
1262 cur_field = priv->current_slice.header.bottom_field_flag ?
1263 GST_H264_PICTURE_FIELD_BOTTOM_FIELD :
1264 GST_H264_PICTURE_FIELD_TOP_FIELD;
1266 if (cur_field != prev_field) {
1267 GST_LOG_OBJECT (self,
1268 "Found new field picture, finishing the first field picture");
1269 gst_h264_decoder_finish_current_picture (self, &ret);
/* No current picture: this slice starts a new picture (frame, first
 * field, or second field of a pending first field) */
1273 if (!priv->current_picture) {
1274 GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
1275 GstH264Picture *picture = NULL;
1276 GstH264Picture *first_field = NULL;
/* NOTE(review): this inner 'ret' shadows the outer 'ret' declared above,
 * so any flow error recorded by finish_current_picture() is masked within
 * this scope — verify this is intentional */
1277 GstFlowReturn ret = GST_FLOW_OK;
1279 g_assert (priv->current_frame);
1281 if (!gst_h264_decoder_find_first_field_picture (self,
1282 &priv->current_slice, &first_field)) {
1283 GST_ERROR_OBJECT (self, "Couldn't find or determine first picture");
1284 return GST_FLOW_ERROR;
1288 picture = gst_h264_decoder_new_field_picture (self, first_field);
1289 gst_h264_picture_unref (first_field);
1292 GST_ERROR_OBJECT (self, "Couldn't duplicate the first field picture");
1293 return GST_FLOW_ERROR;
1296 picture = gst_h264_picture_new ();
1298 if (klass->new_picture)
1299 ret = klass->new_picture (self, priv->current_frame, picture);
1301 if (ret != GST_FLOW_OK) {
1302 GST_WARNING_OBJECT (self, "subclass does not want accept new picture");
1303 priv->current_picture = NULL;
1304 gst_h264_picture_unref (picture);
/* Monotonic counter used later for latency / reorder tracking */
1308 priv->last_reorder_frame_number++;
1309 picture->reorder_frame_number = priv->last_reorder_frame_number;
1312 /* This allows accessing the frame from the picture. */
1313 picture->system_frame_number = priv->current_frame->system_frame_number;
1314 priv->current_picture = picture;
1316 ret = gst_h264_decoder_start_current_picture (self);
1317 if (ret != GST_FLOW_OK) {
1318 GST_WARNING_OBJECT (self, "start picture failed");
1323 return gst_h264_decoder_decode_slice (self);
/* Dispatch a single parsed NAL unit to the matching handler
 * (SPS / PPS / the various slice types). */
1326 static GstFlowReturn
1327 gst_h264_decoder_decode_nal (GstH264Decoder * self, GstH264NalUnit * nalu)
1329 GstFlowReturn ret = GST_FLOW_OK;
1331 GST_LOG_OBJECT (self, "Parsed nal type: %d, offset %d, size %d",
1332 nalu->type, nalu->offset, nalu->size);
1334 switch (nalu->type) {
1335 case GST_H264_NAL_SPS:
1336 ret = gst_h264_decoder_parse_sps (self, nalu);
1338 case GST_H264_NAL_PPS:
1339 ret = gst_h264_decoder_parse_pps (self, nalu);
/* All slice variants (incl. data partitions and IDR) share one handler */
1341 case GST_H264_NAL_SLICE:
1342 case GST_H264_NAL_SLICE_DPA:
1343 case GST_H264_NAL_SLICE_DPB:
1344 case GST_H264_NAL_SLICE_DPC:
1345 case GST_H264_NAL_SLICE_IDR:
1346 case GST_H264_NAL_SLICE_EXT:
1347 ret = gst_h264_decoder_parse_slice (self, nalu);
/* Extract stream-format (avc/avc3 vs. byte-stream) and alignment (au vs.
 * nal) from fixed sink caps.  Outputs default to *_NONE when the caps do
 * not carry the corresponding field. */
1357 gst_h264_decoder_format_from_caps (GstH264Decoder * self, GstCaps * caps,
1358 GstH264DecoderFormat * format, GstH264DecoderAlign * align)
1361 *format = GST_H264_DECODER_FORMAT_NONE;
1364 *align = GST_H264_DECODER_ALIGN_NONE;
1366 if (!gst_caps_is_fixed (caps)) {
1367 GST_WARNING_OBJECT (self, "Caps wasn't fixed");
1371 GST_DEBUG_OBJECT (self, "parsing caps: %" GST_PTR_FORMAT, caps);
1373 if (caps && gst_caps_get_size (caps) > 0) {
1374 GstStructure *s = gst_caps_get_structure (caps, 0);
1375 const gchar *str = NULL;
/* "avc" and "avc3" both map to length-prefixed AVC format */
1378 if ((str = gst_structure_get_string (s, "stream-format"))) {
1379 if (strcmp (str, "avc") == 0 || strcmp (str, "avc3") == 0)
1380 *format = GST_H264_DECODER_FORMAT_AVC;
1381 else if (strcmp (str, "byte-stream") == 0)
1382 *format = GST_H264_DECODER_FORMAT_BYTE;
1387 if ((str = gst_structure_get_string (s, "alignment"))) {
1388 if (strcmp (str, "au") == 0)
1389 *align = GST_H264_DECODER_ALIGN_AU;
1390 else if (strcmp (str, "nal") == 0)
1391 *align = GST_H264_DECODER_ALIGN_NAL;
/* GstVideoDecoder::set_format vfunc: store the new input state, probe
 * upstream liveness for low-latency mode, resolve stream-format/alignment
 * (with fallbacks when caps are incomplete) and parse codec_data if any. */
1398 gst_h264_decoder_set_format (GstVideoDecoder * decoder,
1399 GstVideoCodecState * state)
1401 GstH264Decoder *self = GST_H264_DECODER (decoder);
1402 GstH264DecoderPrivate *priv = self->priv;
1405 GST_DEBUG_OBJECT (decoder, "Set format");
/* Subclass must refresh its output state before the next picture */
1407 priv->input_state_changed = TRUE;
1409 if (self->input_state)
1410 gst_video_codec_state_unref (self->input_state);
1412 self->input_state = gst_video_codec_state_ref (state);
1414 /* in case live streaming, we will run on low-latency mode */
1415 priv->is_live = FALSE;
1416 query = gst_query_new_latency ();
1417 if (gst_pad_peer_query (GST_VIDEO_DECODER_SINK_PAD (self), query))
1418 gst_query_parse_latency (query, &priv->is_live, NULL, NULL);
1419 gst_query_unref (query);
1422 GST_DEBUG_OBJECT (self, "Live source, will run on low-latency mode");
1425 GstH264DecoderFormat format;
1426 GstH264DecoderAlign align;
1428 gst_h264_decoder_format_from_caps (self, state->caps, &format, &align);
/* Caps didn't specify stream-format: infer it from codec_data presence */
1430 if (format == GST_H264_DECODER_FORMAT_NONE) {
1431 /* codec_data implies avc */
1432 if (state->codec_data) {
1433 GST_WARNING_OBJECT (self,
1434 "video/x-h264 caps with codec_data but no stream-format=avc");
1435 format = GST_H264_DECODER_FORMAT_AVC;
1437 /* otherwise assume bytestream input */
1438 GST_WARNING_OBJECT (self,
1439 "video/x-h264 caps without codec_data or stream-format");
1440 format = GST_H264_DECODER_FORMAT_BYTE;
1444 if (format == GST_H264_DECODER_FORMAT_AVC) {
1445 /* AVC requires codec_data, AVC3 might have one and/or SPS/PPS inline */
1446 if (!state->codec_data) {
1447 /* Try it with size 4 anyway */
1448 priv->nal_length_size = 4;
1449 GST_WARNING_OBJECT (self,
1450 "avc format without codec data, assuming nal length size is 4");
1453 /* AVC implies alignment=au */
1454 if (align == GST_H264_DECODER_ALIGN_NONE)
1455 align = GST_H264_DECODER_ALIGN_AU;
1458 if (format == GST_H264_DECODER_FORMAT_BYTE && state->codec_data)
1459 GST_WARNING_OBJECT (self, "bytestream with codec data");
1461 priv->in_format = format;
1462 priv->align = align;
1465 if (state->codec_data) {
1468 gst_buffer_map (state->codec_data, &map, GST_MAP_READ);
1469 if (gst_h264_decoder_parse_codec_data (self, map.data, map.size) !=
1471 /* keep going without error.
1472 * Probably inband SPS/PPS might be valid data */
1473 GST_WARNING_OBJECT (self, "Failed to handle codec data");
1475 gst_buffer_unmap (state->codec_data, &map);
/* GstVideoDecoder::negotiate vfunc: once the subclass negotiates, the
 * pending input-state change has been consumed, so clear the flag before
 * chaining up to the parent implementation. */
1482 gst_h264_decoder_negotiate (GstVideoDecoder * decoder)
1484 GstH264Decoder *self = GST_H264_DECODER (decoder);
1486 /* output state must be updated by subclass using new input state already */
1487 self->priv->input_state_changed = FALSE;
1489 return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
/* Populate a freshly created GstH264Picture from the first slice of the
 * picture: IDR/field flags, reference marking, frame_num/pic_num and the
 * POC syntax elements matching the SPS pic_order_cnt_type. */
1493 gst_h264_decoder_fill_picture_from_slice (GstH264Decoder * self,
1494 const GstH264Slice * slice, GstH264Picture * picture)
1496 GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
1497 const GstH264SliceHdr *slice_hdr = &slice->header;
1498 const GstH264PPS *pps;
1499 const GstH264SPS *sps;
1501 pps = slice_hdr->pps;
1503 GST_ERROR_OBJECT (self, "No pps in slice header");
1507 sps = pps->sequence;
1509 GST_ERROR_OBJECT (self, "No sps in pps");
1513 picture->idr = slice->nalu.idr_pic_flag;
1514 picture->dec_ref_pic_marking = slice_hdr->dec_ref_pic_marking;
1515 picture->field_pic_flag = slice_hdr->field_pic_flag;
1518 picture->idr_pic_id = slice_hdr->idr_pic_id;
/* Field parity: bottom/top when field_pic_flag, FRAME otherwise */
1520 if (slice_hdr->field_pic_flag)
1522 slice_hdr->bottom_field_flag ?
1523 GST_H264_PICTURE_FIELD_BOTTOM_FIELD : GST_H264_PICTURE_FIELD_TOP_FIELD;
1525 picture->field = GST_H264_PICTURE_FIELD_FRAME;
1527 if (!GST_H264_PICTURE_IS_FRAME (picture) && !klass->new_field_picture) {
1528 GST_FIXME_OBJECT (self, "Subclass doesn't support interlace stream");
/* nal_ref_idc != 0 marks this picture as a (short term) reference */
1532 picture->nal_ref_idc = slice->nalu.ref_idc;
1533 if (slice->nalu.ref_idc != 0)
1534 gst_h264_picture_set_reference (picture,
1535 GST_H264_PICTURE_REF_SHORT_TERM, FALSE);
1537 picture->frame_num = slice_hdr->frame_num;
/* pic_num derivation: frames use frame_num, fields 2*frame_num+1 */
1540 if (!slice_hdr->field_pic_flag)
1541 picture->pic_num = slice_hdr->frame_num;
1543 picture->pic_num = 2 * slice_hdr->frame_num + 1;
/* Copy the POC syntax elements needed by calculate_poc() per type */
1545 picture->pic_order_cnt_type = sps->pic_order_cnt_type;
1546 switch (picture->pic_order_cnt_type) {
1548 picture->pic_order_cnt_lsb = slice_hdr->pic_order_cnt_lsb;
1549 picture->delta_pic_order_cnt_bottom =
1550 slice_hdr->delta_pic_order_cnt_bottom;
1553 picture->delta_pic_order_cnt0 = slice_hdr->delta_pic_order_cnt[0];
1554 picture->delta_pic_order_cnt1 = slice_hdr->delta_pic_order_cnt[1];
1559 g_assert_not_reached ();
/* Derive the picture order count (POC) of @picture following H.264 spec
 * section 8.2.1, for all three pic_order_cnt_type values, then fold the
 * top/bottom field order counts into picture->pic_order_cnt. */
1567 gst_h264_decoder_calculate_poc (GstH264Decoder * self, GstH264Picture * picture)
1569 GstH264DecoderPrivate *priv = self->priv;
1570 const GstH264SPS *sps = priv->active_sps;
1573 GST_ERROR_OBJECT (self, "No active SPS");
1577 switch (picture->pic_order_cnt_type) {
1579 /* See spec 8.2.1.1 */
1580 gint prev_pic_order_cnt_msb, prev_pic_order_cnt_lsb;
1581 gint max_pic_order_cnt_lsb;
1584 prev_pic_order_cnt_msb = prev_pic_order_cnt_lsb = 0;
/* After memory-management-control-operation 5 the previous reference
 * values are reset as specified in 8.2.1.1 */
1586 if (priv->prev_ref_has_memmgmnt5) {
1587 if (priv->prev_ref_field != GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
1588 prev_pic_order_cnt_msb = 0;
1589 prev_pic_order_cnt_lsb = priv->prev_ref_top_field_order_cnt;
1591 prev_pic_order_cnt_msb = 0;
1592 prev_pic_order_cnt_lsb = 0;
1595 prev_pic_order_cnt_msb = priv->prev_ref_pic_order_cnt_msb;
1596 prev_pic_order_cnt_lsb = priv->prev_ref_pic_order_cnt_lsb;
1600 max_pic_order_cnt_lsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
/* Detect lsb wrap-around and adjust the msb accordingly (8.2.1.1) */
1602 if ((picture->pic_order_cnt_lsb < prev_pic_order_cnt_lsb) &&
1603 (prev_pic_order_cnt_lsb - picture->pic_order_cnt_lsb >=
1604 max_pic_order_cnt_lsb / 2)) {
1605 picture->pic_order_cnt_msb =
1606 prev_pic_order_cnt_msb + max_pic_order_cnt_lsb;
1607 } else if ((picture->pic_order_cnt_lsb > prev_pic_order_cnt_lsb)
1608 && (picture->pic_order_cnt_lsb - prev_pic_order_cnt_lsb >
1609 max_pic_order_cnt_lsb / 2)) {
1610 picture->pic_order_cnt_msb =
1611 prev_pic_order_cnt_msb - max_pic_order_cnt_lsb;
1613 picture->pic_order_cnt_msb = prev_pic_order_cnt_msb;
1616 if (picture->field != GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
1617 picture->top_field_order_cnt =
1618 picture->pic_order_cnt_msb + picture->pic_order_cnt_lsb;
1621 switch (picture->field) {
1622 case GST_H264_PICTURE_FIELD_FRAME:
1623 picture->top_field_order_cnt = picture->pic_order_cnt_msb +
1624 picture->pic_order_cnt_lsb;
1625 picture->bottom_field_order_cnt = picture->top_field_order_cnt +
1626 picture->delta_pic_order_cnt_bottom;
1628 case GST_H264_PICTURE_FIELD_TOP_FIELD:
1629 picture->top_field_order_cnt = picture->pic_order_cnt_msb +
1630 picture->pic_order_cnt_lsb;
1632 case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
1633 picture->bottom_field_order_cnt = picture->pic_order_cnt_msb +
1634 picture->pic_order_cnt_lsb;
1641 gint abs_frame_num = 0;
1642 gint expected_pic_order_cnt = 0;
1645 /* See spec 8.2.1.2 */
1646 if (priv->prev_has_memmgmnt5)
1647 priv->prev_frame_num_offset = 0;
/* frame_num_offset accumulates each time frame_num wraps */
1650 picture->frame_num_offset = 0;
1651 else if (priv->prev_frame_num > picture->frame_num)
1652 picture->frame_num_offset =
1653 priv->prev_frame_num_offset + priv->max_frame_num;
1655 picture->frame_num_offset = priv->prev_frame_num_offset;
1657 if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1658 abs_frame_num = picture->frame_num_offset + picture->frame_num;
1662 if (picture->nal_ref_idc == 0 && abs_frame_num > 0)
1665 if (abs_frame_num > 0) {
1666 gint pic_order_cnt_cycle_cnt, frame_num_in_pic_order_cnt_cycle;
1667 gint expected_delta_per_pic_order_cnt_cycle = 0;
1669 if (sps->num_ref_frames_in_pic_order_cnt_cycle == 0) {
1670 GST_WARNING_OBJECT (self,
1671 "Invalid num_ref_frames_in_pic_order_cnt_cycle in stream");
1675 pic_order_cnt_cycle_cnt =
1676 (abs_frame_num - 1) / sps->num_ref_frames_in_pic_order_cnt_cycle;
1677 frame_num_in_pic_order_cnt_cycle =
1678 (abs_frame_num - 1) % sps->num_ref_frames_in_pic_order_cnt_cycle;
1680 for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++) {
1681 expected_delta_per_pic_order_cnt_cycle +=
1682 sps->offset_for_ref_frame[i];
1685 expected_pic_order_cnt = pic_order_cnt_cycle_cnt *
1686 expected_delta_per_pic_order_cnt_cycle;
1687 /* frame_num_in_pic_order_cnt_cycle is verified < 255 in parser */
1688 for (i = 0; i <= frame_num_in_pic_order_cnt_cycle; ++i)
1689 expected_pic_order_cnt += sps->offset_for_ref_frame[i];
1692 if (!picture->nal_ref_idc)
1693 expected_pic_order_cnt += sps->offset_for_non_ref_pic;
1695 if (GST_H264_PICTURE_IS_FRAME (picture)) {
1696 picture->top_field_order_cnt =
1697 expected_pic_order_cnt + picture->delta_pic_order_cnt0;
1698 picture->bottom_field_order_cnt = picture->top_field_order_cnt +
1699 sps->offset_for_top_to_bottom_field + picture->delta_pic_order_cnt1;
1700 } else if (picture->field != GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
1701 picture->top_field_order_cnt =
1702 expected_pic_order_cnt + picture->delta_pic_order_cnt0;
1704 picture->bottom_field_order_cnt = expected_pic_order_cnt +
1705 sps->offset_for_top_to_bottom_field + picture->delta_pic_order_cnt0;
1711 gint temp_pic_order_cnt;
1713 /* See spec 8.2.1.3 */
1714 if (priv->prev_has_memmgmnt5)
1715 priv->prev_frame_num_offset = 0;
1718 picture->frame_num_offset = 0;
1719 else if (priv->prev_frame_num > picture->frame_num)
1720 picture->frame_num_offset =
1721 priv->prev_frame_num_offset + priv->max_frame_num;
1723 picture->frame_num_offset = priv->prev_frame_num_offset;
/* Type 2: POC is derived from frame_num alone; non-reference pictures
 * get the odd value 2*n-1 so they interleave between references */
1726 temp_pic_order_cnt = 0;
1727 } else if (!picture->nal_ref_idc) {
1728 temp_pic_order_cnt =
1729 2 * (picture->frame_num_offset + picture->frame_num) - 1;
1731 temp_pic_order_cnt =
1732 2 * (picture->frame_num_offset + picture->frame_num);
1735 if (GST_H264_PICTURE_IS_FRAME (picture)) {
1736 picture->top_field_order_cnt = temp_pic_order_cnt;
1737 picture->bottom_field_order_cnt = temp_pic_order_cnt;
1738 } else if (picture->field == GST_H264_PICTURE_FIELD_BOTTOM_FIELD) {
1739 picture->bottom_field_order_cnt = temp_pic_order_cnt;
1741 picture->top_field_order_cnt = temp_pic_order_cnt;
1747 GST_WARNING_OBJECT (self,
1748 "Invalid pic_order_cnt_type: %d", sps->pic_order_cnt_type);
/* Final POC: min of both fields for a frame, the single field otherwise
 * (spec 8.2.1, PicOrderCnt derivation) */
1752 switch (picture->field) {
1753 case GST_H264_PICTURE_FIELD_FRAME:
1754 picture->pic_order_cnt =
1755 MIN (picture->top_field_order_cnt, picture->bottom_field_order_cnt);
1757 case GST_H264_PICTURE_FIELD_TOP_FIELD:
1758 picture->pic_order_cnt = picture->top_field_order_cnt;
1760 case GST_H264_PICTURE_FIELD_BOTTOM_FIELD:
1761 picture->pic_order_cnt = picture->bottom_field_order_cnt;
1764 g_assert_not_reached ();
/* Pop queued output frames and hand them to the subclass'
 * output_picture() vfunc until at most @num frames remain queued
 * (num == 0 drains everything).  Flow errors are accumulated into *ret
 * via UPDATE_FLOW_RETURN. */
1772 gst_h264_decoder_drain_output_queue (GstH264Decoder * self, guint num,
1773 GstFlowReturn * ret)
1775 GstH264DecoderPrivate *priv = self->priv;
1776 GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
1778 g_assert (klass->output_picture);
1779 g_assert (ret != NULL);
1781 while (gst_queue_array_get_length (priv->output_queue) > num) {
1782 GstH264DecoderOutputFrame *output_frame = (GstH264DecoderOutputFrame *)
1783 gst_queue_array_pop_head_struct (priv->output_queue);
1784 GstFlowReturn flow_ret = klass->output_picture (self, output_frame->frame,
1785 output_frame->picture);
1787 UPDATE_FLOW_RETURN (ret, flow_ret);
/* Queue @picture (transfer full) for output: look up its codec frame,
 * update the last output POC and, when the observed reorder depth grows,
 * recompute and publish the decoder latency.  Errors accumulate in *ret. */
1792 gst_h264_decoder_do_output_picture (GstH264Decoder * self,
1793 GstH264Picture * picture, GstFlowReturn * ret)
1795 GstH264DecoderPrivate *priv = self->priv;
1796 GstVideoCodecFrame *frame = NULL;
1797 GstH264DecoderOutputFrame output_frame;
1798 GstFlowReturn flow_ret = GST_FLOW_OK;
1800 g_assert (ret != NULL);
1802 GST_LOG_OBJECT (self, "Outputting picture %p (frame_num %d, poc %d)",
1803 picture, picture->frame_num, picture->pic_order_cnt);
/* POC going backwards indicates a broken stream; warn but keep going */
1805 if (picture->pic_order_cnt < priv->last_output_poc) {
1806 GST_WARNING_OBJECT (self,
1807 "Outputting out of order %d -> %d, likely a broken stream",
1808 priv->last_output_poc, picture->pic_order_cnt);
1811 priv->last_output_poc = picture->pic_order_cnt;
/* Track the worst-case reorder distance seen so far; when it grows,
 * bump the reported latency (scaled by the stream frame rate) */
1813 if (priv->last_reorder_frame_number > picture->reorder_frame_number) {
1814 guint64 diff = priv->last_reorder_frame_number -
1815 picture->reorder_frame_number;
1816 guint64 total_delay = diff + priv->preferred_output_delay;
1817 if (diff > priv->max_reorder_count && total_delay < G_MAXUINT32) {
1818 GstClockTime latency;
1820 priv->max_reorder_count = (guint32) diff;
1821 latency = gst_util_uint64_scale_int (GST_SECOND * total_delay,
1822 priv->fps_d, priv->fps_n);
1824 if (latency != G_MAXUINT64) {
1825 GST_DEBUG_OBJECT (self, "Updating latency to %" GST_TIME_FORMAT
1826 ", reorder count: %" G_GUINT64_FORMAT ", output-delay: %u",
1827 GST_TIME_ARGS (latency), diff, priv->preferred_output_delay);
1829 gst_video_decoder_set_latency (GST_VIDEO_DECODER (self),
1835 frame = gst_video_decoder_get_frame (GST_VIDEO_DECODER (self),
1836 picture->system_frame_number);
1839 /* The case where the end_picture() got failed and corresponding
1840 * GstVideoCodecFrame was dropped already */
1841 if (picture->nonexisting) {
1842 GST_DEBUG_OBJECT (self, "Dropping non-existing picture %p", picture);
1844 GST_ERROR_OBJECT (self,
1845 "No available codec frame with frame number %d",
1846 picture->system_frame_number);
1847 UPDATE_FLOW_RETURN (ret, GST_FLOW_ERROR);
1850 gst_h264_picture_unref (picture);
/* Ownership of frame and picture moves into the output queue entry */
1855 output_frame.frame = frame;
1856 output_frame.picture = picture;
1857 output_frame.self = self;
1858 gst_queue_array_push_tail_struct (priv->output_queue, &output_frame);
/* Keep at most preferred_output_delay frames queued */
1860 gst_h264_decoder_drain_output_queue (self, priv->preferred_output_delay,
1862 UPDATE_FLOW_RETURN (ret, flow_ret);
/* Complete decoding of priv->current_picture: let the subclass end the
 * picture, clear per-frame reference lists and run the finish-picture
 * (DPB insertion / bumping) process.  No-op when there is no current
 * picture.  Errors accumulate in *ret. */
1866 gst_h264_decoder_finish_current_picture (GstH264Decoder * self,
1867 GstFlowReturn * ret)
1869 GstH264DecoderPrivate *priv = self->priv;
1870 GstH264DecoderClass *klass;
1871 GstFlowReturn flow_ret = GST_FLOW_OK;
1873 if (!priv->current_picture)
1876 klass = GST_H264_DECODER_GET_CLASS (self);
1878 if (klass->end_picture) {
1879 flow_ret = klass->end_picture (self, priv->current_picture);
1880 if (flow_ret != GST_FLOW_OK) {
/* Keep the picture in the bookkeeping (reference marking still needs
 * it) but mark it non-existing so it is never output */
1881 GST_WARNING_OBJECT (self,
1882 "end picture failed, marking picture %p non-existing "
1883 "(frame_num %d, poc %d)", priv->current_picture,
1884 priv->current_picture->frame_num,
1885 priv->current_picture->pic_order_cnt);
1886 priv->current_picture->nonexisting = TRUE;
1888 /* this fake nonexisting picture will not trigger output_picture() */
1889 gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self),
1890 gst_video_codec_frame_ref (priv->current_frame));
1894 /* We no longer need the per frame reference lists */
1895 gst_h264_decoder_clear_ref_pic_lists (self);
1897 /* finish picture takes ownership of the picture */
1898 gst_h264_decoder_finish_picture (self, priv->current_picture, &flow_ret);
1899 priv->current_picture = NULL;
1901 UPDATE_FLOW_RETURN (ret, flow_ret);
1905 poc_asc_compare (const GstH264Picture ** a, const GstH264Picture ** b)
1907 return (*a)->pic_order_cnt - (*b)->pic_order_cnt;
1911 poc_desc_compare (const GstH264Picture ** a, const GstH264Picture ** b)
1913 return (*b)->pic_order_cnt - (*a)->pic_order_cnt;
/* Fully drain the decoder: bump every remaining picture out of the DPB,
 * flush the output queue, then clear any pending first field and the DPB
 * itself, resetting the last output POC. */
1916 static GstFlowReturn
1917 gst_h264_decoder_drain_internal (GstH264Decoder * self)
1919 GstH264DecoderPrivate *priv = self->priv;
1920 GstH264Picture *picture;
1921 GstFlowReturn ret = GST_FLOW_OK;
/* TRUE forces bumping regardless of DPB fullness */
1923 while ((picture = gst_h264_dpb_bump (priv->dpb, TRUE)) != NULL) {
1924 gst_h264_decoder_do_output_picture (self, picture, &ret);
1927 gst_h264_decoder_drain_output_queue (self, 0, &ret);
1929 gst_clear_h264_picture (&priv->last_field);
1930 gst_h264_dpb_clear (priv->dpb);
/* Any POC is "in order" after a drain */
1931 priv->last_output_poc = G_MININT32;
/* Execute the adaptive memory management control operations (MMCO)
 * signalled in @picture's dec_ref_pic_marking, per spec 8.2.5.4.
 * Updates priv->max_long_term_frame_idx and delegates the per-operation
 * DPB changes to the DPB helper. */
1937 gst_h264_decoder_handle_memory_management_opt (GstH264Decoder * self,
1938 GstH264Picture * picture)
1940 GstH264DecoderPrivate *priv = self->priv;
1943 for (i = 0; i < G_N_ELEMENTS (picture->dec_ref_pic_marking.ref_pic_marking);
1945 GstH264RefPicMarking *ref_pic_marking =
1946 &picture->dec_ref_pic_marking.ref_pic_marking[i];
1947 guint8 type = ref_pic_marking->memory_management_control_operation;
1949 GST_TRACE_OBJECT (self, "memory management operation %d, type %d", i, type);
1951 /* Normal end of operations' specification */
1957 priv->max_long_term_frame_idx =
1958 ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
1961 priv->max_long_term_frame_idx = -1;
1967 if (!gst_h264_dpb_perform_memory_management_control_operation (priv->dpb,
1968 ref_pic_marking, picture)) {
1969 GST_WARNING_OBJECT (self, "memory management operation type %d failed",
1971 /* Most likely our implementation fault, but let's just perform
1972 * next MMCO if any */
/* Sliding-window reference marking (spec 8.2.5.3): when the number of
 * reference frames reaches the SPS limit, unmark the short-term reference
 * picture(s) with the lowest frame_num_wrap until there is room for the
 * current picture.  Skipped for second fields (the pair counts once). */
1980 gst_h264_decoder_sliding_window_picture_marking (GstH264Decoder * self,
1981 GstH264Picture * picture)
1983 GstH264DecoderPrivate *priv = self->priv;
1984 const GstH264SPS *sps = priv->active_sps;
1986 gint max_num_ref_frames;
1988 /* Skip this for the second field */
1989 if (picture->second_field)
1993 GST_ERROR_OBJECT (self, "No active sps");
1997 /* 8.2.5.3. Ensure the DPB doesn't overflow by discarding the oldest picture */
1998 num_ref_pics = gst_h264_dpb_num_ref_frames (priv->dpb);
1999 max_num_ref_frames = MAX (1, sps->num_ref_frames);
2001 if (num_ref_pics < max_num_ref_frames)
2004 /* In theory, num_ref_pics shouldn't be larger than max_num_ref_frames
2005 * but it could happen if our implementation is wrong somehow or so.
2006 * Just try to remove reference pictures as many as possible in order to
2007 * avoid DPB overflow.
2009 while (num_ref_pics >= max_num_ref_frames) {
2010 /* Max number of reference pics reached, need to remove one of the short
2011 * term ones. Find smallest frame_num_wrap short reference picture and mark
2013 GstH264Picture *to_unmark =
2014 gst_h264_dpb_get_lowest_frame_num_short_ref (priv->dpb);
2016 if (num_ref_pics > max_num_ref_frames) {
2017 GST_WARNING_OBJECT (self,
2018 "num_ref_pics %d is larger than allowed maximum %d",
2019 num_ref_pics, max_num_ref_frames);
2023 GST_WARNING_OBJECT (self, "Could not find a short ref picture to unmark");
2027 GST_TRACE_OBJECT (self,
2028 "Unmark reference flag of picture %p (frame_num %d, poc %d)",
2029 to_unmark, to_unmark->frame_num, to_unmark->pic_order_cnt);
/* Unref pairs the ref taken by get_lowest_frame_num_short_ref() */
2031 gst_h264_picture_set_reference (to_unmark, GST_H264_PICTURE_REF_NONE, TRUE);
2032 gst_h264_picture_unref (to_unmark);
2040 /* This method ensures that DPB does not overflow, either by removing
2041 * reference pictures as specified in the stream, or using a sliding window
2042 * procedure to remove the oldest one.
2043 * It also performs marking and unmarking pictures as reference.
2044 * See spec 8.2.5.1 */
2046 gst_h264_decoder_reference_picture_marking (GstH264Decoder * self,
2047 GstH264Picture * picture)
2049 GstH264DecoderPrivate *priv = self->priv;
2051 /* If the current picture is an IDR, all reference pictures are unmarked */
2053 gst_h264_dpb_mark_all_non_ref (priv->dpb);
/* An IDR is marked long-term or short-term depending on its
 * long_term_reference_flag; MaxLongTermFrameIdx is reset accordingly */
2055 if (picture->dec_ref_pic_marking.long_term_reference_flag) {
2056 gst_h264_picture_set_reference (picture,
2057 GST_H264_PICTURE_REF_LONG_TERM, FALSE);
2058 picture->long_term_frame_idx = 0;
2059 priv->max_long_term_frame_idx = 0;
2061 gst_h264_picture_set_reference (picture,
2062 GST_H264_PICTURE_REF_SHORT_TERM, FALSE);
2063 priv->max_long_term_frame_idx = -1;
2069 /* Not an IDR. If the stream contains instructions on how to discard pictures
2070 * from DPB and how to mark/unmark existing reference pictures, do so.
2071 * Otherwise, fall back to default sliding window process */
2072 if (picture->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag) {
2073 if (picture->nonexisting) {
2074 GST_WARNING_OBJECT (self,
2075 "Invalid memory management operation for non-existing picture "
2076 "%p (frame_num %d, poc %d", picture, picture->frame_num,
2077 picture->pic_order_cnt);
2080 return gst_h264_decoder_handle_memory_management_opt (self, picture);
2083 return gst_h264_decoder_sliding_window_picture_marking (self, picture);
/* Map the configured compliance mode (and, for AUTO, upstream liveness
 * and profile) to a DPB bumping aggressiveness level: lower latency means
 * outputting pictures earlier at the risk of spec non-conformance. */
2086 static GstH264DpbBumpMode
2087 get_bump_level (GstH264Decoder * self)
2089 GstH264DecoderPrivate *priv = self->priv;
2091 /* User set the mode explicitly. */
2092 switch (priv->compliance) {
2093 case GST_H264_DECODER_COMPLIANCE_STRICT:
2094 return GST_H264_DPB_BUMP_NORMAL_LATENCY;
2095 case GST_H264_DECODER_COMPLIANCE_NORMAL:
2096 return GST_H264_DPB_BUMP_LOW_LATENCY;
2097 case GST_H264_DECODER_COMPLIANCE_FLEXIBLE:
2098 return GST_H264_DPB_BUMP_VERY_LOW_LATENCY;
2103 /* GST_H264_DECODER_COMPLIANCE_AUTO case. */
2105 if (priv->is_live) {
2106 /* The baseline and constrained-baseline profiles do not have B frames
2107 and do not use the picture reorder, safe to use the higher bump level. */
2108 if (priv->profile_idc == GST_H264_PROFILE_BASELINE)
2109 return GST_H264_DPB_BUMP_VERY_LOW_LATENCY;
2111 return GST_H264_DPB_BUMP_LOW_LATENCY;
2114 return GST_H264_DPB_BUMP_NORMAL_LATENCY;
/* Finalize a decoded picture (transfer full): run reference marking,
 * record previous-picture state for the next POC derivation, prune the
 * DPB, then either insert the picture into the DPB (splitting frames into
 * field pairs for interlaced streams) or output it directly.  Errors
 * accumulate in *ret. */
2118 gst_h264_decoder_finish_picture (GstH264Decoder * self,
2119 GstH264Picture * picture, GstFlowReturn * ret)
2121 GstVideoDecoder *decoder = GST_VIDEO_DECODER (self);
2122 GstH264DecoderPrivate *priv = self->priv;
2123 GstH264DpbBumpMode bump_level = get_bump_level (self);
2125 /* Finish processing the picture.
2126 * Start by storing previous picture data for later use */
2128 gst_h264_decoder_reference_picture_marking (self, picture);
/* prev_ref_* feed spec 8.2.1.1; prev_* feed 8.2.1.2/8.2.1.3 */
2129 priv->prev_ref_has_memmgmnt5 = picture->mem_mgmt_5;
2130 priv->prev_ref_top_field_order_cnt = picture->top_field_order_cnt;
2131 priv->prev_ref_pic_order_cnt_msb = picture->pic_order_cnt_msb;
2132 priv->prev_ref_pic_order_cnt_lsb = picture->pic_order_cnt_lsb;
2133 priv->prev_ref_field = picture->field;
2134 priv->prev_ref_frame_num = picture->frame_num;
2137 priv->prev_frame_num = picture->frame_num;
2138 priv->prev_has_memmgmnt5 = picture->mem_mgmt_5;
2139 priv->prev_frame_num_offset = picture->frame_num_offset;
2141 /* Remove unused (for reference or later output) pictures from DPB, marking
2143 gst_h264_dpb_delete_unused (priv->dpb);
2145 /* If field pictures belong to different codec frame,
2146 * drop codec frame of the second field because we are consuming
2147 * only the first codec frame via GstH264Decoder::output_picture() method */
2148 if (picture->second_field && picture->other_field &&
2149 picture->system_frame_number !=
2150 picture->other_field->system_frame_number) {
2151 GstVideoCodecFrame *frame = gst_video_decoder_get_frame (decoder,
2152 picture->system_frame_number);
2154 gst_video_decoder_release_frame (decoder, frame);
/* MMCO 5 resets POCs, so everything currently in the DPB must be
 * output before the current picture can be stored (spec C.4.4) */
2158 if (picture->mem_mgmt_5) {
2159 GstFlowReturn drain_ret;
2161 GST_TRACE_OBJECT (self, "Memory management type 5, drain the DPB");
2163 drain_ret = gst_h264_decoder_drain_internal (self);
2164 UPDATE_FLOW_RETURN (ret, drain_ret);
2167 _bump_dpb (self, bump_level, picture, ret);
2169 /* Add a ref to avoid the case of directly output and destroyed. */
2170 gst_h264_picture_ref (picture);
2173 - If the current decoded picture is the second field of a complementary
2174 reference field pair, add to DPB.
2176 For A reference decoded picture, the "bumping" process is invoked
2177 repeatedly until there is an empty frame buffer, then add to DPB:
2179 For a non-reference decoded picture, if there is empty frame buffer
2180 after bumping the smaller POC, add to DPB.
2181 Otherwise, output directly. */
2182 if ((picture->second_field && picture->other_field
2183 && picture->other_field->ref)
2184 || picture->ref || gst_h264_dpb_has_empty_frame_buffer (priv->dpb)) {
2185 /* Split frame into top/bottom field pictures for reference picture marking
2186 * process. Even if current picture has field_pic_flag equal to zero,
2187 * if next picture is a field picture, complementary field pair of reference
2188 * frame should have individual pic_num and long_term_pic_num.
2190 if (gst_h264_dpb_get_interlaced (priv->dpb) &&
2191 GST_H264_PICTURE_IS_FRAME (picture)) {
2192 GstH264Picture *other_field =
2193 gst_h264_decoder_split_frame (self, picture);
2195 add_picture_to_dpb (self, picture);
2197 GST_WARNING_OBJECT (self,
2198 "Couldn't split frame into complementary field pair");
2199 /* Keep decoding anyway... */
2201 add_picture_to_dpb (self, other_field);
2204 add_picture_to_dpb (self, picture);
2207 output_picture_directly (self, picture, ret);
2210 GST_LOG_OBJECT (self,
2211 "Finishing picture %p (frame_num %d, poc %d), entries in DPB %d",
2212 picture, picture->frame_num, picture->pic_order_cnt,
2213 gst_h264_dpb_get_size (priv->dpb));
/* Drop the extra ref taken above */
2215 gst_h264_picture_unref (picture);
2217 /* For low-latency output, we try to bump here to avoid waiting
2218 * for another decoding circle. */
2219 if (bump_level != GST_H264_DPB_BUMP_NORMAL_LATENCY)
2220 _bump_dpb (self, bump_level, NULL, ret);
/* Pick max_num_reorder_frames for the stream: use the explicit VUI
 * bitstream-restriction value when present (clamped to MaxDpbFrames),
 * infer 0 for certain constraint_set3 profiles per the spec, relax to
 * 0 for I/P-only profiles in non-strict mode, and otherwise fall back
 * to max_dpb_size (the conservative worst case).
 * NOTE(review): fragmentary listing — some lines of this function are
 * not visible here. */
2224 gst_h264_decoder_get_max_num_reorder_frames (GstH264Decoder * self,
2225 GstH264SPS * sps, gint max_dpb_size)
2227 GstH264DecoderPrivate *priv = self->priv;
2229 if (sps->vui_parameters_present_flag
2230 && sps->vui_parameters.bitstream_restriction_flag) {
/* Stream signals num_reorder_frames explicitly; never exceed MaxDpbFrames */
2231 if (sps->vui_parameters.num_reorder_frames > max_dpb_size) {
2233 ("max_num_reorder_frames present, but larger than MaxDpbFrames (%d > %d)",
2234 sps->vui_parameters.num_reorder_frames, max_dpb_size);
2235 return max_dpb_size;
2238 return sps->vui_parameters.num_reorder_frames;
2239 } else if (sps->constraint_set3_flag) {
2240 /* If max_num_reorder_frames is not present, if profile id is equal to
2241 * 44, 86, 100, 110, 122, or 244 and constraint_set3_flag is equal to 1,
2242 * max_num_reorder_frames shall be inferred to be equal to 0 */
2243 switch (sps->profile_idc) {
2256 /* Relaxed conditions (undefined by spec) */
2257 if (priv->compliance != GST_H264_DECODER_COMPLIANCE_STRICT &&
2258 (sps->profile_idc == 66 || sps->profile_idc == 83)) {
2259 /* baseline, constrained baseline and scalable-baseline profiles
2260 * only contain I/P frames. */
2264 return max_dpb_size;
/* H.264 levels encoded as level_idc (level number * 10); Level 1b is
 * special-cased as 9 since it shares level_idc 11 with Level 1.1
 * (see the constraint_set3_flag handling in process_sps). */
2269 GST_H264_LEVEL_L1 = 10,
2270 GST_H264_LEVEL_L1B = 9,
2271 GST_H264_LEVEL_L1_1 = 11,
2272 GST_H264_LEVEL_L1_2 = 12,
2273 GST_H264_LEVEL_L1_3 = 13,
2274 GST_H264_LEVEL_L2_0 = 20,
2275 GST_H264_LEVEL_L2_1 = 21,
2276 GST_H264_LEVEL_L2_2 = 22,
2277 GST_H264_LEVEL_L3 = 30,
2278 GST_H264_LEVEL_L3_1 = 31,
2279 GST_H264_LEVEL_L3_2 = 32,
2280 GST_H264_LEVEL_L4 = 40,
2281 GST_H264_LEVEL_L4_1 = 41,
2282 GST_H264_LEVEL_L4_2 = 42,
2283 GST_H264_LEVEL_L5 = 50,
2284 GST_H264_LEVEL_L5_1 = 51,
2285 GST_H264_LEVEL_L5_2 = 52,
2286 GST_H264_LEVEL_L6 = 60,
2287 GST_H264_LEVEL_L6_1 = 61,
2288 GST_H264_LEVEL_L6_2 = 62,
2289 } GstH264DecoderLevel;
/* Per-level limits (presumably from H.264 spec Table A-1 — the struct
 * header with the remaining field names is not visible here). Only
 * max_dpb_mbs is consumed below by h264_level_to_max_dpb_mbs(). */
2293 GstH264DecoderLevel level;
2297 guint32 max_dpb_mbs;
2298 guint32 max_main_br;
/* One entry per level, in ascending level order; L1B sits between L1
 * and L1.1 as in the spec. */
2301 static const LevelLimits level_limits_map[] = {
2302 {GST_H264_LEVEL_L1, 1485, 99, 396, 64},
2303 {GST_H264_LEVEL_L1B, 1485, 99, 396, 128},
2304 {GST_H264_LEVEL_L1_1, 3000, 396, 900, 192},
2305 {GST_H264_LEVEL_L1_2, 6000, 396, 2376, 384},
2306 {GST_H264_LEVEL_L1_3, 11800, 396, 2376, 768},
2307 {GST_H264_LEVEL_L2_0, 11880, 396, 2376, 2000},
2308 {GST_H264_LEVEL_L2_1, 19800, 792, 4752, 4000},
2309 {GST_H264_LEVEL_L2_2, 20250, 1620, 8100, 4000},
2310 {GST_H264_LEVEL_L3, 40500, 1620, 8100, 10000},
2311 {GST_H264_LEVEL_L3_1, 108000, 3600, 18000, 14000},
2312 {GST_H264_LEVEL_L3_2, 216000, 5120, 20480, 20000},
2313 {GST_H264_LEVEL_L4, 245760, 8192, 32768, 20000},
2314 {GST_H264_LEVEL_L4_1, 245760, 8192, 32768, 50000},
2315 {GST_H264_LEVEL_L4_2, 522240, 8704, 34816, 50000},
2316 {GST_H264_LEVEL_L5, 589824, 22080, 110400, 135000},
2317 {GST_H264_LEVEL_L5_1, 983040, 36864, 184320, 240000},
2318 {GST_H264_LEVEL_L5_2, 2073600, 36864, 184320, 240000},
2319 {GST_H264_LEVEL_L6, 4177920, 139264, 696320, 240000},
2320 {GST_H264_LEVEL_L6_1, 8355840, 139264, 696320, 480000},
2321 {GST_H264_LEVEL_L6_2, 16711680, 139264, 696320, 800000}
/* Linear lookup of MaxDpbMbs for @level in level_limits_map.
 * NOTE(review): the not-found fallback (after the loop) is outside
 * this fragmentary view — the caller treats a zero result as error. */
2325 h264_level_to_max_dpb_mbs (GstH264DecoderLevel level)
2328 for (i = 0; i < G_N_ELEMENTS (level_limits_map); i++) {
2329 if (level == level_limits_map[i].level)
2330 return level_limits_map[i].max_dpb_mbs;
/* Report decoder latency to GstVideoDecoder: min latency comes from the
 * effective frames-delay (DPB size, possibly reduced for low-latency bump
 * modes) plus the subclass' preferred output delay; max latency assumes
 * the full DPB. Framerate is taken from the src caps (or input caps),
 * falling back to 25/1 when absent/variable. */
2337 gst_h264_decoder_set_latency (GstH264Decoder * self, const GstH264SPS * sps,
2340 GstH264DecoderPrivate *priv = self->priv;
2342 GstClockTime min, max;
2343 GstStructure *structure;
2344 gint fps_d = 1, fps_n = 0;
2345 GstH264DpbBumpMode bump_level;
2346 guint32 frames_delay, max_frames_delay;
/* Prefer negotiated src caps; fall back to the input caps for framerate */
2348 caps = gst_pad_get_current_caps (GST_VIDEO_DECODER_SRC_PAD (self));
2349 if (!caps && self->input_state)
2350 caps = gst_caps_ref (self->input_state->caps);
2353 structure = gst_caps_get_structure (caps, 0);
2354 if (gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
2356 /* variable framerate: see if we have a max-framerate */
2357 gst_structure_get_fraction (structure, "max-framerate", &fps_n, &fps_d);
2360 gst_caps_unref (caps);
2363 /* if no fps or variable, then 25/1 */
2369 frames_delay = max_dpb_size;
/* Low-latency bump modes output earlier; the real value is refined later */
2371 bump_level = get_bump_level (self);
2372 if (bump_level != GST_H264_DPB_BUMP_NORMAL_LATENCY) {
2373 GST_DEBUG_OBJECT (self, "Actual latency will be updated later");
2377 priv->max_reorder_count = frames_delay;
2378 priv->fps_n = fps_n;
2379 priv->fps_d = fps_d;
2381 /* Consider output delay wanted by subclass */
2382 frames_delay += priv->preferred_output_delay;
2384 max_frames_delay = max_dpb_size + priv->preferred_output_delay;
/* frames -> time: delay * (1 second) * fps_d / fps_n */
2386 min = gst_util_uint64_scale_int (frames_delay * GST_SECOND, fps_d, fps_n);
2387 max = gst_util_uint64_scale_int (max_frames_delay * GST_SECOND, fps_d, fps_n);
2389 GST_DEBUG_OBJECT (self,
2390 "latency min %" GST_TIME_FORMAT ", max %" GST_TIME_FORMAT
2391 ", frames-delay %d", GST_TIME_ARGS (min), GST_TIME_ARGS (max),
2394 gst_video_decoder_set_latency (GST_VIDEO_DECODER (self), min, max);
/* Process a (possibly new) SPS: derive interlacing, level, MaxDpbFrames
 * and max_num_reorder_frames; when anything relevant changed, drain the
 * decoder, notify the subclass via new_sequence(), and reconfigure the
 * DPB and reported latency. Returns a GstFlowReturn; errors from the
 * subclass or an unsupported configuration abort the update. */
2397 static GstFlowReturn
2398 gst_h264_decoder_process_sps (GstH264Decoder * self, GstH264SPS * sps)
2400 GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
2401 GstH264DecoderPrivate *priv = self->priv;
2404 gint width_mb, height_mb;
2405 gint max_dpb_frames;
2407 gint prev_max_dpb_size;
2408 gint max_reorder_frames;
2409 gint prev_max_reorder_frames;
2410 gboolean prev_interlaced;
2411 gboolean interlaced;
2412 GstFlowReturn ret = GST_FLOW_OK;
/* Interlaced (field) content requires subclass support for field pictures */
2414 if (sps->frame_mbs_only_flag == 0) {
2415 if (!klass->new_field_picture) {
2416 GST_FIXME_OBJECT (self,
2417 "frame_mbs_only_flag != 1 not supported by subclass");
2418 return GST_FLOW_NOT_NEGOTIATED;
2421 if (sps->mb_adaptive_frame_field_flag) {
2422 GST_LOG_OBJECT (self,
2423 "mb_adaptive_frame_field_flag == 1, MBAFF sequence");
2425 GST_LOG_OBJECT (self, "mb_adaptive_frame_field_flag == 0, PAFF sequence");
2429 interlaced = !sps->frame_mbs_only_flag;
2431 /* Spec A.3.1 and A.3.2
2432 * For Baseline, Constrained Baseline and Main profile, the indicated level is
2433 * Level 1b if level_idc is equal to 11 and constraint_set3_flag is equal to 1
2435 level = sps->level_idc;
2436 if (level == 11 && (sps->profile_idc == 66 || sps->profile_idc == 77) &&
2437 sps->constraint_set3_flag) {
2442 max_dpb_mbs = h264_level_to_max_dpb_mbs ((GstH264DecoderLevel) level);
2444 return GST_FLOW_ERROR;
/* MaxDpbFrames = MaxDpbMbs / (PicWidthInMbs * FrameHeightInMbs), capped */
2446 width_mb = sps->width / 16;
2447 height_mb = sps->height / 16;
2449 max_dpb_frames = MIN (max_dpb_mbs / (width_mb * height_mb),
2450 GST_H264_DPB_MAX_SIZE);
2452 if (sps->vui_parameters_present_flag
2453 && sps->vui_parameters.bitstream_restriction_flag)
2454 max_dpb_frames = MAX (1, sps->vui_parameters.max_dec_frame_buffering);
2456 /* Case 1) There might be some non-conforming streams that require more DPB
2457 * size than that of specified one by SPS
2458 * Case 2) If bitstream_restriction_flag is not present,
2459 * max_dec_frame_buffering should be inferred
2460 * to be equal to MaxDpbFrames, then MaxDpbFrames can exceed num_ref_frames
2461 * See https://chromium-review.googlesource.com/c/chromium/src/+/760276/
2463 max_dpb_size = MAX (max_dpb_frames, sps->num_ref_frames);
2464 if (max_dpb_size > GST_H264_DPB_MAX_SIZE) {
2465 GST_WARNING_OBJECT (self, "Too large calculated DPB size %d", max_dpb_size);
2466 max_dpb_size = GST_H264_DPB_MAX_SIZE;
2469 /* Safety, so that subclass don't need bound checking */
2470 g_return_val_if_fail (max_dpb_size <= GST_H264_DPB_MAX_SIZE, GST_FLOW_ERROR);
/* Snapshot current DPB configuration to detect changes below */
2472 prev_max_dpb_size = gst_h264_dpb_get_max_num_frames (priv->dpb);
2473 prev_interlaced = gst_h264_dpb_get_interlaced (priv->dpb);
2475 prev_max_reorder_frames = gst_h264_dpb_get_max_num_reorder_frames (priv->dpb);
2476 max_reorder_frames =
2477 gst_h264_decoder_get_max_num_reorder_frames (self, sps, max_dpb_size);
2479 if (priv->width != sps->width || priv->height != sps->height ||
2480 prev_max_dpb_size != max_dpb_size || prev_interlaced != interlaced ||
2481 prev_max_reorder_frames != max_reorder_frames) {
/* NOTE(review): this inner 'klass' shadows the one declared at function
 * scope above; both hold the same class pointer so behavior is unchanged */
2482 GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
2484 GST_DEBUG_OBJECT (self,
2485 "SPS updated, resolution: %dx%d -> %dx%d, dpb size: %d -> %d, "
2486 "interlaced %d -> %d, max_reorder_frames: %d -> %d",
2487 priv->width, priv->height, sps->width, sps->height,
2488 prev_max_dpb_size, max_dpb_size, prev_interlaced, interlaced,
2489 prev_max_reorder_frames, max_reorder_frames);
/* Flush all pending output before reconfiguring the sequence */
2491 ret = gst_h264_decoder_drain (GST_VIDEO_DECODER (self));
2492 if (ret != GST_FLOW_OK)
2495 gst_h264_decoder_reset_latency_infos (self);
2497 g_assert (klass->new_sequence);
2499 if (klass->get_preferred_output_delay) {
2500 priv->preferred_output_delay =
2501 klass->get_preferred_output_delay (self, priv->is_live);
2503 priv->preferred_output_delay = 0;
2506 ret = klass->new_sequence (self,
2507 sps, max_dpb_size + priv->preferred_output_delay);
2508 if (ret != GST_FLOW_OK) {
2509 GST_WARNING_OBJECT (self, "subclass does not want accept new sequence");
/* Commit new configuration to private state, DPB and latency */
2513 priv->profile_idc = sps->profile_idc;
2514 priv->width = sps->width;
2515 priv->height = sps->height;
2517 gst_h264_dpb_set_max_num_frames (priv->dpb, max_dpb_size);
2518 gst_h264_dpb_set_interlaced (priv->dpb, interlaced);
2519 gst_h264_dpb_set_max_num_reorder_frames (priv->dpb, max_reorder_frames);
2520 gst_h264_decoder_set_latency (self, sps, max_dpb_size);
/* Initialize a "non-existing" gap picture (8.2.5.2 frame_num gap
 * handling): marked as a short-term reference frame with the given
 * frame_num, then run through POC calculation. Returns the result of
 * gst_h264_decoder_calculate_poc(). */
2527 gst_h264_decoder_init_gap_picture (GstH264Decoder * self,
2528 GstH264Picture * picture, gint frame_num)
2530 picture->nonexisting = TRUE;
2531 picture->nal_ref_idc = 1;
2532 picture->frame_num = picture->pic_num = frame_num;
2533 picture->dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag = FALSE;
2534 picture->ref = GST_H264_PICTURE_REF_SHORT_TERM;
2535 picture->ref_pic = TRUE;
2536 picture->dec_ref_pic_marking.long_term_reference_flag = FALSE;
2537 picture->field = GST_H264_PICTURE_FIELD_FRAME;
2539 return gst_h264_decoder_calculate_poc (self, picture);
/* Hand the current slice to the subclass' decode_slice() vfunc, with
 * modified reference picture lists when the base class manages them
 * (process_ref_pic_lists). The temporary lists are cleared afterwards. */
2542 static GstFlowReturn
2543 gst_h264_decoder_decode_slice (GstH264Decoder * self)
2545 GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
2546 GstH264DecoderPrivate *priv = self->priv;
2547 GstH264Slice *slice = &priv->current_slice;
2548 GstH264Picture *picture = priv->current_picture;
2549 GArray *ref_pic_list0 = NULL;
2550 GArray *ref_pic_list1 = NULL;
2551 GstFlowReturn ret = GST_FLOW_OK;
2554 GST_ERROR_OBJECT (self, "No current picture");
2555 return GST_FLOW_ERROR;
2558 GST_LOG_OBJECT (self, "Decode picture %p (frame_num %d, poc %d)",
2559 picture, picture->frame_num, picture->pic_order_cnt);
2561 priv->max_pic_num = slice->header.max_pic_num;
/* Apply slice-header ref_pic_list_modification before handing lists over */
2563 if (priv->process_ref_pic_lists) {
2564 if (!gst_h264_decoder_modify_ref_pic_lists (self)) {
2565 ret = GST_FLOW_ERROR;
2569 ref_pic_list0 = priv->ref_pic_list0;
2570 ref_pic_list1 = priv->ref_pic_list1;
2573 g_assert (klass->decode_slice);
2575 ret = klass->decode_slice (self, picture, slice, ref_pic_list0,
2577 if (ret != GST_FLOW_OK) {
2578 GST_WARNING_OBJECT (self,
2579 "Subclass didn't want to decode picture %p (frame_num %d, poc %d)",
2580 picture, picture->frame_num, picture->pic_order_cnt);
/* Drop the per-slice modified lists; they are rebuilt for each slice */
2584 g_array_set_size (priv->ref_pic_list0, 0);
2585 g_array_set_size (priv->ref_pic_list1, 0);
/* GCompareFunc: sort GstH264Picture pointers by descending pic_num. */
2591 pic_num_desc_compare (const GstH264Picture ** a, const GstH264Picture ** b)
2593 return (*b)->pic_num - (*a)->pic_num;
/* GCompareFunc: sort GstH264Picture pointers by ascending
 * long_term_pic_num. */
2597 long_term_pic_num_asc_compare (const GstH264Picture ** a,
2598 const GstH264Picture ** b)
2600 return (*a)->long_term_pic_num - (*b)->long_term_pic_num;
/* Build RefPicList0 for a P/SP frame picture (8.2.4.2.1): short-term
 * refs sorted by descending pic_num, followed by long-term refs sorted
 * by ascending long_term_pic_num. Result goes into priv->ref_pic_list_p0. */
2604 construct_ref_pic_lists_p (GstH264Decoder * self,
2605 GstH264Picture * current_picture)
2607 GstH264DecoderPrivate *priv = self->priv;
2610 /* RefPicList0 (8.2.4.2.1) [[1] [2]], where:
2611 * [1] shortterm ref pics sorted by descending pic_num,
2612 * [2] longterm ref pics by ascending long_term_pic_num.
2614 g_array_set_size (priv->ref_pic_list_p0, 0);
2616 gst_h264_dpb_get_pictures_short_term_ref (priv->dpb,
2617 TRUE, FALSE, priv->ref_pic_list_p0);
2618 g_array_sort (priv->ref_pic_list_p0, (GCompareFunc) pic_num_desc_compare);
/* Append long-term refs and sort only that appended tail */
2620 pos = priv->ref_pic_list_p0->len;
2621 gst_h264_dpb_get_pictures_long_term_ref (priv->dpb,
2622 FALSE, priv->ref_pic_list_p0);
2623 g_qsort_with_data (&g_array_index (priv->ref_pic_list_p0, gpointer, pos),
2624 priv->ref_pic_list_p0->len - pos, sizeof (gpointer),
2625 (GCompareDataFunc) long_term_pic_num_asc_compare, NULL);
/* Debug dump of the final list; 's' suffix marks long-term entries */
2627 #ifndef GST_DISABLE_GST_DEBUG
2628 if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= GST_LEVEL_DEBUG) {
2629 GString *str = g_string_new (NULL);
2630 for (pos = 0; pos < priv->ref_pic_list_p0->len; pos++) {
2631 GstH264Picture *ref =
2632 g_array_index (priv->ref_pic_list_p0, GstH264Picture *, pos);
2633 if (!GST_H264_PICTURE_IS_LONG_TERM_REF (ref))
2634 g_string_append_printf (str, "|%i", ref->pic_num);
2636 g_string_append_printf (str, "|%is", ref->pic_num);
2638 GST_DEBUG_OBJECT (self, "ref_pic_list_p0: %s|", str->str);
2639 g_string_free (str, TRUE);
/* GCompareFunc: sort GstH264Picture pointers by descending
 * frame_num_wrap (8.2.4.2.2 field-list ordering). */
2645 frame_num_wrap_desc_compare (const GstH264Picture ** a,
2646 const GstH264Picture ** b)
2648 return (*b)->frame_num_wrap - (*a)->frame_num_wrap;
/* GCompareFunc: sort GstH264Picture pointers by ascending
 * long_term_frame_idx. */
2652 long_term_frame_idx_asc_compare (const GstH264Picture ** a,
2653 const GstH264Picture ** b)
2655 return (*a)->long_term_frame_idx - (*b)->long_term_frame_idx;
/* init_picture_refs_fields_1 in gstvaapidecoder_h264.c */
/* 8.2.4.2.5: interleave fields from @ref_frame_list into @ref_pic_list_x,
 * alternating between fields of the requested parity (@field) and the
 * opposite parity, taking a ref on each appended picture. */
2660 init_picture_refs_fields_1 (GstH264Decoder * self, GstH264PictureField field,
2661 GArray * ref_frame_list, GArray * ref_pic_list_x)
/* i scans for same-parity fields, j for opposite-parity fields; the
 * enclosing do/while alternates between the two scans */
2666 for (; i < ref_frame_list->len; i++) {
2667 GstH264Picture *pic = g_array_index (ref_frame_list, GstH264Picture *, i);
2668 if (pic->field == field) {
2669 pic = gst_h264_picture_ref (pic);
2670 g_array_append_val (ref_pic_list_x, pic);
2676 for (; j < ref_frame_list->len; j++) {
2677 GstH264Picture *pic = g_array_index (ref_frame_list, GstH264Picture *, j);
2678 if (pic->field != field) {
2679 pic = gst_h264_picture_ref (pic);
2680 g_array_append_val (ref_pic_list_x, pic);
2685 } while (i < ref_frame_list->len || j < ref_frame_list->len);
2689 construct_ref_field_pic_lists_p (GstH264Decoder * self,
2690 GstH264Picture * current_picture)
2692 GstH264DecoderPrivate *priv = self->priv;
2695 g_array_set_size (priv->ref_pic_list_p0, 0);
2696 g_array_set_size (priv->ref_frame_list_0_short_term, 0);
2697 g_array_set_size (priv->ref_frame_list_long_term, 0);
2699 /* 8.2.4.2.2, 8.2.4.2.5 refFrameList0ShortTerm:
2700 * short-term ref pictures sorted by descending frame_num_wrap.
2702 gst_h264_dpb_get_pictures_short_term_ref (priv->dpb,
2703 TRUE, TRUE, priv->ref_frame_list_0_short_term);
2704 g_array_sort (priv->ref_frame_list_0_short_term,
2705 (GCompareFunc) frame_num_wrap_desc_compare);
2707 #ifndef GST_DISABLE_GST_DEBUG
2708 if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= GST_LEVEL_TRACE
2709 && priv->ref_frame_list_0_short_term->len) {
2710 GString *str = g_string_new (NULL);
2711 for (pos = 0; pos < priv->ref_frame_list_0_short_term->len; pos++) {
2712 GstH264Picture *ref = g_array_index (priv->ref_frame_list_0_short_term,
2713 GstH264Picture *, pos);
2714 g_string_append_printf (str, "|%i(%d)", ref->frame_num_wrap, ref->field);
2716 GST_TRACE_OBJECT (self, "ref_frame_list_0_short_term (%d): %s|",
2717 current_picture->field, str->str);
2718 g_string_free (str, TRUE);
2722 /* 8.2.4.2.2 refFrameList0LongTerm,:
2723 * long-term ref pictures sorted by ascending long_term_frame_idx.
2725 gst_h264_dpb_get_pictures_long_term_ref (priv->dpb,
2726 TRUE, priv->ref_frame_list_long_term);
2727 g_array_sort (priv->ref_frame_list_long_term,
2728 (GCompareFunc) long_term_frame_idx_asc_compare);
2730 #ifndef GST_DISABLE_GST_DEBUG
2731 if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= GST_LEVEL_TRACE
2732 && priv->ref_frame_list_long_term->len) {
2733 GString *str = g_string_new (NULL);
2734 for (pos = 0; pos < priv->ref_frame_list_long_term->len; pos++) {
2735 GstH264Picture *ref = g_array_index (priv->ref_frame_list_0_short_term,
2736 GstH264Picture *, pos);
2737 g_string_append_printf (str, "|%i(%d)", ref->long_term_frame_idx,
2740 GST_TRACE_OBJECT (self, "ref_frame_list_0_long_term (%d): %s|",
2741 current_picture->field, str->str);
2742 g_string_free (str, TRUE);
2747 init_picture_refs_fields_1 (self, current_picture->field,
2748 priv->ref_frame_list_0_short_term, priv->ref_pic_list_p0);
2749 init_picture_refs_fields_1 (self, current_picture->field,
2750 priv->ref_frame_list_long_term, priv->ref_pic_list_p0);
2752 #ifndef GST_DISABLE_GST_DEBUG
2753 if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= GST_LEVEL_DEBUG
2754 && priv->ref_pic_list_p0->len) {
2755 GString *str = g_string_new (NULL);
2756 for (pos = 0; pos < priv->ref_pic_list_p0->len; pos++) {
2757 GstH264Picture *ref =
2758 g_array_index (priv->ref_pic_list_p0, GstH264Picture *, pos);
2759 if (!GST_H264_PICTURE_IS_LONG_TERM_REF (ref))
2760 g_string_append_printf (str, "|%i(%d)s", ref->frame_num_wrap,
2763 g_string_append_printf (str, "|%i(%d)l", ref->long_term_frame_idx,
2766 GST_DEBUG_OBJECT (self, "ref_pic_list_p0 (%d): %s|", current_picture->field,
2768 g_string_free (str, TRUE);
2772 /* Clear temporary lists, now pictures are owned by ref_pic_list_p0 */
2773 g_array_set_size (priv->ref_frame_list_0_short_term, 0);
2774 g_array_set_size (priv->ref_frame_list_long_term, 0);
/* Return whether two GArrays of pointers have identical length and
 * element-wise identical pointer values (used for the 8.2.4.2.3
 * "lists identical" swap rule). */
2778 lists_are_equal (GArray * l1, GArray * l2)
2782 if (l1->len != l2->len)
2785 for (i = 0; i < l1->len; i++)
2786 if (g_array_index (l1, gpointer, i) != g_array_index (l2, gpointer, i))
/* Find the split point in a sorted B-list: index of the first picture
 * that compares greater than the current picture under @compare_func.
 * (Return of the index is outside this fragmentary view.) */
2793 split_ref_pic_list_b (GstH264Decoder * self, GArray * ref_pic_list_b,
2794 GCompareFunc compare_func)
2798 for (pos = 0; pos < ref_pic_list_b->len; pos++) {
2799 GstH264Picture *pic = g_array_index (ref_pic_list_b, GstH264Picture *, pos);
2800 if (compare_func (&pic, &self->priv->current_picture) > 0)
/* Debug helper: dump a B reference list as "|poc|poc|...|" with an 'l'
 * suffix for long-term entries, plus the current picture's POC.
 * Compiled out when GST_DISABLE_GST_DEBUG is defined; early-returns
 * below DEBUG threshold. */
2808 print_ref_pic_list_b (GstH264Decoder * self, GArray * ref_list_b,
2811 #ifndef GST_DISABLE_GST_DEBUG
2815 if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) < GST_LEVEL_DEBUG)
2818 str = g_string_new (NULL);
2820 for (i = 0; i < ref_list_b->len; i++) {
2821 GstH264Picture *ref = g_array_index (ref_list_b, GstH264Picture *, i);
2823 if (!GST_H264_PICTURE_IS_LONG_TERM_REF (ref))
2824 g_string_append_printf (str, "|%i", ref->pic_order_cnt);
2826 g_string_append_printf (str, "|%il", ref->long_term_pic_num);
2829 GST_DEBUG_OBJECT (self, "%s: %s| curr %i", name, str->str,
2830 self->priv->current_picture->pic_order_cnt);
2831 g_string_free (str, TRUE);
/* Build RefPicList0/RefPicList1 for a B frame picture (8.2.4.2.3):
 * both lists combine short-term refs split around the current POC
 * (sorted in opposite directions) with long-term refs by ascending
 * long_term_pic_num; if the two lists come out identical, the first
 * two entries of RefPicList1 are swapped. */
2836 construct_ref_pic_lists_b (GstH264Decoder * self,
2837 GstH264Picture * current_picture)
2839 GstH264DecoderPrivate *priv = self->priv;
2842 /* RefPicList0 (8.2.4.2.3) [[1] [2] [3]], where:
2843 * [1] shortterm ref pics with POC < current_picture's POC sorted by descending POC,
2844 * [2] shortterm ref pics with POC > current_picture's POC by ascending POC,
2845 * [3] longterm ref pics by ascending long_term_pic_num.
2847 g_array_set_size (priv->ref_pic_list_b0, 0);
2848 g_array_set_size (priv->ref_pic_list_b1, 0);
2851 * When pic_order_cnt_type is equal to 0, reference pictures that are marked
2852 * as "non-existing" as specified in clause 8.2.5.2 are not included in either
2853 * RefPicList0 or RefPicList1
2855 gst_h264_dpb_get_pictures_short_term_ref (priv->dpb,
2856 current_picture->pic_order_cnt_type != 0, FALSE, priv->ref_pic_list_b0);
2858 /* First sort ascending, this will put [1] in right place and finish
2860 print_ref_pic_list_b (self, priv->ref_pic_list_b0, "ref_pic_list_b0");
2861 g_array_sort (priv->ref_pic_list_b0, (GCompareFunc) poc_asc_compare);
2862 print_ref_pic_list_b (self, priv->ref_pic_list_b0, "ref_pic_list_b0");
2864 /* Find first with POC > current_picture's POC to get first element
2866 pos = split_ref_pic_list_b (self, priv->ref_pic_list_b0,
2867 (GCompareFunc) poc_asc_compare);
2869 GST_DEBUG_OBJECT (self, "split point %i", pos);
2871 /* and sort [1] descending, thus finishing sequence [1] [2]. */
2872 g_qsort_with_data (priv->ref_pic_list_b0->data, pos, sizeof (gpointer),
2873 (GCompareDataFunc) poc_desc_compare, NULL);
2875 /* Now add [3] and sort by ascending long_term_pic_num. */
2876 pos = priv->ref_pic_list_b0->len;
2877 gst_h264_dpb_get_pictures_long_term_ref (priv->dpb,
2878 FALSE, priv->ref_pic_list_b0);
2879 g_qsort_with_data (&g_array_index (priv->ref_pic_list_b0, gpointer, pos),
2880 priv->ref_pic_list_b0->len - pos, sizeof (gpointer),
2881 (GCompareDataFunc) long_term_pic_num_asc_compare, NULL);
2883 /* RefPicList1 (8.2.4.2.4) [[1] [2] [3]], where:
2884 * [1] shortterm ref pics with POC > curr_pic's POC sorted by ascending POC,
2885 * [2] shortterm ref pics with POC < curr_pic's POC by descending POC,
2886 * [3] longterm ref pics by ascending long_term_pic_num.
2888 gst_h264_dpb_get_pictures_short_term_ref (priv->dpb,
2889 current_picture->pic_order_cnt_type != 0, FALSE, priv->ref_pic_list_b1);
2891 /* First sort by descending POC. */
2892 g_array_sort (priv->ref_pic_list_b1, (GCompareFunc) poc_desc_compare);
2894 /* Split at first with POC < current_picture's POC to get first element
2896 pos = split_ref_pic_list_b (self, priv->ref_pic_list_b1,
2897 (GCompareFunc) poc_desc_compare);
2899 /* and sort [1] ascending. */
2900 g_qsort_with_data (priv->ref_pic_list_b1->data, pos, sizeof (gpointer),
2901 (GCompareDataFunc) poc_asc_compare, NULL);
2903 /* Now add [3] and sort by ascending long_term_pic_num */
2904 pos = priv->ref_pic_list_b1->len;
2905 gst_h264_dpb_get_pictures_long_term_ref (priv->dpb,
2906 FALSE, priv->ref_pic_list_b1);
2907 g_qsort_with_data (&g_array_index (priv->ref_pic_list_b1, gpointer, pos),
2908 priv->ref_pic_list_b1->len - pos, sizeof (gpointer),
2909 (GCompareDataFunc) long_term_pic_num_asc_compare, NULL);
2911 /* If lists identical, swap first two entries in RefPicList1 (spec
2913 if (priv->ref_pic_list_b1->len > 1
2914 && lists_are_equal (priv->ref_pic_list_b0, priv->ref_pic_list_b1)) {
2916 GstH264Picture **list = (GstH264Picture **) priv->ref_pic_list_b1->data;
2917 GstH264Picture *pic = list[0];
2922 print_ref_pic_list_b (self, priv->ref_pic_list_b0, "ref_pic_list_b0");
2923 print_ref_pic_list_b (self, priv->ref_pic_list_b1, "ref_pic_list_b1");
/* Build RefPicList0/RefPicList1 for a B *field* picture (8.2.4.2.4,
 * 8.2.4.2.5): refFrameList0/1ShortTerm are split around the current
 * POC and sorted in opposite directions, refFrameListLongTerm by
 * ascending long_term_frame_idx, then each is interleaved by field
 * parity into the final lists; identical lists get their first two
 * RefPicList1 entries swapped. */
2927 construct_ref_field_pic_lists_b (GstH264Decoder * self,
2928 GstH264Picture * current_picture)
2930 GstH264DecoderPrivate *priv = self->priv;
2933 /* refFrameList0ShortTerm (8.2.4.2.4) [[1] [2]], where:
2934 * [1] shortterm ref pics with POC < current_picture's POC sorted by descending POC,
2935 * [2] shortterm ref pics with POC > current_picture's POC by ascending POC,
2937 g_array_set_size (priv->ref_pic_list_b0, 0);
2938 g_array_set_size (priv->ref_pic_list_b1, 0);
2939 g_array_set_size (priv->ref_frame_list_0_short_term, 0);
2940 g_array_set_size (priv->ref_frame_list_1_short_term, 0);
2941 g_array_set_size (priv->ref_frame_list_long_term, 0);
2944 * When pic_order_cnt_type is equal to 0, reference pictures that are marked
2945 * as "non-existing" as specified in clause 8.2.5.2 are not included in either
2946 * RefPicList0 or RefPicList1
2948 gst_h264_dpb_get_pictures_short_term_ref (priv->dpb,
2949 current_picture->pic_order_cnt_type != 0, TRUE,
2950 priv->ref_frame_list_0_short_term);
2952 /* First sort ascending, this will put [1] in right place and finish
2954 print_ref_pic_list_b (self, priv->ref_frame_list_0_short_term,
2955 "ref_frame_list_0_short_term");
2956 g_array_sort (priv->ref_frame_list_0_short_term,
2957 (GCompareFunc) poc_asc_compare);
2958 print_ref_pic_list_b (self, priv->ref_frame_list_0_short_term,
2959 "ref_frame_list_0_short_term");
2961 /* Find first with POC > current_picture's POC to get first element
2963 pos = split_ref_pic_list_b (self, priv->ref_frame_list_0_short_term,
2964 (GCompareFunc) poc_asc_compare);
2966 GST_DEBUG_OBJECT (self, "split point %i", pos);
2968 /* and sort [1] descending, thus finishing sequence [1] [2]. */
2969 g_qsort_with_data (priv->ref_frame_list_0_short_term->data, pos,
2970 sizeof (gpointer), (GCompareDataFunc) poc_desc_compare, NULL);
2972 /* refFrameList1ShortTerm (8.2.4.2.4) [[1] [2]], where:
2973 * [1] shortterm ref pics with POC > curr_pic's POC sorted by ascending POC,
2974 * [2] shortterm ref pics with POC < curr_pic's POC by descending POC,
2976 gst_h264_dpb_get_pictures_short_term_ref (priv->dpb,
2977 current_picture->pic_order_cnt_type != 0, TRUE,
2978 priv->ref_frame_list_1_short_term);
2980 /* First sort by descending POC. */
2981 g_array_sort (priv->ref_frame_list_1_short_term,
2982 (GCompareFunc) poc_desc_compare);
2984 /* Split at first with POC < current_picture's POC to get first element
2986 pos = split_ref_pic_list_b (self, priv->ref_frame_list_1_short_term,
2987 (GCompareFunc) poc_desc_compare);
2989 /* and sort [1] ascending. */
2990 g_qsort_with_data (priv->ref_frame_list_1_short_term->data, pos,
2991 sizeof (gpointer), (GCompareDataFunc) poc_asc_compare, NULL);
2993 /* 8.2.4.2.2 refFrameList0LongTerm,:
2994 * long-term ref pictures sorted by ascending long_term_frame_idx.
2996 gst_h264_dpb_get_pictures_long_term_ref (priv->dpb,
2997 TRUE, priv->ref_frame_list_long_term);
2998 g_array_sort (priv->ref_frame_list_long_term,
2999 (GCompareFunc) long_term_frame_idx_asc_compare);
3001 /* 8.2.4.2.5 RefPicList0 */
3002 init_picture_refs_fields_1 (self, current_picture->field,
3003 priv->ref_frame_list_0_short_term, priv->ref_pic_list_b0);
3004 init_picture_refs_fields_1 (self, current_picture->field,
3005 priv->ref_frame_list_long_term, priv->ref_pic_list_b0);
3007 /* 8.2.4.2.5 RefPicList1 */
3008 init_picture_refs_fields_1 (self, current_picture->field,
3009 priv->ref_frame_list_1_short_term, priv->ref_pic_list_b1);
3010 init_picture_refs_fields_1 (self, current_picture->field,
3011 priv->ref_frame_list_long_term, priv->ref_pic_list_b1);
3013 /* If lists identical, swap first two entries in RefPicList1 (spec
3015 if (priv->ref_pic_list_b1->len > 1
3016 && lists_are_equal (priv->ref_pic_list_b0, priv->ref_pic_list_b1)) {
3018 GstH264Picture **list = (GstH264Picture **) priv->ref_pic_list_b1->data;
3019 GstH264Picture *pic = list[0];
3024 print_ref_pic_list_b (self, priv->ref_pic_list_b0, "ref_pic_list_b0");
3025 print_ref_pic_list_b (self, priv->ref_pic_list_b1, "ref_pic_list_b1");
3027 /* Clear temporary lists, now pictures are owned by ref_pic_list_b0
3028 * and ref_pic_list_b1 */
3029 g_array_set_size (priv->ref_frame_list_0_short_term, 0);
3030 g_array_set_size (priv->ref_frame_list_1_short_term, 0);
3031 g_array_set_size (priv->ref_frame_list_long_term, 0);
/* Entry point for reference-list construction: verify the DPB holds at
 * least one usable (non-"non-existing") reference, then dispatch to the
 * frame or field variants of the P and B list builders; with no usable
 * references, the lists are cleared instead. */
3035 gst_h264_decoder_prepare_ref_pic_lists (GstH264Decoder * self,
3036 GstH264Picture * current_picture)
3038 GstH264DecoderPrivate *priv = self->priv;
3039 gboolean construct_list = FALSE;
3041 GArray *dpb_array = gst_h264_dpb_get_pictures_all (priv->dpb);
3043 /* 8.2.4.2.1 ~ 8.2.4.2.4
3044 * When this process is invoked, there shall be at least one reference entry
3045 * that is currently marked as "used for reference"
3046 * (i.e., as "used for short-term reference" or "used for long-term reference")
3047 * and is not marked as "non-existing"
3049 for (i = 0; i < dpb_array->len; i++) {
3050 GstH264Picture *picture = g_array_index (dpb_array, GstH264Picture *, i);
3051 if (GST_H264_PICTURE_IS_REF (picture) && !picture->nonexisting) {
3052 construct_list = TRUE;
3056 g_array_unref (dpb_array);
3058 if (!construct_list) {
3059 gst_h264_decoder_clear_ref_pic_lists (self);
/* Frame pictures use the pic_num/POC builders; fields the parity ones */
3063 if (GST_H264_PICTURE_IS_FRAME (current_picture)) {
3064 construct_ref_pic_lists_p (self, current_picture);
3065 construct_ref_pic_lists_b (self, current_picture);
3067 construct_ref_field_pic_lists_p (self, current_picture);
3068 construct_ref_field_pic_lists_b (self, current_picture);
/* Empty the cached P and B reference picture lists (element clear-funcs,
 * if set on the arrays, handle unreffing the stored pictures). */
3073 gst_h264_decoder_clear_ref_pic_lists (GstH264Decoder * self)
3075 GstH264DecoderPrivate *priv = self->priv;
3077 g_array_set_size (priv->ref_pic_list_p0, 0);
3078 g_array_set_size (priv->ref_pic_list_b0, 0);
3079 g_array_set_size (priv->ref_pic_list_b1, 0);
/* LongTermPicNumF (8.2.4.3.2): the picture's long_term_pic_num for
 * long-term references, else the out-of-range sentinel
 * 2 * (MaxLongTermFrameIdx + 1) so it never matches a modification. */
3083 long_term_pic_num_f (GstH264Decoder * self, const GstH264Picture * picture)
3085 if (GST_H264_PICTURE_IS_LONG_TERM_REF (picture))
3086 return picture->long_term_pic_num;
3087 return 2 * (self->priv->max_long_term_frame_idx + 1);
/* PicNumF (8.2.4.3.1): the picture's pic_num for short-term references,
 * else the out-of-range sentinel MaxPicNum. */
3091 pic_num_f (GstH264Decoder * self, const GstH264Picture * picture)
3093 if (!GST_H264_PICTURE_IS_LONG_TERM_REF (picture))
3094 return picture->pic_num;
3095 return self->priv->max_pic_num;
/* shift elements on the |array| starting from |from| to |to|,
 * inclusive, one position to the right and insert pic at |from| */
/* Grows the array to to+2 first so the shifted tail has room;
 * g_array_insert_val then performs the shift and insertion. */
3101 shift_right_and_insert (GArray * array, gint from, gint to,
3102 GstH264Picture * picture)
3104 g_return_if_fail (from <= to);
3105 g_return_if_fail (array && picture);
3107 g_array_set_size (array, to + 2);
3108 g_array_insert_val (array, from, picture);
3111 /* This can process either ref_pic_list0 or ref_pic_list1, depending
3112 * on the list argument. Set up pointers to proper list to be
3113 * processed here. */
3115 modify_ref_pic_list (GstH264Decoder * self, int list)
3117 GstH264DecoderPrivate *priv = self->priv;
3118 GstH264Picture *picture = priv->current_picture;
3119 GArray *ref_pic_listx;
3120 const GstH264SliceHdr *slice_hdr = &priv->current_slice.header;
3121 const GstH264RefPicListModification *list_mod;
3122 gboolean ref_pic_list_modification_flag_lX;
3123 gint num_ref_idx_lX_active_minus1;
3124 guint num_ref_pic_list_modifications;
3126 gint pic_num_lx_pred = picture->pic_num;
3127 gint ref_idx_lx = 0, src, dst;
3128 gint pic_num_lx_no_wrap;
3130 gboolean done = FALSE;
3131 GstH264Picture *pic;
3134 ref_pic_listx = priv->ref_pic_list0;
3135 ref_pic_list_modification_flag_lX =
3136 slice_hdr->ref_pic_list_modification_flag_l0;
3137 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
3138 num_ref_idx_lX_active_minus1 = slice_hdr->num_ref_idx_l0_active_minus1;
3139 list_mod = slice_hdr->ref_pic_list_modification_l0;
/* NOTE(review): partial view — this is the interior of the reference
 * picture list modification process (H.264 spec clause 8.2.4.3).  The
 * enclosing function's signature and several structural lines (braces,
 * case labels, breaks) fall outside this chunk, so only comments are
 * added here; the code is untouched. */
/* L1 branch: operate on RefPicList1 using the l1 slice-header fields */
3141 ref_pic_listx = priv->ref_pic_list1;
3142 ref_pic_list_modification_flag_lX =
3143 slice_hdr->ref_pic_list_modification_flag_l1;
3144 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
3145 num_ref_idx_lX_active_minus1 = slice_hdr->num_ref_idx_l1_active_minus1;
3146 list_mod = slice_hdr->ref_pic_list_modification_l1;
3149 /* Resize the list to the size requested in the slice header.
3151 * Note that per 8.2.4.2 it's possible for
3152 * num_ref_idx_lX_active_minus1 to indicate there should be more ref
3153 * pics on list than we constructed. Those superfluous ones should
3154 * be treated as non-reference and will be initialized to null,
3155 * which must be handled by clients */
3156 g_assert (num_ref_idx_lX_active_minus1 >= 0);
/* Trim any excess entries beyond the active count requested by the slice */
3157 if (ref_pic_listx->len > num_ref_idx_lX_active_minus1 + 1)
3158 g_array_set_size (ref_pic_listx, num_ref_idx_lX_active_minus1 + 1);
/* No explicit modification commands signalled: the initial list order
 * stands as-is */
3160 if (!ref_pic_list_modification_flag_lX)
3164 * Reorder pictures on the list in a way specified in the stream. */
/* Apply each modification command in order; `done` is set by the
 * end-of-list command (idc == 3) below */
3165 for (i = 0; i < num_ref_pic_list_modifications && !done; i++) {
3166 switch (list_mod->modification_of_pic_nums_idc) {
3167 /* 8.2.4.3.1 - Modify short reference picture position. */
3171 if (list_mod->modification_of_pic_nums_idc == 0) {
3172 /* Subtract given value from predicted PicNum. */
3173 pic_num_lx_no_wrap = pic_num_lx_pred -
3174 (list_mod->value.abs_diff_pic_num_minus1 + 1);
3175 /* Wrap around max_pic_num if it becomes < 0 as result of
3177 if (pic_num_lx_no_wrap < 0)
3178 pic_num_lx_no_wrap += priv->max_pic_num;
3180 /* Add given value to predicted PicNum. */
3181 pic_num_lx_no_wrap = pic_num_lx_pred +
3182 (list_mod->value.abs_diff_pic_num_minus1 + 1);
3183 /* Wrap around max_pic_num if it becomes >= max_pic_num as
3184 * result of the addition */
3185 if (pic_num_lx_no_wrap >= priv->max_pic_num)
3186 pic_num_lx_no_wrap -= priv->max_pic_num;
3189 /* For use in next iteration */
3190 pic_num_lx_pred = pic_num_lx_no_wrap;
/* Derive PicNumLX from PicNumLXNoWrap: values above the current
 * picture's pic_num refer to pictures from before the wrap point */
3193 if (pic_num_lx_no_wrap > picture->pic_num)
3194 pic_num_lx = pic_num_lx_no_wrap - priv->max_pic_num;
3196 pic_num_lx = pic_num_lx_no_wrap;
3199 g_assert (num_ref_idx_lX_active_minus1 + 1 < 32);
/* Look up the short-term reference with the computed PicNum in the DPB */
3200 pic = gst_h264_dpb_get_short_ref_by_pic_num (priv->dpb, pic_num_lx);
3202 GST_WARNING_OBJECT (self, "Malformed stream, no pic num %d",
/* Shift existing entries right and insert the found picture at the
 * current index */
3206 shift_right_and_insert (ref_pic_listx, ref_idx_lx,
3207 num_ref_idx_lX_active_minus1, pic);
/* Compact the list: keep every entry whose PicNum differs from the
 * one just inserted, which removes the duplicate occurrence */
3210 for (src = ref_idx_lx, dst = ref_idx_lx;
3211 src <= num_ref_idx_lX_active_minus1 + 1; src++) {
3212 GstH264Picture *src_pic =
3213 g_array_index (ref_pic_listx, GstH264Picture *, src);
/* -1 for null entries so they are never treated as a match */
3214 gint src_pic_num_lx = src_pic ? pic_num_f (self, src_pic) : -1;
3215 if (src_pic_num_lx != pic_num_lx)
3216 g_array_index (ref_pic_listx, GstH264Picture *, dst++) = src_pic;
3221 /* 8.2.4.3.2 - Long-term reference pictures */
3224 g_assert (num_ref_idx_lX_active_minus1 + 1 < 32);
/* Look up the long-term reference by its LongTermPicNum in the DPB */
3225 pic = gst_h264_dpb_get_long_ref_by_long_term_pic_num (priv->dpb,
3226 list_mod->value.long_term_pic_num);
3228 GST_WARNING_OBJECT (self, "Malformed stream, no pic num %d",
3229 list_mod->value.long_term_pic_num);
3232 shift_right_and_insert (ref_pic_listx, ref_idx_lx,
3233 num_ref_idx_lX_active_minus1, pic);
/* As above: drop the duplicate long-term entry left behind by the
 * insertion */
3236 for (src = ref_idx_lx, dst = ref_idx_lx;
3237 src <= num_ref_idx_lX_active_minus1 + 1; src++) {
3238 GstH264Picture *src_pic =
3239 g_array_index (ref_pic_listx, GstH264Picture *, src);
3240 if (long_term_pic_num_f (self, src_pic) !=
3241 list_mod->value.long_term_pic_num)
3242 g_array_index (ref_pic_listx, GstH264Picture *, dst++) = src_pic;
3247 /* End of modification list */
3253 /* may be recoverable */
/* Unknown idc value: warn but keep decoding (non-fatal) */
3254 GST_WARNING ("Invalid modification_of_pic_nums_idc = %d",
3255 list_mod->modification_of_pic_nums_idc);
3262 /* Per NOTE 2 in 8.2.4.3.2, the ref_pic_listx in the above loop is
3263 * temporarily made one element longer than the required final list.
3264 * Resize the list back to its required size. */
3265 if (ref_pic_listx->len > num_ref_idx_lX_active_minus1 + 1)
3266 g_array_set_size (ref_pic_listx, num_ref_idx_lX_active_minus1 + 1);
/* Replace the contents of @dest with a shallow copy of @src's pointer
 * elements.  No reference counting is visible here — presumably the
 * pictures stay owned by the DPB / source list; TODO confirm ownership
 * at the callers. */
3272 copy_pic_list_into (GArray * dest, GArray * src)
/* Empty @dest first so the result is exactly @src's contents */
3275 g_array_set_size (dest, 0);
3277 for (i = 0; i < src->len; i++)
3278 g_array_append_val (dest, g_array_index (src, gpointer, i));
/* Build the final RefPicList0/RefPicList1 for the current slice: start
 * from the pre-built initial lists (p0 for P/SP, b0/b1 for B) and apply
 * the stream-signalled modification process of spec 8.2.4.  Returns the
 * result(s) of modify_ref_pic_list() — both lists must succeed for a B
 * slice. */
3282 gst_h264_decoder_modify_ref_pic_lists (GstH264Decoder * self)
3284 GstH264DecoderPrivate *priv = self->priv;
3285 GstH264SliceHdr *slice_hdr = &priv->current_slice.header;
/* Start from empty output lists */
3287 g_array_set_size (priv->ref_pic_list0, 0);
3288 g_array_set_size (priv->ref_pic_list1, 0);
3290 if (GST_H264_IS_P_SLICE (slice_hdr) || GST_H264_IS_SP_SLICE (slice_hdr)) {
3291 /* 8.2.4 fill reference picture list RefPicList0 for P or SP slice */
3292 copy_pic_list_into (priv->ref_pic_list0, priv->ref_pic_list_p0);
3293 return modify_ref_pic_list (self, 0);
3294 } else if (GST_H264_IS_B_SLICE (slice_hdr)) {
3295 /* 8.2.4 fill reference picture list RefPicList0 and RefPicList1 for B slice */
3296 copy_pic_list_into (priv->ref_pic_list0, priv->ref_pic_list_b0);
3297 copy_pic_list_into (priv->ref_pic_list1, priv->ref_pic_list_b1);
3298 return modify_ref_pic_list (self, 0)
3299 && modify_ref_pic_list (self, 1);
3306 * gst_h264_decoder_set_process_ref_pic_lists:
3307 * @decoder: a #GstH264Decoder
3308 * @process: whether subclass is requiring reference picture modification process
3310 * Called to enable/disable the reference picture modification process.
3315 gst_h264_decoder_set_process_ref_pic_lists (GstH264Decoder * decoder,
/* Simple flag setter: records whether the subclass wants the base class
 * to run the reference picture list modification process */
3318 decoder->priv->process_ref_pic_lists = process;
3322 * gst_h264_decoder_get_picture:
3323 * @decoder: a #GstH264Decoder
3324 * @system_frame_number: a target system frame number of #GstH264Picture
3326 * Retrieve the DPB and return a #GstH264Picture corresponding to
3327 * the @system_frame_number
3329 * Returns: (transfer full) (nullable): a #GstH264Picture if successful, or %NULL otherwise
3334 gst_h264_decoder_get_picture (GstH264Decoder * decoder,
3335 guint32 system_frame_number)
/* Thin wrapper: delegates the lookup to the DPB by system frame number;
 * (transfer full) per the annotation above — caller unrefs the result */
3337 return gst_h264_dpb_get_picture (decoder->priv->dpb, system_frame_number);