decoder: h264: fix memory leak in PPS.
[platform/upstream/gstreamer-vaapi.git] gst-libs/gst/vaapi/gstvaapidecoder_h264.c
index 1e709b5..7caabfb 100644
--- a/gst-libs/gst/vaapi/gstvaapidecoder_h264.c
+++ b/gst-libs/gst/vaapi/gstvaapidecoder_h264.c
@@ -104,6 +104,9 @@ gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
     case GST_H264_NAL_SUBSET_SPS:
         gst_h264_sps_clear(&pi->data.sps);
         break;
+    case GST_H264_NAL_PPS:
+        gst_h264_pps_clear(&pi->data.pps);
+        break;
     case GST_H264_NAL_SEI:
         if (pi->data.sei) {
             g_array_unref(pi->data.sei);
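
Note on the leak fixed above (not part of the patch): gst_h264_parser_parse_pps() may allocate memory inside the GstH264PPS it fills in (e.g. the slice group id map), and only gst_h264_pps_clear() releases it, so the parser info finalize path needs the same clear that the SPS case already has. A minimal standalone sketch of that parse/clear pairing; "parser" and "nalu" are assumed to be a valid GstH264NalParser and a PPS GstH264NalUnit:

    GstH264PPS pps = { 0, };

    if (gst_h264_parser_parse_pps(parser, nalu, &pps) == GST_H264_PARSER_OK) {
        /* ... use the parsed PPS ... */
    }
    /* releases anything the parser allocated inside the PPS */
    gst_h264_pps_clear(&pps);
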
@@ -445,6 +448,7 @@ struct _GstVaapiDecoderH264Private {
     GstH264NalParser           *parser;
     guint                       parser_state;
     guint                       decoder_state;
+    GstVaapiStreamAlignH264     stream_alignment;
     GstVaapiPictureH264        *current_picture;
     GstVaapiParserInfoH264     *sps[GST_H264_MAX_SPS_COUNT];
     GstVaapiParserInfoH264     *active_sps;
@@ -688,11 +692,11 @@ dpb_output(
 {
     picture->output_needed = FALSE;
 
-    if (fs) {
-        if (--fs->output_needed > 0)
-            return TRUE;
-        picture = fs->buffers[0];
-    }
+    if (--fs->output_needed > 0)
+        return TRUE;
+
+    if (!GST_VAAPI_PICTURE_IS_COMPLETE(picture))
+        return TRUE;
     return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
 }
 
@@ -846,18 +850,25 @@ dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
         gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
     }
 
+    /* Compact the resulting DPB, i.e. remove holes */
     for (i = 0, n = 0; i < priv->dpb_count; i++) {
-        if (priv->dpb[i])
-            priv->dpb[n++] = priv->dpb[i];
+        if (priv->dpb[i]) {
+            if (i != n) {
+                priv->dpb[n] = priv->dpb[i];
+                priv->dpb[i] = NULL;
+            }
+            n++;
+        }
     }
     priv->dpb_count = n;
 
     /* Clear previous frame buffers only if this is a "flush-all" operation,
        or if the picture is the first one in the access unit */
-    if (!picture || GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
-            GST_VAAPI_PICTURE_FLAG_AU_START)) {
+    if (priv->prev_frames && (!picture ||
+            GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
+                GST_VAAPI_PICTURE_FLAG_AU_START))) {
         for (i = 0; i < priv->max_views; i++)
-            gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
+            gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
     }
 }
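
The loop above compacts the DPB array in place after frame stores have been released. A standalone sketch of the same hole-removal idiom, as a hypothetical helper that is not part of this file:

    /* Shift non-NULL entries down, clear the vacated slots and
     * return the new number of valid entries. */
    static guint
    compact_ptr_array(gpointer *array, guint count)
    {
        guint i, n;

        for (i = 0, n = 0; i < count; i++) {
            if (array[i]) {
                if (i != n) {
                    array[n] = array[i];
                    array[i] = NULL;
                }
                n++;
            }
        }
        return n;
    }
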
 
@@ -921,6 +932,14 @@ dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
             GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
         if (found_index >= 0)
             return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
+
+        // ... also check the previous picture that was immediately output
+        fs = priv->prev_frames[picture->base.voc];
+        if (fs && &fs->buffers[0]->base == picture->base.parent_picture) {
+            if (!gst_vaapi_frame_store_add(fs, picture))
+                return FALSE;
+            return dpb_output(decoder, fs, picture);
+        }
     }
 
     // Create new frame store, and split fields if necessary
@@ -930,6 +949,11 @@ dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
     gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
     gst_vaapi_frame_store_unref(fs);
 
+    if (picture->output_flag) {
+        picture->output_needed = TRUE;
+        fs->output_needed++;
+    }
+
     if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
         if (!gst_vaapi_frame_store_split_fields(fs))
             return FALSE;
@@ -953,20 +977,17 @@ dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
         if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
             return TRUE;
         while (priv->dpb_count == priv->dpb_size) {
+            GstVaapiPictureH264 *found_picture;
             if (!StoreInterViewOnlyRefFlag) {
-                if (dpb_find_lowest_poc(decoder, picture, NULL) < 0)
-                    return dpb_output(decoder, NULL, picture);
+                if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
+                    found_picture->base.poc > picture->base.poc)
+                    return dpb_output(decoder, fs, picture);
             }
             if (!dpb_bump(decoder, picture))
                 return FALSE;
         }
     }
-
     gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
-    if (picture->output_flag) {
-        picture->output_needed = TRUE;
-        fs->output_needed++;
-    }
     return TRUE;
 }
 
@@ -975,9 +996,6 @@ dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
 {
     GstVaapiDecoderH264Private * const priv = &decoder->priv;
 
-    if (dpb_size < priv->dpb_count)
-        return FALSE;
-
     if (dpb_size > priv->dpb_size_max) {
         priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
         if (!priv->dpb)
@@ -986,11 +1004,7 @@ dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
             (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
         priv->dpb_size_max = dpb_size;
     }
-
-    if (priv->dpb_size < dpb_size)
-        priv->dpb_size = dpb_size;
-    else if (dpb_size < priv->dpb_count)
-        return FALSE;
+    priv->dpb_size = dpb_size;
 
     GST_DEBUG("DPB size %u", priv->dpb_size);
     return TRUE;
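
dpb_reset() grows the DPB array with g_try_realloc_n() and zeroes only the newly appended tail, so existing frame-store pointers survive the resize. A standalone sketch of that grow-and-zero idiom, with a hypothetical helper name and assuming <glib.h> and <string.h> are included:

    static gboolean
    grow_ptr_array(gpointer **array, guint *alloc_count, guint new_count)
    {
        gpointer *p;

        if (new_count <= *alloc_count)
            return TRUE;
        p = g_try_realloc_n(*array, new_count, sizeof(*p));
        if (!p)
            return FALSE;
        /* clear only the newly appended entries */
        memset(&p[*alloc_count], 0, (new_count - *alloc_count) * sizeof(*p));
        *array = p;
        *alloc_count = new_count;
        return TRUE;
    }
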
@@ -1022,7 +1036,7 @@ mvc_reset(GstVaapiDecoderH264 *decoder)
 
     // Resize array of previous frame buffers
     for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
-        gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
+        gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
 
     priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
         sizeof(*priv->prev_frames));
@@ -1290,7 +1304,13 @@ ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
     GstVaapiProfile profile;
     GstVaapiChromaType chroma_type;
     gboolean reset_context = FALSE;
-    guint mb_width, mb_height, dpb_size;
+    guint mb_width, mb_height, dpb_size, num_views;
+
+    num_views = get_num_views(sps);
+    if (priv->max_views < num_views) {
+        priv->max_views = num_views;
+        GST_DEBUG("maximum number of views changed to %u", num_views);
+    }
 
     dpb_size = get_max_dec_frame_buffering(sps);
     if (priv->dpb_size < dpb_size) {
@@ -1446,12 +1466,12 @@ decode_current_picture(GstVaapiDecoderH264 *decoder)
     if (!picture)
         return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
+    if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
+        goto error;
     if (!exec_ref_pic_marking(decoder, picture))
         goto error;
     if (!dpb_add(decoder, picture))
         goto error;
-    if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
-        goto error;
     gst_vaapi_picture_replace(&priv->current_picture, NULL);
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
 
@@ -1485,9 +1505,6 @@ parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
     if (result != GST_H264_PARSER_OK)
         return get_status(result);
 
-    /* Reset defaults */
-    priv->max_views = 1;
-
     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
@@ -1567,7 +1584,6 @@ parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
     GstH264NalUnit * const nalu = &pi->nalu;
     GstH264SPS *sps;
     GstH264ParserResult result;
-    guint num_views;
 
     GST_DEBUG("parse slice");
 
@@ -1615,11 +1631,6 @@ parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
     sps = slice_hdr->pps->sequence;
 
     /* Update MVC data */
-    num_views = get_num_views(sps);
-    if (priv->max_views < num_views) {
-        priv->max_views = num_views;
-        GST_DEBUG("maximum number of views changed to %u", num_views);
-    }
     pi->view_id = get_view_id(&pi->nalu);
     pi->voc = get_view_order_index(sps, pi->view_id);
 
@@ -1669,6 +1680,7 @@ decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
 static GstVaapiDecoderStatus
 decode_sequence_end(GstVaapiDecoderH264 *decoder)
 {
+    GstVaapiDecoderH264Private * const priv = &decoder->priv;
     GstVaapiDecoderStatus status;
 
     GST_DEBUG("decode sequence-end");
@@ -1678,6 +1690,9 @@ decode_sequence_end(GstVaapiDecoderH264 *decoder)
         return status;
 
     dpb_flush(decoder, NULL);
+
+    /* Reset defaults, should there be a new sequence available next */
+    priv->max_views = 1;
     return GST_VAAPI_DECODER_STATUS_SUCCESS;
 }
 
@@ -3875,12 +3890,21 @@ gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
     guint i, size, buf_size, nalu_size, flags;
     guint32 start_code;
     gint ofs, ofs2;
+    gboolean at_au_end = FALSE;
 
     status = ensure_decoder(decoder);
     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
         return status;
 
-    size = gst_adapter_available(adapter);
+    switch (priv->stream_alignment) {
+    case GST_VAAPI_STREAM_ALIGN_H264_NALU:
+    case GST_VAAPI_STREAM_ALIGN_H264_AU:
+        size = gst_adapter_available_fast(adapter);
+        break;
+    default:
+        size = gst_adapter_available(adapter);
+        break;
+    }
 
     if (priv->is_avcC) {
         if (size < priv->nal_length_size)
@@ -3897,35 +3921,44 @@ gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
         buf_size = priv->nal_length_size + nalu_size;
         if (size < buf_size)
             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+        else if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
+            at_au_end = (buf_size == size);
     }
     else {
         if (size < 4)
             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
 
-        ofs = scan_for_start_code(adapter, 0, size, NULL);
-        if (ofs < 0)
-            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
-        if (ofs > 0) {
-            gst_adapter_flush(adapter, ofs);
-            size -= ofs;
-        }
+        if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
+            buf_size = size;
+        else {
+            ofs = scan_for_start_code(adapter, 0, size, NULL);
+            if (ofs < 0)
+                return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
 
-        ofs2 = ps->input_offset2 - ofs - 4;
-        if (ofs2 < 4)
-            ofs2 = 4;
+            if (ofs > 0) {
+                gst_adapter_flush(adapter, ofs);
+                size -= ofs;
+            }
 
-        ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
-            scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
-        if (ofs < 0) {
-            // Assume the whole NAL unit is present if end-of-stream
-            if (!at_eos) {
-                ps->input_offset2 = size;
-                return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+            ofs2 = ps->input_offset2 - ofs - 4;
+            if (ofs2 < 4)
+                ofs2 = 4;
+
+            ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
+                scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
+            if (ofs < 0) {
+                // Assume the whole NAL unit is present if end-of-stream
+                // or stream buffers aligned on access unit boundaries
+                if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
+                    at_au_end = TRUE;
+                else if (!at_eos) {
+                    ps->input_offset2 = size;
+                    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+                }
+                ofs = size;
             }
-            ofs = size;
+            buf_size = ofs;
         }
-        buf_size = ofs;
     }
     ps->input_offset2 = 0;
 
@@ -3983,6 +4016,10 @@ gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
         return status;
 
     flags = 0;
+    if (at_au_end) {
+        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END |
+            GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
+    }
     switch (pi->nalu.type) {
     case GST_H264_NAL_AU_DELIMITER:
         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
@@ -4014,7 +4051,12 @@ gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
     case GST_H264_NAL_SLICE_IDR:
     case GST_H264_NAL_SLICE:
         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
-        if (is_new_picture(pi, priv->prev_slice_pi)) {
+        if (priv->prev_pi &&
+            (priv->prev_pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)) {
+            flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+                GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+        }
+        else if (is_new_picture(pi, priv->prev_slice_pi)) {
             flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
             if (is_new_access_unit(pi, priv->prev_slice_pi))
                 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
@@ -4128,6 +4170,24 @@ gst_vaapi_decoder_h264_class(void)
 }
 
 /**
+ * gst_vaapi_decoder_h264_set_alignment:
+ * @decoder: a #GstVaapiDecoderH264
+ * @alignment: the #GstVaapiStreamAlignH264
+ *
+ * Specifies how stream buffers are aligned / fed, i.e. the boundaries
+ * of each buffer that is supplied to the decoder. This could be no
+ * specific alignment, NAL unit boundaries, or access unit boundaries.
+ */
+void
+gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
+    GstVaapiStreamAlignH264 alignment)
+{
+    g_return_if_fail(decoder != NULL);
+
+    decoder->priv.stream_alignment = alignment;
+}
+
+/**
  * gst_vaapi_decoder_h264_new:
  * @display: a #GstVaapiDisplay
  * @caps: a #GstCaps holding codec information
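
A minimal usage sketch for the alignment hint introduced above, assuming upstream pushes one complete access unit per buffer; the GST_VAAPI_DECODER_H264() cast macro comes from the corresponding header, and "display" / "caps" are assumed to be a valid #GstVaapiDisplay and #GstCaps:

    GstVaapiDecoder *decoder;

    decoder = gst_vaapi_decoder_h264_new(display, caps);
    gst_vaapi_decoder_h264_set_alignment(GST_VAAPI_DECODER_H264(decoder),
        GST_VAAPI_STREAM_ALIGN_H264_AU);

With NAL-unit or access-unit alignment set, the parse function can take the gst_adapter_available_fast() path, and with access-unit alignment the last NAL unit of each buffer is flagged FRAME_END/AU_END immediately instead of waiting for the next start code to arrive.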