static void gst_h264_decoder_clear_ref_pic_lists (GstH264Decoder * self);
static gboolean gst_h264_decoder_modify_ref_pic_lists (GstH264Decoder * self);
static gboolean
-gst_h264_decoder_sliding_window_picture_marking (GstH264Decoder * self);
+gst_h264_decoder_sliding_window_picture_marking (GstH264Decoder * self,
+ GstH264Picture * picture);
static void gst_h264_decoder_do_output_picture (GstH264Decoder * self,
GstH264Picture * picture);
continue;
if (GST_H264_PICTURE_IS_LONG_TERM_REF (picture)) {
- picture->long_term_pic_num = picture->long_term_frame_idx;
+ if (current_picture->field == GST_H264_PICTURE_FIELD_FRAME)
+ picture->long_term_pic_num = picture->long_term_frame_idx;
+ else if (current_picture->field == picture->field)
+ picture->long_term_pic_num = 2 * picture->long_term_frame_idx + 1;
+ else
+ picture->long_term_pic_num = 2 * picture->long_term_frame_idx;
} else {
if (picture->frame_num > frame_num)
picture->frame_num_wrap = picture->frame_num - priv->max_frame_num;
else
picture->frame_num_wrap = picture->frame_num;
- picture->pic_num = picture->frame_num_wrap;
+ if (current_picture->field == GST_H264_PICTURE_FIELD_FRAME)
+ picture->pic_num = picture->frame_num_wrap;
+ else if (picture->field == current_picture->field)
+ picture->pic_num = 2 * picture->frame_num_wrap + 1;
+ else
+ picture->pic_num = 2 * picture->frame_num_wrap;
}
}
unused_short_term_frame_num);
/* C.2.1 */
- if (!gst_h264_decoder_sliding_window_picture_marking (self)) {
+ if (!gst_h264_decoder_sliding_window_picture_marking (self, picture)) {
GST_ERROR_OBJECT (self,
"Couldn't perform sliding window picture marking");
return FALSE;
return TRUE;
}
+/* Create the second field picture paired with @picture (the first field).
+ * Hardware-specific setup is delegated to the subclass new_field_picture()
+ * vfunc; subclasses that do not implement it cannot decode interlaced
+ * streams.  The new picture gets the opposite field parity and is linked
+ * back to @picture through other_field.
+ *
+ * Returns: (transfer full): the new field picture, or %NULL on failure. */
+static GstH264Picture *
+gst_h264_decoder_new_field_picture (GstH264Decoder * self,
+ GstH264Picture * picture)
+{
+ GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
+ GstH264Picture *new_picture;
+
+ /* Should not happen */
+ g_assert (picture->field != GST_H264_PICTURE_FIELD_FRAME);
+
+ if (!klass->new_field_picture) {
+ GST_WARNING_OBJECT (self, "Subclass does not support interlaced stream");
+ return NULL;
+ }
+
+ new_picture = gst_h264_picture_new ();
+ if (!klass->new_field_picture (self, picture, new_picture)) {
+ GST_ERROR_OBJECT (self, "Subclass couldn't handle new field picture");
+ gst_h264_picture_unref (new_picture);
+
+ return NULL;
+ }
+
+ /* Pair the two fields: this is the second field with inverted parity */
+ new_picture->other_field = picture;
+ new_picture->second_field = TRUE;
+ new_picture->field = picture->field == GST_H264_PICTURE_FIELD_TOP_FIELD ?
+ GST_H264_PICTURE_FIELD_BOTTOM_FIELD : GST_H264_PICTURE_FIELD_TOP_FIELD;
+
+ return new_picture;
+}
+
+/* Determine whether the current slice completes a previously decoded first
+ * field (PAFF) or starts a new picture.  On success, *first_field is set to
+ * the previous incomplete field picture when this slice is its second field,
+ * or %NULL when the slice begins a new picture.  The returned pointer is
+ * borrowed from the DPB (no ref transferred).
+ *
+ * Returns: %FALSE when the field sequence is broken and gap handling would
+ * be required (not implemented yet), %TRUE otherwise. */
+static gboolean
+gst_h264_decoder_find_first_field_picture (GstH264Decoder * self,
+    GstH264Slice * slice, GstH264Picture ** first_field)
+{
+  GstH264DecoderPrivate *priv = self->priv;
+  const GstH264SliceHdr *slice_hdr = &slice->header;
+  GstH264Picture *prev_picture;
+  GArray *pictures;
+
+  *first_field = NULL;
+
+  /* DPB is empty, must be the first field */
+  if (gst_h264_dpb_get_size (priv->dpb) == 0)
+    return TRUE;
+
+  pictures = gst_h264_dpb_get_pictures_all (priv->dpb);
+  prev_picture = g_array_index (pictures, GstH264Picture *, pictures->len - 1);
+  /* The array holds its own refs (transfer full); release it right away so
+   * no return path below can leak it.  prev_picture stays valid because the
+   * DPB itself still holds a reference to every picture it contains. */
+  g_array_unref (pictures);
+
+  /* This is not a field picture */
+  if (!slice_hdr->field_pic_flag) {
+    /* Check whether the last picture is complete or not */
+    if (prev_picture->field != GST_H264_PICTURE_FIELD_FRAME &&
+        !prev_picture->other_field) {
+      GST_WARNING_OBJECT (self, "Previous picture %p (poc %d) is not complete",
+          prev_picture, prev_picture->pic_order_cnt);
+
+      /* FIXME: implement fill gap field picture */
+      return FALSE;
+    }
+
+    return TRUE;
+  }
+
+  /* Previous picture was not a field picture or complete already */
+  if (prev_picture->field == GST_H264_PICTURE_FIELD_FRAME ||
+      prev_picture->other_field)
+    return TRUE;
+
+  /* Same frame_num means this slice may be the second field of the
+   * previous picture (7.4.3: fields of one frame share frame_num) */
+  if (prev_picture->frame_num == slice_hdr->frame_num) {
+    GstH264PictureField current_field = slice_hdr->bottom_field_flag ?
+        GST_H264_PICTURE_FIELD_BOTTOM_FIELD : GST_H264_PICTURE_FIELD_TOP_FIELD;
+
+    if (current_field == prev_picture->field) {
+      GST_WARNING_OBJECT (self,
+          "Current picture and previous picture have identical field %d",
+          current_field);
+
+      /* FIXME: implement fill gap field picture */
+      return FALSE;
+    }
+
+    *first_field = prev_picture;
+    return TRUE;
+  }
+
+  return TRUE;
+}
+
static gboolean
gst_h264_decoder_parse_slice (GstH264Decoder * self, GstH264NalUnit * nalu)
{
if (!priv->current_picture) {
GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
- GstH264Picture *picture;
+ GstH264Picture *picture = NULL;
+ GstH264Picture *first_field = NULL;
gboolean ret = TRUE;
- picture = gst_h264_picture_new ();
- /* This allows accessing the frame from the picture. */
- picture->system_frame_number = priv->current_frame->system_frame_number;
-
- priv->current_picture = picture;
g_assert (priv->current_frame);
- if (klass->new_picture)
- ret = klass->new_picture (self, priv->current_frame, picture);
-
- if (!ret) {
- GST_ERROR_OBJECT (self, "subclass does not want accept new picture");
- priv->current_picture = NULL;
- gst_h264_picture_unref (picture);
+ if (!gst_h264_decoder_find_first_field_picture (self,
+ &priv->current_slice, &first_field)) {
+ GST_ERROR_OBJECT (self, "Couldn't find or determine first picture");
return FALSE;
}
+ if (first_field) {
+ picture = gst_h264_decoder_new_field_picture (self, first_field);
+
+ if (!picture) {
+ GST_ERROR_OBJECT (self, "Couldn't duplicate the first field picture");
+ return FALSE;
+ }
+ } else {
+ picture = gst_h264_picture_new ();
+
+ if (klass->new_picture)
+ ret = klass->new_picture (self, priv->current_frame, picture);
+
+ if (!ret) {
+ GST_ERROR_OBJECT (self, "subclass does not want accept new picture");
+ gst_h264_picture_unref (picture);
+ return FALSE;
+ }
+ }
+
+ /* This allows accessing the frame from the picture. */
+ picture->system_frame_number = priv->current_frame->system_frame_number;
+ priv->current_picture = picture;
+
if (!gst_h264_decoder_start_current_picture (self)) {
GST_ERROR_OBJECT (self, "start picture failed");
return FALSE;
picture->nal_ref_idc = slice->nalu.ref_idc;
if (slice->nalu.ref_idc != 0)
- gst_h264_picture_set_reference (picture, GST_H264_PICTURE_REF_SHORT_TERM);
+ gst_h264_picture_set_reference (picture,
+ GST_H264_PICTURE_REF_SHORT_TERM, FALSE);
- /* This assumes non-interlaced stream */
- picture->frame_num = picture->pic_num = slice_hdr->frame_num;
+ picture->frame_num = slice_hdr->frame_num;
+
+ /* 7.4.3 */
+ if (!slice_hdr->field_pic_flag)
+ picture->pic_num = slice_hdr->frame_num;
+ else
+ picture->pic_num = 2 * slice_hdr->frame_num + 1;
picture->pic_order_cnt_type = sps->pic_order_cnt_type;
switch (picture->pic_order_cnt_type) {
}
static gboolean
-gst_h264_decoder_sliding_window_picture_marking (GstH264Decoder * self)
+gst_h264_decoder_sliding_window_picture_marking (GstH264Decoder * self,
+ GstH264Picture * picture)
{
GstH264DecoderPrivate *priv = self->priv;
const GstH264SPS *sps = priv->active_sps;
gint num_ref_pics;
gint max_num_ref_frames;
+ /* Skip this for the second field */
+ if (picture->second_field)
+ return TRUE;
+
if (!sps) {
GST_ERROR_OBJECT (self, "No active sps");
return FALSE;
"Unmark reference flag of picture %p (frame_num %d, poc %d)",
to_unmark, to_unmark->frame_num, to_unmark->pic_order_cnt);
- gst_h264_picture_set_reference (to_unmark, GST_H264_PICTURE_REF_NONE);
+ gst_h264_picture_set_reference (to_unmark, GST_H264_PICTURE_REF_NONE, TRUE);
gst_h264_picture_unref (to_unmark);
num_ref_pics--;
gst_h264_dpb_mark_all_non_ref (priv->dpb);
if (picture->dec_ref_pic_marking.long_term_reference_flag) {
- gst_h264_picture_set_reference (picture, GST_H264_PICTURE_REF_LONG_TERM);
+ gst_h264_picture_set_reference (picture,
+ GST_H264_PICTURE_REF_LONG_TERM, FALSE);
picture->long_term_frame_idx = 0;
priv->max_long_term_frame_idx = 0;
} else {
- gst_h264_picture_set_reference (picture, GST_H264_PICTURE_REF_SHORT_TERM);
+ gst_h264_picture_set_reference (picture,
+ GST_H264_PICTURE_REF_SHORT_TERM, FALSE);
priv->max_long_term_frame_idx = -1;
}
return gst_h264_decoder_handle_memory_management_opt (self, picture);
}
- return gst_h264_decoder_sliding_window_picture_marking (self);
+ return gst_h264_decoder_sliding_window_picture_marking (self, picture);
}
static gboolean
gst_h264_decoder_finish_picture (GstH264Decoder * self,
GstH264Picture * picture)
{
+ GstVideoDecoder *decoder = GST_VIDEO_DECODER (self);
GstH264DecoderPrivate *priv = self->priv;
/* Finish processing the picture.
/* Remove unused (for reference or later output) pictures from DPB, marking
* them as such */
gst_h264_dpb_delete_unused (priv->dpb);
+
+ /* If this is the second field, drop corresponding frame */
+ if (picture->second_field) {
+ GstVideoCodecFrame *frame = gst_video_decoder_get_frame (decoder,
+ picture->system_frame_number);
+
+ gst_video_decoder_release_frame (decoder, frame);
+ }
+
gst_h264_dpb_add (priv->dpb, picture);
GST_LOG_OBJECT (self,
gint max_dpb_frames;
gint max_dpb_size;
gint prev_max_dpb_size;
+ gboolean prev_interlaced;
+ gboolean interlaced;
- if (sps->frame_mbs_only_flag == 0 && !klass->new_field_picture) {
- GST_FIXME_OBJECT (self,
- "frame_mbs_only_flag != 1 not supported by subclass");
- return FALSE;
+ if (sps->frame_mbs_only_flag == 0) {
+ if (!klass->new_field_picture) {
+ GST_FIXME_OBJECT (self,
+ "frame_mbs_only_flag != 1 not supported by subclass");
+ return FALSE;
+ }
+
+ if (sps->mb_adaptive_frame_field_flag) {
+ GST_LOG_OBJECT (self,
+ "mb_adaptive_frame_field_flag == 1, MBAFF sequence");
+ } else {
+ GST_LOG_OBJECT (self, "mb_adaptive_frame_field_flag == 0, PAFF sequence");
+ }
}
+ interlaced = !sps->frame_mbs_only_flag;
+
/* Spec A.3.1 and A.3.2
* For Baseline, Constrained Baseline and Main profile, the indicated level is
* Level 1b if level_idc is equal to 11 and constraint_set3_flag is equal to 1
g_return_val_if_fail (max_dpb_size <= GST_H264_DPB_MAX_SIZE, FALSE);
prev_max_dpb_size = gst_h264_dpb_get_max_num_frames (priv->dpb);
+ prev_interlaced = gst_h264_dpb_get_interlaced (priv->dpb);
if (priv->width != sps->width || priv->height != sps->height ||
- prev_max_dpb_size != max_dpb_size) {
+ prev_max_dpb_size != max_dpb_size || prev_interlaced != interlaced) {
GstH264DecoderClass *klass = GST_H264_DECODER_GET_CLASS (self);
GST_DEBUG_OBJECT (self,
- "SPS updated, resolution: %dx%d -> %dx%d, dpb size: %d -> %d",
+ "SPS updated, resolution: %dx%d -> %dx%d, dpb size: %d -> %d, "
+ "interlaced %d -> %d",
priv->width, priv->height, sps->width, sps->height,
- prev_max_dpb_size, max_dpb_size);
+ prev_max_dpb_size, max_dpb_size, prev_interlaced, interlaced);
if (gst_h264_decoder_drain (GST_VIDEO_DECODER (self)) != GST_FLOW_OK)
return FALSE;
gst_h264_decoder_set_latency (self, sps, max_dpb_size);
gst_h264_dpb_set_max_num_frames (priv->dpb, max_dpb_size);
+ gst_h264_dpb_set_interlaced (priv->dpb, interlaced);
}
return gst_h264_decoder_update_max_num_reorder_frames (self, sps);
gint max_num_frames;
gint num_output_needed;
gint32 last_output_poc;
+
+ gboolean interlaced;
};
static void
return dpb->max_num_frames;
}
+/**
+ * gst_h264_dpb_set_interlaced:
+ * @dpb: a #GstH264Dpb
+ * @interlaced: %TRUE if interlaced
+ *
+ * Configure whether @dpb holds field pictures of an interlaced stream.
+ * This affects frame-buffer accounting (two fields count as one frame).
+ *
+ * Since: 1.20
+ */
+void
+gst_h264_dpb_set_interlaced (GstH264Dpb * dpb, gboolean interlaced)
+{
+ g_return_if_fail (dpb != NULL);
+
+ dpb->interlaced = interlaced;
+}
+
+/**
+ * gst_h264_dpb_get_interlaced:
+ * @dpb: a #GstH264Dpb
+ *
+ * Accessor for the flag set by gst_h264_dpb_set_interlaced().
+ *
+ * Returns: %TRUE if @dpb is configured for interlaced stream
+ *
+ * Since: 1.20
+ */
+gboolean
+gst_h264_dpb_get_interlaced (GstH264Dpb * dpb)
+{
+ g_return_val_if_fail (dpb != NULL, FALSE);
+
+ return dpb->interlaced;
+}
+
/**
* gst_h264_dpb_free:
* @dpb: a #GstH264Dpb to free
* as "not needed for output", and the DPB fullness is incremented by one */
if (!picture->nonexisting) {
picture->needed_for_output = TRUE;
- dpb->num_output_needed++;
+
+ if (picture->field == GST_H264_PICTURE_FIELD_FRAME) {
+ dpb->num_output_needed++;
+ } else {
+ /* We can do output only when field pair are complete */
+ if (picture->second_field) {
+ dpb->num_output_needed++;
+
+ /* And link each field */
+ if (picture->other_field)
+ picture->other_field->other_field = picture;
+ }
+ }
} else {
picture->needed_for_output = FALSE;
}
/* NOTE: don't use g_array_remove_index_fast here since the last picture
* need to be referenced for bumping decision */
if (!picture->needed_for_output && !GST_H264_PICTURE_IS_REF (picture)) {
- GST_TRACE ("remove picture %p (frame num %d) from dpb",
- picture, picture->frame_num);
+ GST_TRACE
+ ("remove picture %p (frame num: %d, poc: %d, field: %d) from dpb",
+ picture, picture->frame_num, picture->pic_order_cnt, picture->field);
g_array_remove_index (dpb->pic_list, i);
i--;
}
GstH264Picture *picture =
g_array_index (dpb->pic_list, GstH264Picture *, i);
+ /* Count frame, not field picture */
+ if (picture->second_field)
+ continue;
+
if (GST_H264_PICTURE_IS_REF (picture))
ret++;
}
GstH264Picture *picture =
g_array_index (dpb->pic_list, GstH264Picture *, i);
- gst_h264_picture_set_reference (picture, GST_H264_PICTURE_REF_NONE);
+ gst_h264_picture_set_reference (picture, GST_H264_PICTURE_REF_NONE, FALSE);
}
}
static gboolean
gst_h264_dpb_has_empty_frame_buffer (GstH264Dpb * dpb)
{
- if (dpb->pic_list->len <= dpb->max_num_frames)
- return TRUE;
+ if (!dpb->interlaced) {
+ if (dpb->pic_list->len <= dpb->max_num_frames)
+ return TRUE;
+ } else {
+ gint i;
+ gint count = 0;
+ /* Count pictures without second fields */
+ for (i = 0; i < dpb->pic_list->len; i++) {
+ GstH264Picture *picture =
+ g_array_index (dpb->pic_list, GstH264Picture *, i);
+
+ if (picture->second_field)
+ continue;
+
+ count++;
+ }
+
+ if (count <= dpb->max_num_frames)
+ return TRUE;
+ }
return FALSE;
}
if (!picture->needed_for_output)
continue;
+ if (picture->field != GST_H264_PICTURE_FIELD_FRAME &&
+ (!picture->other_field || picture->second_field))
+ continue;
+
if (!lowest) {
lowest = picture;
index = i;
gst_h264_dpb_bump (GstH264Dpb * dpb, gboolean drain)
{
GstH264Picture *picture;
+ GstH264Picture *other_picture;
+ gint i;
gint index;
g_return_val_if_fail (dpb != NULL, NULL);
if (!GST_H264_PICTURE_IS_REF (picture) || drain)
g_array_remove_index (dpb->pic_list, index);
+ other_picture = picture->other_field;
+ if (other_picture) {
+ other_picture->needed_for_output = FALSE;
+
+ /* At this moment, this picture should be interlaced */
+ picture->buffer_flags |= GST_VIDEO_BUFFER_FLAG_INTERLACED;
+
+ /* FIXME: need to check picture timing SEI for the case where top/bottom poc
+ * are identical */
+ if (picture->pic_order_cnt < other_picture->pic_order_cnt)
+ picture->buffer_flags |= GST_VIDEO_BUFFER_FLAG_TFF;
+
+ if (!other_picture->ref) {
+ for (i = 0; i < dpb->pic_list->len; i++) {
+ GstH264Picture *tmp =
+ g_array_index (dpb->pic_list, GstH264Picture *, i);
+
+ if (tmp == other_picture) {
+ g_array_remove_index (dpb->pic_list, i);
+ break;
+ }
+ }
+ }
+ /* Now other field may or may not exist */
+ }
+
dpb->last_output_poc = picture->pic_order_cnt;
return picture;
static gint
get_picNumX (GstH264Picture * picture, GstH264RefPicMarking * ref_pic_marking)
{
- /* FIXME: support interlaced */
return picture->pic_num -
(ref_pic_marking->difference_of_pic_nums_minus1 + 1);
}
pic_num_x = get_picNumX (picture, ref_pic_marking);
other = gst_h264_dpb_get_short_ref_by_pic_num (dpb, pic_num_x);
if (other) {
- gst_h264_picture_set_reference (other, GST_H264_PICTURE_REF_NONE);
+ gst_h264_picture_set_reference (other,
+ GST_H264_PICTURE_REF_NONE, FALSE);
GST_TRACE ("MMCO-1: unmark short-term ref picture %p, (poc %d)",
other, other->pic_order_cnt);
} else {
other = gst_h264_dpb_get_long_ref_by_long_term_pic_num (dpb,
ref_pic_marking->long_term_pic_num);
if (other) {
- gst_h264_picture_set_reference (other, GST_H264_PICTURE_REF_NONE);
+ gst_h264_picture_set_reference (other,
+ GST_H264_PICTURE_REF_NONE, FALSE);
GST_TRACE ("MMCO-2: unmark long-term ref picture %p, (poc %d)",
other, other->pic_order_cnt);
} else {
if (GST_H264_PICTURE_IS_LONG_TERM_REF (other)
&& other->long_term_frame_idx ==
ref_pic_marking->long_term_frame_idx) {
- gst_h264_picture_set_reference (other, GST_H264_PICTURE_REF_NONE);
+ gst_h264_picture_set_reference (other,
+ GST_H264_PICTURE_REF_NONE, TRUE);
GST_TRACE ("MMCO-3: unmark old long-term ref pic %p (poc %d)",
other, other->pic_order_cnt);
break;
pic_num_x = get_picNumX (picture, ref_pic_marking);
other = gst_h264_dpb_get_short_ref_by_pic_num (dpb, pic_num_x);
if (other) {
- gst_h264_picture_set_reference (other, GST_H264_PICTURE_REF_LONG_TERM);
+ gst_h264_picture_set_reference (other,
+ GST_H264_PICTURE_REF_LONG_TERM, picture->second_field);
other->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
GST_TRACE ("MMCO-3: mark long-term ref pic %p, index %d, (poc %d)",
other, other->long_term_frame_idx, other->pic_order_cnt);
+
+ if (picture->other_field &&
+ GST_H264_PICTURE_IS_LONG_TERM_REF (picture->other_field)) {
+ picture->other_field->long_term_frame_idx =
+ ref_pic_marking->long_term_frame_idx;
+ }
} else {
GST_WARNING ("Invalid picNumX %d for operation type 3", pic_num_x);
return FALSE;
if (GST_H264_PICTURE_IS_LONG_TERM_REF (other) &&
other->long_term_frame_idx > max_long_term_frame_idx) {
- gst_h264_picture_set_reference (other, GST_H264_PICTURE_REF_NONE);
+ gst_h264_picture_set_reference (other,
+ GST_H264_PICTURE_REF_NONE, FALSE);
GST_TRACE ("MMCO-4: unmark long-term ref pic %p, index %d, (poc %d)",
other, other->long_term_frame_idx, other->pic_order_cnt);
}
/* 8.2.5.4.5 Unmark all reference pictures */
for (i = 0; i < dpb->pic_list->len; i++) {
other = g_array_index (dpb->pic_list, GstH264Picture *, i);
- gst_h264_picture_set_reference (other, GST_H264_PICTURE_REF_NONE);
+ gst_h264_picture_set_reference (other,
+ GST_H264_PICTURE_REF_NONE, FALSE);
}
picture->mem_mgmt_5 = TRUE;
picture->frame_num = 0;
ref_pic_marking->long_term_frame_idx) {
GST_TRACE ("MMCO-6: unmark old long-term ref pic %p (poc %d)",
other, other->pic_order_cnt);
- gst_h264_picture_set_reference (other, GST_H264_PICTURE_REF_NONE);
+ gst_h264_picture_set_reference (other,
+ GST_H264_PICTURE_REF_NONE, TRUE);
break;
}
}
- gst_h264_picture_set_reference (picture, GST_H264_PICTURE_REF_LONG_TERM);
+ gst_h264_picture_set_reference (picture,
+ GST_H264_PICTURE_REF_LONG_TERM, picture->second_field);
picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
+ if (picture->other_field &&
+ GST_H264_PICTURE_IS_LONG_TERM_REF (picture->other_field)) {
+ picture->other_field->long_term_frame_idx =
+ ref_pic_marking->long_term_frame_idx;
+ }
break;
default:
g_assert_not_reached ();
* gst_h264_picture_set_reference:
* @picture: a #GstH264Picture
* @reference: a GstH264PictureReference
+ * @other_field: %TRUE if @reference needs to be applied to the
+ * other field if any
*
* Update reference picture type of @picture with @reference
*
*/
void
gst_h264_picture_set_reference (GstH264Picture * picture,
- GstH264PictureReference reference)
+ GstH264PictureReference reference, gboolean other_field)
{
g_return_if_fail (picture != NULL);
picture->ref = reference;
+
+ if (other_field && picture->other_field)
+ picture->other_field->ref = reference;
}