/*
* gstvaapidecoder_mpeg2.c - MPEG-2 decoder
*
- * Copyright (C) 2011 Intel Corporation
+ * Copyright (C) 2011-2013 Intel Corporation
+ * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
#define DEBUG 1
#include "gstvaapidebug.h"
-G_DEFINE_TYPE(GstVaapiDecoderMpeg2,
- gst_vaapi_decoder_mpeg2,
- GST_VAAPI_TYPE_DECODER)
-
-#define GST_VAAPI_DECODER_MPEG2_GET_PRIVATE(obj) \
- (G_TYPE_INSTANCE_GET_PRIVATE((obj), \
- GST_VAAPI_TYPE_DECODER_MPEG2, \
- GstVaapiDecoderMpeg2Private))
-
-#define READ_UINT8(br, val, nbits) G_STMT_START { \
- if (!gst_bit_reader_get_bits_uint8 (br, &val, nbits)) { \
- GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
- goto failed; \
- } \
-} G_STMT_END
-
-#define SKIP(reader, nbits) G_STMT_START { \
- if (!gst_bit_reader_skip (reader, nbits)) { \
- GST_WARNING ("failed to skip nbits: %d", nbits); \
- goto failed; \
- } \
-} G_STMT_END
-
/* ------------------------------------------------------------------------- */
/* --- PTS Generator --- */
/* ------------------------------------------------------------------------- */
if (!GST_CLOCK_TIME_IS_VALID(tsg->gop_pts))
tsg->gop_pts = 0;
- pts = tsg->gop_pts + pts_get_duration(tsg, tsg->ovl_tsn * 1024 + pic_tsn);
+ pts = pic_pts;
+ if (!GST_CLOCK_TIME_IS_VALID (pts))
+ pts = tsg->gop_pts + pts_get_duration(tsg, tsg->ovl_tsn * 1024 + pic_tsn);
if (!GST_CLOCK_TIME_IS_VALID(tsg->max_pts) || tsg->max_pts < pts)
tsg->max_pts = pts;
tsg->ovl_tsn++;
}
tsg->lst_tsn = pic_tsn;
+
return pts;
}
/* ------------------------------------------------------------------------- */
-/* --- MPEG-2 Decoder Units --- */
+/* --- MPEG-2 Parser Info --- */
/* ------------------------------------------------------------------------- */
-typedef struct _GstVaapiDecoderUnitMpeg2 GstVaapiDecoderUnitMpeg2;
-struct _GstVaapiDecoderUnitMpeg2 {
- GstVaapiDecoderUnit base;
- GstMpegVideoPacket packet;
- guint8 extension_type; /* for Extension packets */
+typedef struct _GstVaapiParserInfoMpeg2 GstVaapiParserInfoMpeg2;
+struct _GstVaapiParserInfoMpeg2 {
+ GstVaapiMiniObject parent_instance;
+ GstMpegVideoPacket packet;
+ guint8 extension_type; /* for Extension packets */
union {
GstMpegVideoSequenceHdr seq_hdr;
GstMpegVideoSequenceExt seq_ext;
+ GstMpegVideoSequenceDisplayExt seq_display_ext;
+ GstMpegVideoSequenceScalableExt seq_scalable_ext;
GstMpegVideoGop gop;
GstMpegVideoQuantMatrixExt quant_matrix;
GstMpegVideoPictureHdr pic_hdr;
GstMpegVideoPictureExt pic_ext;
+ GstMpegVideoSliceHdr slice_hdr;
} data;
};
-static GstVaapiDecoderUnitMpeg2 *
-gst_vaapi_decoder_unit_mpeg2_new(guint size)
+/* Returns the static mini-object class descriptor for parser info
+ * objects: instance size plus a NULL finalizer (plain data, nothing
+ * extra to release). */
+static inline const GstVaapiMiniObjectClass *
+gst_vaapi_parser_info_mpeg2_class(void)
+{
- GstVaapiDecoderUnitMpeg2 *unit;
-
- static const GstVaapiMiniObjectClass GstVaapiDecoderUnitMpeg2Class = {
- sizeof(GstVaapiDecoderUnitMpeg2),
- (GDestroyNotify)gst_vaapi_decoder_unit_finalize
+ static const GstVaapiMiniObjectClass GstVaapiParserInfoMpeg2Class = {
+ sizeof(GstVaapiParserInfoMpeg2),
+ NULL
};
+ return &GstVaapiParserInfoMpeg2Class;
+}
+
+/* Allocates a fresh, refcounted parser info object (caller owns one ref). */
+static inline GstVaapiParserInfoMpeg2 *
+gst_vaapi_parser_info_mpeg2_new(void)
+{
+ return (GstVaapiParserInfoMpeg2 *)
+ gst_vaapi_mini_object_new(gst_vaapi_parser_info_mpeg2_class());
+}
+
+/* Returns *pi_ptr, lazily allocating it on first use. On allocation
+ * failure *pi_ptr stays NULL and NULL is returned (callers treat that
+ * as GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED). */
+static inline GstVaapiParserInfoMpeg2 *
+gst_vaapi_parser_info_mpeg2_ensure(GstVaapiParserInfoMpeg2 **pi_ptr)
+{
+ GstVaapiParserInfoMpeg2 *pi = *pi_ptr;
- unit = (GstVaapiDecoderUnitMpeg2 *)
- gst_vaapi_mini_object_new(&GstVaapiDecoderUnitMpeg2Class);
- if (!unit)
- return NULL;
+ /* Fast path: object already exists, reuse it in place. */
+ if (G_LIKELY(pi != NULL))
+ return pi;
- gst_vaapi_decoder_unit_init(&unit->base, size);
- return unit;
+ *pi_ptr = pi = gst_vaapi_parser_info_mpeg2_new();
+ return pi;
}
+/* Thin refcounting wrappers over the generic mini-object helpers;
+ * _replace() atomically swaps *old_pi_ptr, unref'ing the old value
+ * and ref'ing the new one (new_pi may be NULL to just release). */
+#define gst_vaapi_parser_info_mpeg2_ref(pi) \
+ gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
+
+#define gst_vaapi_parser_info_mpeg2_unref(pi) \
+ gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
+
+#define gst_vaapi_parser_info_mpeg2_replace(old_pi_ptr, new_pi) \
+ gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
+ (GstVaapiMiniObject *)(new_pi))
+
/* ------------------------------------------------------------------------- */
/* --- MPEG-2 Decoder --- */
/* ------------------------------------------------------------------------- */
+#define GST_VAAPI_DECODER_MPEG2_CAST(decoder) \
+ ((GstVaapiDecoderMpeg2 *)(decoder))
+
+typedef struct _GstVaapiDecoderMpeg2Private GstVaapiDecoderMpeg2Private;
+typedef struct _GstVaapiDecoderMpeg2Class GstVaapiDecoderMpeg2Class;
+
+/* Bitmask tracking which bitstream headers were seen since the last
+ * reset; the VALID_* combinations gate decoding (see is_valid_state()
+ * and decode_current_picture()). */
+typedef enum {
+ GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR = 1 << 0,
+ GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT = 1 << 1,
+ GST_MPEG_VIDEO_STATE_GOT_PIC_HDR = 1 << 2,
+ GST_MPEG_VIDEO_STATE_GOT_PIC_EXT = 1 << 3,
+ GST_MPEG_VIDEO_STATE_GOT_SLICE = 1 << 4,
+
+ GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS = (
+ GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
+ GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT),
+ GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS = (
+ GST_MPEG_VIDEO_STATE_GOT_PIC_HDR|
+ GST_MPEG_VIDEO_STATE_GOT_PIC_EXT),
+ GST_MPEG_VIDEO_STATE_VALID_PICTURE = (
+ GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS|
+ GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS|
+ GST_MPEG_VIDEO_STATE_GOT_SLICE)
+} GstMpegVideoState;
+
struct _GstVaapiDecoderMpeg2Private {
GstVaapiProfile profile;
GstVaapiProfile hw_profile;
guint height;
guint fps_n;
guint fps_d;
- GstMpegVideoSequenceHdr seq_hdr;
- GstMpegVideoSequenceExt seq_ext;
- GstMpegVideoPictureHdr pic_hdr;
- GstMpegVideoPictureExt pic_ext;
- GstMpegVideoQuantMatrixExt quant_matrix_ext;
+ guint state;
+ GstVaapiRectangle crop_rect;
+ GstVaapiParserInfoMpeg2 *seq_hdr;
+ GstVaapiParserInfoMpeg2 *seq_ext;
+ GstVaapiParserInfoMpeg2 *seq_display_ext;
+ GstVaapiParserInfoMpeg2 *seq_scalable_ext;
+ GstVaapiParserInfoMpeg2 *gop;
+ GstVaapiParserInfoMpeg2 *pic_hdr;
+ GstVaapiParserInfoMpeg2 *pic_ext;
+ GstVaapiParserInfoMpeg2 *pic_display_ext;
+ GstVaapiParserInfoMpeg2 *quant_matrix;
+ GstVaapiParserInfoMpeg2 *slice_hdr;
GstVaapiPicture *current_picture;
GstVaapiDpb *dpb;
PTSGenerator tsg;
- guint is_constructed : 1;
guint is_opened : 1;
- guint has_seq_ext : 1;
- guint has_seq_scalable_ext : 1;
- guint has_pic_ext : 1;
- guint has_quant_matrix_ext : 1;
guint size_changed : 1;
guint profile_changed : 1;
guint quant_matrix_changed : 1;
guint broken_link : 1;
};
-/* VLC decoder from gst-plugins-bad */
-typedef struct _VLCTable VLCTable;
-struct _VLCTable {
- gint value;
- guint cword;
- guint cbits;
-};
-
-static gboolean
-decode_vlc(GstBitReader *br, gint *res, const VLCTable *table, guint length)
-{
- guint8 i;
- guint cbits = 0;
- guint32 value = 0;
-
- for (i = 0; i < length; i++) {
- if (cbits != table[i].cbits) {
- cbits = table[i].cbits;
- if (!gst_bit_reader_peek_bits_uint32(br, &value, cbits)) {
- goto failed;
- }
- }
-
- if (value == table[i].cword) {
- SKIP(br, cbits);
- if (res)
- *res = table[i].value;
- return TRUE;
- }
- }
- GST_DEBUG("failed to find VLC code");
-
-failed:
- GST_WARNING("failed to decode VLC, returning");
- return FALSE;
-}
-
-enum {
- GST_MPEG_VIDEO_MACROBLOCK_ESCAPE = -1,
+/**
+ * GstVaapiDecoderMpeg2:
+ *
+ * A decoder based on Mpeg2.
+ */
+struct _GstVaapiDecoderMpeg2 {
+ /*< private >*/
+ GstVaapiDecoder parent_instance;
+ GstVaapiDecoderMpeg2Private priv;
};
-/* Table B-1: Variable length codes for macroblock_address_increment */
-static const VLCTable mpeg2_mbaddr_vlc_table[] = {
- { 1, 0x01, 1 },
- { 2, 0x03, 3 },
- { 3, 0x02, 3 },
- { 4, 0x03, 4 },
- { 5, 0x02, 4 },
- { 6, 0x03, 5 },
- { 7, 0x02, 5 },
- { 8, 0x07, 7 },
- { 9, 0x06, 7 },
- { 10, 0x0b, 8 },
- { 11, 0x0a, 8 },
- { 12, 0x09, 8 },
- { 13, 0x08, 8 },
- { 14, 0x07, 8 },
- { 15, 0x06, 8 },
- { 16, 0x17, 10 },
- { 17, 0x16, 10 },
- { 18, 0x15, 10 },
- { 19, 0x14, 10 },
- { 20, 0x13, 10 },
- { 21, 0x12, 10 },
- { 22, 0x23, 11 },
- { 23, 0x22, 11 },
- { 24, 0x21, 11 },
- { 25, 0x20, 11 },
- { 26, 0x1f, 11 },
- { 27, 0x1e, 11 },
- { 28, 0x1d, 11 },
- { 29, 0x1c, 11 },
- { 30, 0x1b, 11 },
- { 31, 0x1a, 11 },
- { 32, 0x19, 11 },
- { 33, 0x18, 11 },
- { GST_MPEG_VIDEO_MACROBLOCK_ESCAPE, 0x08, 11 }
+/**
+ * GstVaapiDecoderMpeg2Class:
+ *
+ * A decoder class based on Mpeg2.
+ */
+struct _GstVaapiDecoderMpeg2Class {
+ /*< private >*/
+ GstVaapiDecoderClass parent_class;
};
+/* Releases all per-stream state: the in-flight picture, every cached
+ * parser info object, and the DPB. Safe to call repeatedly. */
static void
gst_vaapi_decoder_mpeg2_close(GstVaapiDecoderMpeg2 *decoder)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
gst_vaapi_picture_replace(&priv->current_picture, NULL);
- if (priv->dpb) {
- gst_vaapi_dpb_unref(priv->dpb);
- priv->dpb = NULL;
- }
+ gst_vaapi_parser_info_mpeg2_replace(&priv->seq_hdr, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->seq_ext, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->seq_display_ext, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->seq_scalable_ext, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->gop, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->pic_hdr, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->pic_ext, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->pic_display_ext, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->quant_matrix, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->slice_hdr, NULL);
+
+ gst_vaapi_dpb_replace(&priv->dpb, NULL);
}
static gboolean
gst_vaapi_decoder_mpeg2_open(GstVaapiDecoderMpeg2 *decoder)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
gst_vaapi_decoder_mpeg2_close(decoder);
- priv->dpb = gst_vaapi_dpb_mpeg2_new();
+ priv->dpb = gst_vaapi_dpb_new(2);
if (!priv->dpb)
return FALSE;
}
+/* GstVaapiDecoder vfunc: tears down the MPEG-2 specific state by
+ * delegating to _close(). Takes the base type now that the subclass
+ * embeds its private data directly. */
static void
-gst_vaapi_decoder_mpeg2_destroy(GstVaapiDecoderMpeg2 *decoder)
+gst_vaapi_decoder_mpeg2_destroy(GstVaapiDecoder *base_decoder)
{
+ GstVaapiDecoderMpeg2 * const decoder =
+ GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+
gst_vaapi_decoder_mpeg2_close(decoder);
}
+/* GstVaapiDecoder vfunc: one-time initialization of profile tracking.
+ * Starts from SIMPLE profile with profile_changed set so the first
+ * sequence header can upgrade/remap the profile. */
static gboolean
-gst_vaapi_decoder_mpeg2_create(GstVaapiDecoderMpeg2 *decoder)
+gst_vaapi_decoder_mpeg2_create(GstVaapiDecoder *base_decoder)
{
- if (!GST_VAAPI_DECODER_CODEC(decoder))
- return FALSE;
+ GstVaapiDecoderMpeg2 * const decoder =
+ GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+
+ priv->hw_profile = GST_VAAPI_PROFILE_UNKNOWN;
+ priv->profile = GST_VAAPI_PROFILE_MPEG2_SIMPLE;
+ priv->profile_changed = TRUE; /* Allow fallbacks to work */
+ return TRUE;
}
get_profile(GstVaapiDecoderMpeg2 *decoder, GstVaapiEntrypoint entrypoint)
{
GstVaapiDisplay * const va_display = GST_VAAPI_DECODER_DISPLAY(decoder);
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
GstVaapiProfile profile = priv->profile;
do {
break;
case GST_VAAPI_PROFILE_MPEG2_HIGH:
// Try to map to main profile if no high profile specific bits used
- if (priv->profile == profile &&
- !priv->has_seq_scalable_ext &&
- (priv->has_seq_ext && priv->seq_ext.chroma_format == 1)) {
+ if (priv->profile == profile &&
+ !priv->seq_scalable_ext &&
+ (priv->seq_ext &&
+ priv->seq_ext->data.seq_ext.chroma_format == 1)) {
profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
break;
}
static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderMpeg2 *decoder)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
gboolean reset_context = FALSE;
info.profile = priv->hw_profile;
info.entrypoint = entrypoint;
+ info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
info.width = priv->width;
info.height = priv->height;
info.ref_frames = 2;
reset_context = gst_vaapi_decoder_ensure_context(
- GST_VAAPI_DECODER(decoder),
+ GST_VAAPI_DECODER_CAST(decoder),
&info
);
if (!reset_context)
static GstVaapiDecoderStatus
ensure_quant_matrix(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceHdr * const seq_hdr = &priv->seq_hdr->data.seq_hdr;
VAIQMatrixBufferMPEG2 *iq_matrix;
guint8 *intra_quant_matrix = NULL;
guint8 *non_intra_quant_matrix = NULL;
}
iq_matrix = picture->iq_matrix->param;
- intra_quant_matrix = priv->seq_hdr.intra_quantizer_matrix;
- non_intra_quant_matrix = priv->seq_hdr.non_intra_quantizer_matrix;
- if (priv->has_quant_matrix_ext) {
- if (priv->quant_matrix_ext.load_intra_quantiser_matrix)
- intra_quant_matrix = priv->quant_matrix_ext.intra_quantiser_matrix;
- if (priv->quant_matrix_ext.load_non_intra_quantiser_matrix)
- non_intra_quant_matrix = priv->quant_matrix_ext.non_intra_quantiser_matrix;
- if (priv->quant_matrix_ext.load_chroma_intra_quantiser_matrix)
- chroma_intra_quant_matrix = priv->quant_matrix_ext.chroma_intra_quantiser_matrix;
- if (priv->quant_matrix_ext.load_chroma_non_intra_quantiser_matrix)
- chroma_non_intra_quant_matrix = priv->quant_matrix_ext.chroma_non_intra_quantiser_matrix;
+ intra_quant_matrix = seq_hdr->intra_quantizer_matrix;
+ non_intra_quant_matrix = seq_hdr->non_intra_quantizer_matrix;
+
+ if (priv->quant_matrix) {
+ GstMpegVideoQuantMatrixExt * const quant_matrix =
+ &priv->quant_matrix->data.quant_matrix;
+ if (quant_matrix->load_intra_quantiser_matrix)
+ intra_quant_matrix = quant_matrix->intra_quantiser_matrix;
+ if (quant_matrix->load_non_intra_quantiser_matrix)
+ non_intra_quant_matrix = quant_matrix->non_intra_quantiser_matrix;
+ if (quant_matrix->load_chroma_intra_quantiser_matrix)
+ chroma_intra_quant_matrix = quant_matrix->chroma_intra_quantiser_matrix;
+ if (quant_matrix->load_chroma_non_intra_quantiser_matrix)
+ chroma_non_intra_quant_matrix = quant_matrix->chroma_non_intra_quantiser_matrix;
}
iq_matrix->load_intra_quantiser_matrix = intra_quant_matrix != NULL;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
-static gboolean
+/* TRUE iff every bit of @state is currently set in priv->state,
+ * i.e. all the required headers have been seen. */
+static inline gboolean
+is_valid_state(GstVaapiDecoderMpeg2 *decoder, guint state)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+
+ return (priv->state & state) == state;
+}
+
+/* Submits the current picture for decoding and, once complete, moves
+ * it into the DPB. Frames missing any required header are dropped;
+ * on success or error the header state is reset down to the
+ * sequence-level bits so they persist across pictures. */
+static GstVaapiDecoderStatus
decode_current_picture(GstVaapiDecoderMpeg2 *decoder)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
GstVaapiPicture * const picture = priv->current_picture;
- if (picture) {
- if (!gst_vaapi_picture_decode(picture))
- return FALSE;
- if (GST_VAAPI_PICTURE_IS_COMPLETE(picture)) {
- if (!gst_vaapi_dpb_add(priv->dpb, picture))
- return FALSE;
- gst_vaapi_picture_replace(&priv->current_picture, NULL);
- }
+ if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_PICTURE))
+ goto drop_frame;
+ /* Keep only the sequence-header bits for the next picture. */
+ priv->state &= GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS;
+
+ if (!picture)
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+ if (!gst_vaapi_picture_decode(picture))
+ goto error;
+ if (GST_VAAPI_PICTURE_IS_COMPLETE(picture)) {
+ if (!gst_vaapi_dpb_add(priv->dpb, picture))
+ goto error;
+ gst_vaapi_picture_replace(&priv->current_picture, NULL);
}
- return TRUE;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+error:
+ /* XXX: fix for cases where first field failed to be decoded */
+ gst_vaapi_picture_replace(&priv->current_picture, NULL);
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+
+drop_frame:
+ priv->state &= GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS;
+ return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
}
+/* Parses a sequence_header() packet into the cached parser info and
+ * attaches the result to @unit. A sequence header restarts the whole
+ * header state machine, hence priv->state = 0. */
static GstVaapiDecoderStatus
-parse_sequence(GstVaapiDecoderUnitMpeg2 *unit)
+parse_sequence(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
{
- GstMpegVideoPacket * const packet = &unit->packet;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceHdr *seq_hdr;
+
+ priv->state = 0;
- if (!gst_mpeg_video_parse_sequence_header(&unit->data.seq_hdr,
- packet->data, packet->size, packet->offset)) {
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->seq_hdr)) {
+ GST_ERROR("failed to allocate parser info for sequence header");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+
+ seq_hdr = &priv->seq_hdr->data.seq_hdr;
+
+ if (!gst_mpeg_video_packet_parse_sequence_header(packet, seq_hdr)) {
GST_ERROR("failed to parse sequence header");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, seq_hdr, NULL);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
+/* Applies a parsed sequence header: drops stale extension/matrix
+ * infos from any previous sequence, updates framerate and picture
+ * dimensions, and marks GOT_SEQ_HDR. */
static GstVaapiDecoderStatus
-decode_sequence(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnitMpeg2 *unit)
+decode_sequence(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GstMpegVideoSequenceHdr * const seq_hdr = &priv->seq_hdr;
+ GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceHdr * const seq_hdr = unit->parsed_info;
- *seq_hdr = unit->data.seq_hdr;
+ /* A new sequence invalidates all cached extension headers. */
+ gst_vaapi_parser_info_mpeg2_replace(&priv->seq_ext, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->seq_display_ext, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->seq_scalable_ext, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->quant_matrix, NULL);
+ gst_vaapi_parser_info_mpeg2_replace(&priv->pic_display_ext, NULL);
priv->fps_n = seq_hdr->fps_n;
priv->fps_d = seq_hdr->fps_d;
pts_set_framerate(&priv->tsg, priv->fps_n, priv->fps_d);
gst_vaapi_decoder_set_framerate(base_decoder, priv->fps_n, priv->fps_d);
- if (gst_mpeg_video_finalise_mpeg2_sequence_header(seq_hdr, NULL, NULL))
- gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder,
- seq_hdr->par_w, seq_hdr->par_h);
-
priv->width = seq_hdr->width;
priv->height = seq_hdr->height;
- priv->has_seq_ext = FALSE;
priv->size_changed = TRUE;
priv->quant_matrix_changed = TRUE;
priv->progressive_sequence = TRUE;
+
+ priv->state |= GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
+/* Parses a sequence_extension() packet; only the GOT_SEQ_HDR bit
+ * survives, since picture-level state from the previous sequence is
+ * no longer meaningful. */
static GstVaapiDecoderStatus
-parse_sequence_ext(GstVaapiDecoderUnitMpeg2 *unit)
+parse_sequence_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
{
- GstMpegVideoPacket * const packet = &unit->packet;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceExt *seq_ext;
+
+ priv->state &= GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR;
+
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->seq_ext)) {
+ GST_ERROR("failed to allocate parser info for sequence extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+
+ seq_ext = &priv->seq_ext->data.seq_ext;
- if (!gst_mpeg_video_parse_sequence_extension(&unit->data.seq_ext,
- packet->data, packet->size, packet->offset)) {
+ if (!gst_mpeg_video_packet_parse_sequence_extension(packet, seq_ext)) {
GST_ERROR("failed to parse sequence-extension");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, seq_ext, NULL);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-decode_sequence_ext(GstVaapiDecoderMpeg2 *decoder,
- GstVaapiDecoderUnitMpeg2 *unit)
+decode_sequence_ext(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GstMpegVideoSequenceHdr * const seq_hdr = &priv->seq_hdr;
- GstMpegVideoSequenceExt * const seq_ext = &priv->seq_ext;
+ GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceExt * const seq_ext = unit->parsed_info;
GstVaapiProfile profile;
guint width, height;
- *seq_ext = unit->data.seq_ext;
+ if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR))
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
- priv->has_seq_ext = TRUE;
priv->progressive_sequence = seq_ext->progressive;
gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
priv->profile_changed = TRUE;
}
- if (gst_mpeg_video_finalise_mpeg2_sequence_header(seq_hdr, seq_ext, NULL))
- gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder,
- seq_hdr->par_w, seq_hdr->par_h);
+ priv->state |= GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+/* Parses a sequence_display_extension() packet into the cached
+ * parser info and attaches the result to @unit. */
+static GstVaapiDecoderStatus
+parse_sequence_display_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceDisplayExt *seq_display_ext;
+
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->seq_display_ext)) {
+ GST_ERROR("failed to allocate parser info for sequence display extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+ seq_display_ext = &priv->seq_display_ext->data.seq_display_ext;
+
+ if (!gst_mpeg_video_packet_parse_sequence_display_extension(packet,
+ seq_display_ext)) {
+ GST_ERROR("failed to parse sequence-display-extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ }
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, seq_display_ext, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+/* Applies the cached sequence display extension: derives the display
+ * cropping rectangle from the advertised display size. @unit is
+ * unused; the data comes from priv->seq_display_ext. */
static GstVaapiDecoderStatus
-decode_sequence_end(GstVaapiDecoderMpeg2 *decoder)
+decode_sequence_display_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceDisplayExt *seq_display_ext;
+
+ seq_display_ext = priv->seq_display_ext ?
+ &priv->seq_display_ext->data.seq_display_ext : NULL;
+
+ /* Update cropping rectangle */
+ if (seq_display_ext) {
+ GstVaapiRectangle * const crop_rect = &priv->crop_rect;
+ crop_rect->x = 0;
+ crop_rect->y = 0;
+ crop_rect->width = seq_display_ext->display_horizontal_size;
+ crop_rect->height = seq_display_ext->display_vertical_size;
+ }
+
+ /* XXX: handle color primaries */
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+/* Parses a sequence_scalable_extension() packet into the cached
+ * parser info and attaches the result to @unit. */
+static GstVaapiDecoderStatus
+parse_sequence_scalable_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceScalableExt *seq_scalable_ext;
- if (priv->current_picture && !decode_current_picture(decoder))
- return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->seq_scalable_ext)) {
+ GST_ERROR("failed to allocate parser info for sequence scalable extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+
+ seq_scalable_ext = &priv->seq_scalable_ext->data.seq_scalable_ext;
+
+ if (!gst_mpeg_video_packet_parse_sequence_scalable_extension(packet,
+ seq_scalable_ext)) {
+ GST_ERROR("failed to parse sequence-scalable-extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ }
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, seq_scalable_ext, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+/* Scalable streams are not decoded by this backend; the header is
+ * parsed (and cached) but intentionally not acted upon. */
+static GstVaapiDecoderStatus
+decode_sequence_scalable_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit)
+{
+ /* XXX: unsupported header -- ignore */
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+/* Handles sequence_end_code: flushes any frames still held in the
+ * DPB so they are output. */
+static GstVaapiDecoderStatus
+decode_sequence_end(GstVaapiDecoderMpeg2 *decoder)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
gst_vaapi_dpb_flush(priv->dpb);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
+/* Parses a quant_matrix_extension() packet into the cached parser
+ * info and attaches the result to @unit. */
static GstVaapiDecoderStatus
-parse_quant_matrix_ext(GstVaapiDecoderUnitMpeg2 *unit)
+parse_quant_matrix_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
{
- GstMpegVideoPacket * const packet = &unit->packet;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoQuantMatrixExt *quant_matrix;
+
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->quant_matrix)) {
+ GST_ERROR("failed to allocate parser info for quantization matrix");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
- if (!gst_mpeg_video_parse_quant_matrix_extension(&unit->data.quant_matrix,
- packet->data, packet->size, packet->offset)) {
+ quant_matrix = &priv->quant_matrix->data.quant_matrix;
+
+ if (!gst_mpeg_video_packet_parse_quant_matrix_extension(packet,
+ quant_matrix)) {
GST_ERROR("failed to parse quant-matrix-extension");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, quant_matrix, NULL);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
+/* Marks the quantization matrices dirty; the actual VA IQ-matrix
+ * buffer is rebuilt later from the cached parser info (see
+ * ensure_quant_matrix()). */
static GstVaapiDecoderStatus
decode_quant_matrix_ext(GstVaapiDecoderMpeg2 *decoder,
- GstVaapiDecoderUnitMpeg2 *unit)
+ GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
- priv->quant_matrix_ext = unit->data.quant_matrix;
- priv->has_quant_matrix_ext = TRUE;
priv->quant_matrix_changed = TRUE;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
+/* Parses a group_of_pictures_header() packet into the cached parser
+ * info and attaches the result to @unit. */
static GstVaapiDecoderStatus
-parse_gop(GstVaapiDecoderUnitMpeg2 *unit)
+parse_gop(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
{
- GstMpegVideoPacket * const packet = &unit->packet;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoGop *gop;
+
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->gop)) {
+ GST_ERROR("failed to allocate parser info for GOP");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+
+ gop = &priv->gop->data.gop;
- if (!gst_mpeg_video_parse_gop(&unit->data.gop,
- packet->data, packet->size, packet->offset)) {
+ if (!gst_mpeg_video_packet_parse_gop(packet, gop)) {
GST_ERROR("failed to parse GOP");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, gop, NULL);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-decode_gop(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnitMpeg2 *unit)
+decode_gop(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GstMpegVideoGop * const gop = &unit->data.gop;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoGop * const gop = unit->parsed_info;
priv->closed_gop = gop->closed_gop;
priv->broken_link = gop->broken_link;
}
+/* Parses a picture_header() packet into the cached parser info and
+ * attaches the result to @unit. Picture-level state from the
+ * previous picture is cleared; only the sequence bits survive. */
static GstVaapiDecoderStatus
-parse_picture(GstVaapiDecoderUnitMpeg2 *unit)
+parse_picture(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
{
- GstMpegVideoPacket * const packet = &unit->packet;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoPictureHdr *pic_hdr;
+
+ priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
+ GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT);
+
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->pic_hdr)) {
+ GST_ERROR("failed to allocate parser info for picture header");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
- if (!gst_mpeg_video_parse_picture_header(&unit->data.pic_hdr,
- packet->data, packet->size, packet->offset)) {
+ pic_hdr = &priv->pic_hdr->data.pic_hdr;
+
+ if (!gst_mpeg_video_packet_parse_picture_header(packet, pic_hdr)) {
GST_ERROR("failed to parse picture header");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, pic_hdr, NULL);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
+/* Applies a parsed picture header: requires valid sequence headers,
+ * drops the stale picture extension, and marks GOT_PIC_HDR. The old
+ * inline picture allocation / type / PTS setup removed below now
+ * lives in init_picture() and friends. */
static GstVaapiDecoderStatus
-decode_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnitMpeg2 *unit)
+decode_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr;
- GstVaapiPicture *picture;
- GstVaapiDecoderStatus status;
-
- *pic_hdr = unit->data.pic_hdr;
-
- status = ensure_context(decoder);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
- GST_ERROR("failed to reset context");
- return status;
- }
-
- if (priv->current_picture && !decode_current_picture(decoder))
- return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-
- if (priv->current_picture) {
- /* Re-use current picture where the first field was decoded */
- picture = gst_vaapi_picture_new_field(priv->current_picture);
- if (!picture) {
- GST_ERROR("failed to allocate field picture");
- return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
- }
- }
- else {
- /* Create new picture */
- picture = GST_VAAPI_PICTURE_NEW(MPEG2, decoder);
- if (!picture) {
- GST_ERROR("failed to allocate picture");
- return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
- }
- }
- gst_vaapi_picture_replace(&priv->current_picture, picture);
- gst_vaapi_picture_unref(picture);
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
- status = ensure_quant_matrix(decoder, picture);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
- GST_ERROR("failed to reset quantizer matrix");
- return status;
- }
-
- priv->has_pic_ext = FALSE;
+ if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS))
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
- switch (pic_hdr->pic_type) {
- case GST_MPEG_VIDEO_PICTURE_TYPE_I:
- GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
- picture->type = GST_VAAPI_PICTURE_TYPE_I;
- break;
- case GST_MPEG_VIDEO_PICTURE_TYPE_P:
- GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
- picture->type = GST_VAAPI_PICTURE_TYPE_P;
- break;
- case GST_MPEG_VIDEO_PICTURE_TYPE_B:
- picture->type = GST_VAAPI_PICTURE_TYPE_B;
- break;
- default:
- GST_ERROR("unsupported picture type %d", pic_hdr->pic_type);
- return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
- }
+ gst_vaapi_parser_info_mpeg2_replace(&priv->pic_ext, NULL);
- /* Update presentation time */
- picture->pts = pts_eval(&priv->tsg,
- GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts, pic_hdr->tsn);
- picture->poc = pts_get_poc(&priv->tsg);
+ priv->state |= GST_MPEG_VIDEO_STATE_GOT_PIC_HDR;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
+/* Parses a picture_coding_extension() packet into the cached parser
+ * info and attaches the result to @unit. Keeps only the sequence
+ * bits plus GOT_PIC_HDR. */
static GstVaapiDecoderStatus
-parse_picture_ext(GstVaapiDecoderUnitMpeg2 *unit)
+parse_picture_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
{
- GstMpegVideoPacket * const packet = &unit->packet;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoPictureExt *pic_ext;
+
+ priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
+ GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT|
+ GST_MPEG_VIDEO_STATE_GOT_PIC_HDR);
+
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->pic_ext)) {
+ GST_ERROR("failed to allocate parser info for picture extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
- if (!gst_mpeg_video_parse_picture_extension(&unit->data.pic_ext,
- packet->data, packet->size, packet->offset)) {
+ pic_ext = &priv->pic_ext->data.pic_ext;
+
+ if (!gst_mpeg_video_packet_parse_picture_extension(packet, pic_ext)) {
GST_ERROR("failed to parse picture-extension");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, pic_ext, NULL);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-decode_picture_ext(GstVaapiDecoderMpeg2 *decoder,
- GstVaapiDecoderUnitMpeg2 *unit)
+decode_picture_ext(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext;
- GstVaapiPicture * const picture = priv->current_picture;
-
- *pic_ext = unit->data.pic_ext;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoPictureExt * const pic_ext = unit->parsed_info;
- priv->has_pic_ext = TRUE;
+ if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_GOT_PIC_HDR))
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
if (priv->progressive_sequence && !pic_ext->progressive_frame) {
GST_WARNING("invalid interlaced frame in progressive sequence, fixing");
pic_ext->picture_structure = GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
}
+ priv->state |= GST_MPEG_VIDEO_STATE_GOT_PIC_EXT;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static inline guint32
+pack_f_code(guint8 f_code[2][2])
+{
+ return (((guint32)f_code[0][0] << 12) |
+ ((guint32)f_code[0][1] << 8) |
+ ((guint32)f_code[1][0] << 4) |
+ ( f_code[1][1] ));
+}
+
+static GstVaapiDecoderStatus
+init_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr->data.pic_hdr;
+ GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext->data.pic_ext;
+
+ switch (pic_hdr->pic_type) {
+ case GST_MPEG_VIDEO_PICTURE_TYPE_I:
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
+ picture->type = GST_VAAPI_PICTURE_TYPE_I;
+ break;
+ case GST_MPEG_VIDEO_PICTURE_TYPE_P:
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
+ picture->type = GST_VAAPI_PICTURE_TYPE_P;
+ break;
+ case GST_MPEG_VIDEO_PICTURE_TYPE_B:
+ picture->type = GST_VAAPI_PICTURE_TYPE_B;
+ break;
+ default:
+ GST_ERROR("unsupported picture type %d", pic_hdr->pic_type);
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ }
+
if (!priv->progressive_sequence && !pic_ext->progressive_frame) {
GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
if (pic_ext->top_field_first)
GST_VAAPI_PICTURE_FLAG_SET(
dummy_picture,
(GST_VAAPI_PICTURE_FLAG_SKIPPED |
+ GST_VAAPI_PICTURE_FLAG_OUTPUT |
GST_VAAPI_PICTURE_FLAG_REFERENCE)
);
}
GST_INFO("allocated dummy picture for first field based I-frame");
}
- return GST_VAAPI_DECODER_STATUS_SUCCESS;
-}
-static inline guint32
-pack_f_code(guint8 f_code[2][2])
-{
- return (((guint32)f_code[0][0] << 12) |
- ((guint32)f_code[0][1] << 8) |
- ((guint32)f_code[1][0] << 4) |
- ( f_code[1][1] ));
+ /* Update presentation time */
+ picture->pts = pts_eval(&priv->tsg,
+ GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts, pic_hdr->tsn);
+ picture->poc = pts_get_poc(&priv->tsg);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
-static gboolean
+static void
fill_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
VAPictureParameterBufferMPEG2 * const pic_param = picture->param;
- GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr;
- GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext;
+ GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr->data.pic_hdr;
+ GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext->data.pic_ext;
GstVaapiPicture *prev_picture, *next_picture;
- if (!priv->has_pic_ext)
- return FALSE;
-
/* Fill in VAPictureParameterBufferMPEG2 */
- pic_param->horizontal_size = priv->width;
- pic_param->vertical_size = priv->height;
- pic_param->forward_reference_picture = VA_INVALID_ID;
- pic_param->backward_reference_picture = VA_INVALID_ID;
- pic_param->picture_coding_type = pic_hdr->pic_type;
- pic_param->f_code = pack_f_code(pic_ext->f_code);
+ pic_param->horizontal_size = priv->width;
+ pic_param->vertical_size = priv->height;
+ pic_param->forward_reference_picture = VA_INVALID_ID;
+ pic_param->backward_reference_picture = VA_INVALID_ID;
+ pic_param->picture_coding_type = pic_hdr->pic_type;
+ pic_param->f_code = pack_f_code(pic_ext->f_code);
#define COPY_FIELD(a, b, f) \
pic_param->a.b.f = pic_ext->f
- pic_param->picture_coding_extension.value = 0;
- pic_param->picture_coding_extension.bits.is_first_field = GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture);
+ pic_param->picture_coding_extension.value = 0;
+ pic_param->picture_coding_extension.bits.is_first_field =
+ GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture);
COPY_FIELD(picture_coding_extension, bits, intra_dc_precision);
COPY_FIELD(picture_coding_extension, bits, picture_structure);
COPY_FIELD(picture_coding_extension, bits, top_field_first);
COPY_FIELD(picture_coding_extension, bits, repeat_first_field);
COPY_FIELD(picture_coding_extension, bits, progressive_frame);
- gst_vaapi_dpb_mpeg2_get_references(
- priv->dpb,
- picture,
- &prev_picture,
- &next_picture
- );
+ gst_vaapi_dpb_get_neighbours(priv->dpb, picture,
+ &prev_picture, &next_picture);
switch (pic_hdr->pic_type) {
case GST_MPEG_VIDEO_PICTURE_TYPE_B:
pic_param->forward_reference_picture = prev_picture->surface_id;
break;
}
- return TRUE;
}
static GstVaapiDecoderStatus
-decode_slice(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnitMpeg2 *unit)
+parse_slice(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSliceHdr *slice_hdr;
+ GstMpegVideoSequenceHdr *seq_hdr;
+ GstMpegVideoSequenceScalableExt *seq_scalable_ext;
+
+ priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
+ GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT|
+ GST_MPEG_VIDEO_STATE_GOT_PIC_HDR|
+ GST_MPEG_VIDEO_STATE_GOT_PIC_EXT);
+
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->slice_hdr)) {
+ GST_ERROR("failed to allocate parser info for slice header");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+
+ slice_hdr = &priv->slice_hdr->data.slice_hdr;
+ seq_hdr = &priv->seq_hdr->data.seq_hdr;
+ seq_scalable_ext = priv->seq_scalable_ext ?
+ &priv->seq_scalable_ext->data.seq_scalable_ext : NULL;
+
+ if (!gst_mpeg_video_packet_parse_slice_header(packet, slice_hdr,
+ seq_hdr, seq_scalable_ext)) {
+ GST_ERROR("failed to parse slice header");
+ return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ }
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, slice_hdr, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+decode_slice(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
GstVaapiPicture * const picture = priv->current_picture;
GstVaapiSlice *slice;
VASliceParameterBufferMPEG2 *slice_param;
- GstMpegVideoPacket * const packet = &unit->packet;
- const gint slice_no = packet->type - GST_MPEG_VIDEO_PACKET_SLICE_MIN;
- GstBitReader br;
- gint mb_x, mb_y, mb_inc;
- guint macroblock_offset;
- guint8 slice_vertical_position_extension;
- guint8 quantiser_scale_code;
- guint8 intra_slice = 0;
- guint8 extra_bit_slice, junk8;
-
- GST_DEBUG("slice %d (%u bytes)", slice_no, packet->size);
+ GstMpegVideoSliceHdr * const slice_hdr = unit->parsed_info;
+ GstBuffer * const buffer =
+ GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
+ GstMapInfo map_info;
- if (!picture)
+ GST_DEBUG("slice %d (%u bytes)", slice_hdr->mb_row, unit->size);
+
+ if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS))
return GST_VAAPI_DECODER_STATUS_SUCCESS;
- if (picture->slices->len == 0 && !fill_picture(decoder, picture))
+ if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
+ GST_ERROR("failed to map buffer");
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
-
- unit->base.buffer = gst_buffer_create_sub(
- GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer,
- unit->base.offset, unit->base.size);
- if (!unit->base.buffer) {
- GST_ERROR("failed to allocate slice data");
- return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
slice = GST_VAAPI_SLICE_NEW(MPEG2, decoder,
- GST_BUFFER_DATA(unit->base.buffer), packet->size);
+ (map_info.data + unit->offset), unit->size);
+ gst_buffer_unmap(buffer, &map_info);
if (!slice) {
GST_ERROR("failed to allocate slice");
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
gst_vaapi_picture_add_slice(picture, slice);
- /* Parse slice */
- gst_bit_reader_init(&br, GST_BUFFER_DATA(unit->base.buffer), packet->size);
- SKIP(&br, 32); /* slice_start_code */
- if (priv->height > 2800)
- READ_UINT8(&br, slice_vertical_position_extension, 3);
- if (priv->has_seq_scalable_ext) {
- GST_ERROR("failed to parse slice %d. Unsupported sequence_scalable_extension()", slice_no);
- return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
- }
- READ_UINT8(&br, quantiser_scale_code, 5);
- READ_UINT8(&br, extra_bit_slice, 1);
- if (extra_bit_slice == 1) {
- READ_UINT8(&br, intra_slice, 1);
- READ_UINT8(&br, junk8, 7);
- READ_UINT8(&br, extra_bit_slice, 1);
- while (extra_bit_slice == 1) {
- READ_UINT8(&br, junk8, 8);
- READ_UINT8(&br, extra_bit_slice, 1);
- }
- }
- macroblock_offset = gst_bit_reader_get_pos(&br);
-
- mb_y = slice_no;
- mb_x = -1;
- do {
- if (!decode_vlc(&br, &mb_inc, mpeg2_mbaddr_vlc_table,
- G_N_ELEMENTS(mpeg2_mbaddr_vlc_table))) {
- GST_WARNING("failed to decode first macroblock_address_increment");
- goto failed;
- }
- mb_x += mb_inc == GST_MPEG_VIDEO_MACROBLOCK_ESCAPE ? 33 : mb_inc;
- } while (mb_inc == GST_MPEG_VIDEO_MACROBLOCK_ESCAPE);
-
/* Fill in VASliceParameterBufferMPEG2 */
slice_param = slice->param;
- slice_param->macroblock_offset = macroblock_offset;
- slice_param->slice_horizontal_position = mb_x;
- slice_param->slice_vertical_position = mb_y;
- slice_param->quantiser_scale_code = quantiser_scale_code;
- slice_param->intra_slice_flag = intra_slice;
- return GST_VAAPI_DECODER_STATUS_SUCCESS;
+ slice_param->macroblock_offset = slice_hdr->header_size + 32;
+ slice_param->slice_horizontal_position = slice_hdr->mb_column;
+ slice_param->slice_vertical_position = slice_hdr->mb_row;
+ slice_param->quantiser_scale_code = slice_hdr->quantiser_scale_code;
+ slice_param->intra_slice_flag = slice_hdr->intra_slice;
-failed:
- return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ priv->state |= GST_MPEG_VIDEO_STATE_GOT_SLICE;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static inline gint
-scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
+scan_for_start_code(const guchar *buf, guint buf_size,
+ GstMpegVideoPacketTypeCode *type_ptr)
{
- return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
- 0xffffff00, 0x00000100,
- ofs, size,
- scp);
+ guint i = 0;
+
+ while (i <= (buf_size - 4)) {
+ if (buf[i + 2] > 1)
+ i += 3;
+ else if (buf[i + 1])
+ i += 2;
+ else if (buf[i] || buf[i + 2] != 1)
+ i++;
+ else
+ break;
+ }
+
+ if (i <= (buf_size - 4)) {
+ if (type_ptr)
+ *type_ptr = buf[i + 3];
+ return i;
+ }
+ return -1;
}
static GstVaapiDecoderStatus
-decode_unit(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnitMpeg2 *unit)
+parse_unit(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit,
+ GstMpegVideoPacket *packet)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- const GstMpegVideoPacketTypeCode type = unit->packet.type;
+ GstMpegVideoPacketTypeCode type;
+ GstMpegVideoPacketExtensionCode ext_type;
GstVaapiDecoderStatus status;
+ type = packet->type;
+ switch (type) {
+ case GST_MPEG_VIDEO_PACKET_PICTURE:
+ status = parse_picture(decoder, unit, packet);
+ break;
+ case GST_MPEG_VIDEO_PACKET_SEQUENCE:
+ status = parse_sequence(decoder, unit, packet);
+ break;
+ case GST_MPEG_VIDEO_PACKET_EXTENSION:
+ ext_type = packet->data[4] >> 4;
+ switch (ext_type) {
+ case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE:
+ status = parse_sequence_ext(decoder, unit, packet);
+ break;
+ case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_DISPLAY:
+ status = parse_sequence_display_ext(decoder, unit, packet);
+ break;
+ case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_SCALABLE:
+ status = parse_sequence_scalable_ext(decoder, unit, packet);
+ break;
+ case GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX:
+ status = parse_quant_matrix_ext(decoder, unit, packet);
+ break;
+ case GST_MPEG_VIDEO_PACKET_EXT_PICTURE:
+ status = parse_picture_ext(decoder, unit, packet);
+ break;
+ default:
+ status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+ break;
+ }
+ break;
+ case GST_MPEG_VIDEO_PACKET_GOP:
+ status = parse_gop(decoder, unit, packet);
+ break;
+ default:
+ if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
+ type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
+ status = parse_slice(decoder, unit, packet);
+ break;
+ }
+ status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+ break;
+ }
+ return status;
+}
+
+static GstVaapiDecoderStatus
+decode_unit(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit,
+ GstMpegVideoPacket *packet)
+{
+ GstMpegVideoPacketTypeCode type;
+ GstMpegVideoPacketExtensionCode ext_type;
+ GstVaapiDecoderStatus status;
+
+ type = packet->type;
switch (type) {
case GST_MPEG_VIDEO_PACKET_PICTURE:
- if (!priv->width || !priv->height)
- goto unknown_picture_size;
status = decode_picture(decoder, unit);
break;
case GST_MPEG_VIDEO_PACKET_SEQUENCE:
status = decode_sequence(decoder, unit);
break;
case GST_MPEG_VIDEO_PACKET_EXTENSION:
- switch (unit->extension_type) {
+ ext_type = packet->data[4] >> 4;
+ switch (ext_type) {
case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE:
status = decode_sequence_ext(decoder, unit);
break;
+ case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_DISPLAY:
+ status = decode_sequence_display_ext(decoder, unit);
+ break;
+ case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_SCALABLE:
+ status = decode_sequence_scalable_ext(decoder, unit);
+ break;
case GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX:
status = decode_quant_matrix_ext(decoder, unit);
break;
case GST_MPEG_VIDEO_PACKET_EXT_PICTURE:
- if (!priv->width || !priv->height)
- goto unknown_picture_size;
status = decode_picture_ext(decoder, unit);
break;
default:
// Ignore unknown start-code extensions
- GST_WARNING("unsupported packet extension type 0x%02x",
- unit->extension_type);
+ GST_WARNING("unsupported packet extension type 0x%02x", ext_type);
status = GST_VAAPI_DECODER_STATUS_SUCCESS;
break;
}
break;
}
return status;
-
-unknown_picture_size:
- // Ignore packet while picture size is undefined
- // i.e. missing sequence headers, or not parsed correctly
- GST_WARNING("failed to parse picture of unknown size");
- return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
ensure_decoder(GstVaapiDecoderMpeg2 *decoder)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
-
- g_return_val_if_fail(priv->is_constructed,
- GST_VAAPI_DECODER_STATUS_ERROR_INIT_FAILED);
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
if (!priv->is_opened) {
priv->is_opened = gst_vaapi_decoder_mpeg2_open(decoder);
static GstVaapiDecoderStatus
gst_vaapi_decoder_mpeg2_parse(GstVaapiDecoder *base_decoder,
- GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit **unit_ptr)
+ GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
{
GstVaapiDecoderMpeg2 * const decoder =
- GST_VAAPI_DECODER_MPEG2(base_decoder);
- GstVaapiDecoderUnitMpeg2 *unit;
+ GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+ GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
GstVaapiDecoderStatus status;
- GstMpegVideoPacket *packet;
+ GstMpegVideoPacketTypeCode type, type2 = GST_MPEG_VIDEO_PACKET_NONE;
const guchar *buf;
- guint32 start_code;
- guint size, buf_size, flags;
- gint ofs;
+ guint buf_size, flags;
+ gint ofs, ofs1, ofs2;
status = ensure_decoder(decoder);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
- size = gst_adapter_available(adapter);
- if (size < 4)
+ buf_size = gst_adapter_available(adapter);
+ if (buf_size < 4)
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
- ofs = scan_for_start_code(adapter, 0, size, &start_code);
+ buf = gst_adapter_map(adapter, buf_size);
+ if (!buf)
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+ ofs = scan_for_start_code(buf, buf_size, &type);
if (ofs < 0)
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
- gst_adapter_flush(adapter, ofs);
- size -= ofs;
+ ofs1 = ofs;
+
+ ofs2 = ps->input_offset2 - 4;
+ if (ofs2 < ofs1 + 4)
+ ofs2 = ofs1 + 4;
- ofs = G_UNLIKELY(size < 8) ? -1 :
- scan_for_start_code(adapter, 4, size - 4, NULL);
+ ofs = G_UNLIKELY(buf_size < ofs2 + 4) ? -1 :
+ scan_for_start_code(&buf[ofs2], buf_size - ofs2, &type2);
if (ofs < 0) {
// Assume the whole packet is present if end-of-stream
- if (!at_eos)
+ if (!at_eos) {
+ ps->input_offset2 = buf_size;
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
- ofs = size;
- }
- buf_size = ofs;
-
- buf = gst_adapter_peek(adapter, buf_size);
- if (!buf)
- return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
- unit = gst_vaapi_decoder_unit_mpeg2_new(buf_size);
- if (!unit)
- return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
-
- packet = &unit->packet;
- packet->data = buf;
- packet->size = buf_size;
- packet->offset = 4;
- packet->type = start_code & 0xff;
-
- /* Parse data */
- switch (packet->type) {
- case GST_MPEG_VIDEO_PACKET_SEQUENCE:
- status = parse_sequence(unit);
- break;
- case GST_MPEG_VIDEO_PACKET_GOP:
- status = parse_gop(unit);
- break;
- case GST_MPEG_VIDEO_PACKET_PICTURE:
- status = parse_picture(unit);
- break;
- case GST_MPEG_VIDEO_PACKET_EXTENSION:
- if (G_UNLIKELY(buf_size < 5)) {
- status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
- break;
- }
- unit->extension_type = buf[4] >> 4;
- switch (unit->extension_type) {
- case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE:
- status = parse_sequence_ext(unit);
- break;
- case GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX:
- status = parse_quant_matrix_ext(unit);
- break;
- case GST_MPEG_VIDEO_PACKET_EXT_PICTURE:
- status = parse_picture_ext(unit);
- break;
- default:
- status = GST_VAAPI_DECODER_STATUS_SUCCESS;
- break;
}
- break;
- default:
- status = GST_VAAPI_DECODER_STATUS_SUCCESS;
- break;
+ ofs = buf_size - ofs2;
}
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- return status;
+ ofs2 += ofs;
+
+ unit->size = ofs2 - ofs1;
+ gst_adapter_flush(adapter, ofs1);
+ ps->input_offset2 = 4;
/* Check for start of new picture */
flags = 0;
- switch (packet->type) {
+ switch (type) {
case GST_MPEG_VIDEO_PACKET_SEQUENCE_END:
flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
case GST_MPEG_VIDEO_PACKET_PICTURE:
flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
break;
+ case GST_MPEG_VIDEO_PACKET_EXTENSION:
+ if (G_UNLIKELY(unit->size < 5))
+ return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ break;
default:
- if (packet->type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
- packet->type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX)
+ if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
+ type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
+ switch (type2) {
+ case GST_MPEG_VIDEO_PACKET_USER_DATA:
+ case GST_MPEG_VIDEO_PACKET_SEQUENCE:
+ case GST_MPEG_VIDEO_PACKET_GOP:
+ case GST_MPEG_VIDEO_PACKET_PICTURE:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+ break;
+ default:
+ break;
+ }
+ }
// Ignore system start codes (PES headers)
- else if (packet->type >= 0xb9 && packet->type <= 0xff)
+ else if (type >= 0xb9 && type <= 0xff)
flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
break;
}
GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
-
- unit->packet.data = NULL;
- *unit_ptr = &unit->base;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
GstVaapiDecoderUnit *unit)
{
GstVaapiDecoderMpeg2 * const decoder =
- GST_VAAPI_DECODER_MPEG2(base_decoder);
+ GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
GstVaapiDecoderStatus status;
+ GstMpegVideoPacket packet;
+ GstBuffer * const buffer =
+ GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
+ GstMapInfo map_info;
status = ensure_decoder(decoder);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
- return decode_unit(decoder, (GstVaapiDecoderUnitMpeg2 *)unit);
+
+ if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
+ GST_ERROR("failed to map buffer");
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ }
+
+ packet.data = map_info.data + unit->offset;
+ packet.size = unit->size;
+ packet.type = packet.data[3];
+ packet.offset = 4;
+
+ status = parse_unit(decoder, unit, &packet);
+ gst_buffer_unmap(buffer, &map_info);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
+ return decode_unit(decoder, unit, &packet);
}
-static void
-gst_vaapi_decoder_mpeg2_finalize(GObject *object)
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_mpeg2_start_frame(GstVaapiDecoder *base_decoder,
+ GstVaapiDecoderUnit *base_unit)
{
- GstVaapiDecoderMpeg2 * const decoder = GST_VAAPI_DECODER_MPEG2(object);
+ GstVaapiDecoderMpeg2 * const decoder =
+ GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceHdr *seq_hdr;
+ GstMpegVideoSequenceExt *seq_ext;
+ GstMpegVideoSequenceDisplayExt *seq_display_ext;
+ GstVaapiPicture *picture;
+ GstVaapiDecoderStatus status;
- gst_vaapi_decoder_mpeg2_destroy(decoder);
+ if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS))
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+ priv->state &= ~GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS;
+
+ seq_hdr = &priv->seq_hdr->data.seq_hdr;
+ seq_ext = priv->seq_ext ? &priv->seq_ext->data.seq_ext : NULL;
+ seq_display_ext = priv->seq_display_ext ?
+ &priv->seq_display_ext->data.seq_display_ext : NULL;
+ if (gst_mpeg_video_finalise_mpeg2_sequence_header(seq_hdr, seq_ext,
+ seq_display_ext))
+ gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder,
+ seq_hdr->par_w, seq_hdr->par_h);
- G_OBJECT_CLASS(gst_vaapi_decoder_mpeg2_parent_class)->finalize(object);
+ status = ensure_context(decoder);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
+ GST_ERROR("failed to reset context");
+ return status;
+ }
+
+ if (priv->current_picture) {
+ /* Re-use current picture where the first field was decoded */
+ picture = gst_vaapi_picture_new_field(priv->current_picture);
+ if (!picture) {
+ GST_ERROR("failed to allocate field picture");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+ }
+ else {
+ /* Create new picture */
+ picture = GST_VAAPI_PICTURE_NEW(MPEG2, decoder);
+ if (!picture) {
+ GST_ERROR("failed to allocate picture");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+ }
+ gst_vaapi_picture_replace(&priv->current_picture, picture);
+ gst_vaapi_picture_unref(picture);
+
+ /* Update cropping rectangle */
+ /* XXX: handle picture_display_extension() */
+ if (seq_display_ext && priv->pic_display_ext) {
+ GstVaapiRectangle * const crop_rect = &priv->crop_rect;
+ if (crop_rect->x + crop_rect->width <= priv->width &&
+ crop_rect->y + crop_rect->height <= priv->height)
+ gst_vaapi_picture_set_crop_rect(picture, crop_rect);
+ }
+
+ status = ensure_quant_matrix(decoder, picture);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
+ GST_ERROR("failed to reset quantizer matrix");
+ return status;
+ }
+
+ status = init_picture(decoder, picture);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
+
+ fill_picture(decoder, picture);
+
+ priv->state |= GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
-static void
-gst_vaapi_decoder_mpeg2_constructed(GObject *object)
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_mpeg2_end_frame(GstVaapiDecoder *base_decoder)
{
- GstVaapiDecoderMpeg2 * const decoder = GST_VAAPI_DECODER_MPEG2(object);
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GObjectClass *parent_class;
+ GstVaapiDecoderMpeg2 * const decoder =
+ GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+
+ return decode_current_picture(decoder);
+}
- parent_class = G_OBJECT_CLASS(gst_vaapi_decoder_mpeg2_parent_class);
- if (parent_class->constructed)
- parent_class->constructed(object);
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_mpeg2_flush(GstVaapiDecoder *base_decoder)
+{
+ GstVaapiDecoderMpeg2 * const decoder =
+ GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
- priv->is_constructed = gst_vaapi_decoder_mpeg2_create(decoder);
+ gst_vaapi_dpb_flush(priv->dpb);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static void
gst_vaapi_decoder_mpeg2_class_init(GstVaapiDecoderMpeg2Class *klass)
{
- GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+ GstVaapiMiniObjectClass * const object_class =
+ GST_VAAPI_MINI_OBJECT_CLASS(klass);
GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
- g_type_class_add_private(klass, sizeof(GstVaapiDecoderMpeg2Private));
-
- object_class->finalize = gst_vaapi_decoder_mpeg2_finalize;
- object_class->constructed = gst_vaapi_decoder_mpeg2_constructed;
+ object_class->size = sizeof(GstVaapiDecoderMpeg2);
+ object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
+ decoder_class->create = gst_vaapi_decoder_mpeg2_create;
+ decoder_class->destroy = gst_vaapi_decoder_mpeg2_destroy;
decoder_class->parse = gst_vaapi_decoder_mpeg2_parse;
decoder_class->decode = gst_vaapi_decoder_mpeg2_decode;
+ decoder_class->start_frame = gst_vaapi_decoder_mpeg2_start_frame;
+ decoder_class->end_frame = gst_vaapi_decoder_mpeg2_end_frame;
+ decoder_class->flush = gst_vaapi_decoder_mpeg2_flush;
}
-static void
-gst_vaapi_decoder_mpeg2_init(GstVaapiDecoderMpeg2 *decoder)
+static inline const GstVaapiDecoderClass *
+gst_vaapi_decoder_mpeg2_class(void)
{
- GstVaapiDecoderMpeg2Private *priv;
-
- priv = GST_VAAPI_DECODER_MPEG2_GET_PRIVATE(decoder);
- decoder->priv = priv;
- priv->width = 0;
- priv->height = 0;
- priv->fps_n = 0;
- priv->fps_d = 0;
- priv->hw_profile = GST_VAAPI_PROFILE_UNKNOWN;
- priv->profile = GST_VAAPI_PROFILE_MPEG2_SIMPLE;
- priv->current_picture = NULL;
- priv->is_constructed = FALSE;
- priv->is_opened = FALSE;
- priv->has_seq_ext = FALSE;
- priv->has_seq_scalable_ext = FALSE;
- priv->has_pic_ext = FALSE;
- priv->has_quant_matrix_ext = FALSE;
- priv->size_changed = FALSE;
- priv->profile_changed = TRUE; /* Allow fallbacks to work */
- priv->quant_matrix_changed = FALSE;
- priv->progressive_sequence = FALSE;
- priv->closed_gop = FALSE;
- priv->broken_link = FALSE;
+ static GstVaapiDecoderMpeg2Class g_class;
+ static gsize g_class_init = FALSE;
+
+ if (g_once_init_enter(&g_class_init)) {
+ gst_vaapi_decoder_mpeg2_class_init(&g_class);
+ g_once_init_leave(&g_class_init, TRUE);
+ }
+ return GST_VAAPI_DECODER_CLASS(&g_class);
}
/**
GstVaapiDecoder *
gst_vaapi_decoder_mpeg2_new(GstVaapiDisplay *display, GstCaps *caps)
{
- GstVaapiDecoderMpeg2 *decoder;
-
- g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
- g_return_val_if_fail(GST_IS_CAPS(caps), NULL);
-
- decoder = g_object_new(
- GST_VAAPI_TYPE_DECODER_MPEG2,
- "display", display,
- "caps", caps,
- NULL
- );
- if (!decoder->priv->is_constructed) {
- g_object_unref(decoder);
- return NULL;
- }
- return GST_VAAPI_DECODER_CAST(decoder);
+ return gst_vaapi_decoder_new(gst_vaapi_decoder_mpeg2_class(),
+ display, caps);
}