/*
* gstvaapidecoder_mpeg2.c - MPEG-2 decoder
*
- * Copyright (C) 2011 Intel Corporation
+ * Copyright (C) 2011-2013 Intel Corporation
+ * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
#define DEBUG 1
#include "gstvaapidebug.h"
-G_DEFINE_TYPE(GstVaapiDecoderMpeg2,
- gst_vaapi_decoder_mpeg2,
- GST_VAAPI_TYPE_DECODER);
-
-#define GST_VAAPI_DECODER_MPEG2_GET_PRIVATE(obj) \
- (G_TYPE_INSTANCE_GET_PRIVATE((obj), \
- GST_VAAPI_TYPE_DECODER_MPEG2, \
- GstVaapiDecoderMpeg2Private))
-
-#define READ_UINT8(br, val, nbits) G_STMT_START { \
- if (!gst_bit_reader_get_bits_uint8 (br, &val, nbits)) { \
- GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
- goto failed; \
- } \
-} G_STMT_END
-
-#define SKIP(reader, nbits) G_STMT_START { \
- if (!gst_bit_reader_skip (reader, nbits)) { \
- GST_WARNING ("failed to skip nbits: %d", nbits); \
- goto failed; \
- } \
-} G_STMT_END
+/* ------------------------------------------------------------------------- */
+/* --- PTS Generator --- */
+/* ------------------------------------------------------------------------- */
+
+/* Helper state used to derive a monotonic PTS for every picture from the
+ * GOP timestamps and each picture's temporal sequence number (TSN). */
+typedef struct _PTSGenerator PTSGenerator;
+struct _PTSGenerator {
+    GstClockTime gop_pts; // Current GOP PTS
+    GstClockTime max_pts; // Max picture PTS
+    guint gop_tsn; // Absolute GOP TSN
+    guint max_tsn; // Max picture TSN, relative to last GOP TSN
+    guint ovl_tsn; // How many times TSN overflowed since GOP
+    guint lst_tsn; // Last picture TSN
+    guint fps_n; // Frame rate numerator
+    guint fps_d; // Frame rate denominator
+};
+
+/* Resets the PTS generator: both timestamps become invalid
+ * (GST_CLOCK_TIME_NONE), all TSN counters return to zero and the frame
+ * rate is cleared. */
+static void
+pts_init(PTSGenerator *tsg)
+{
+    tsg->gop_pts = GST_CLOCK_TIME_NONE;
+    tsg->max_pts = GST_CLOCK_TIME_NONE;
+    tsg->gop_tsn = 0;
+    tsg->max_tsn = 0;
+    tsg->ovl_tsn = 0;
+    tsg->lst_tsn = 0;
+    tsg->fps_n = 0;
+    tsg->fps_d = 0;
+}
+
+/* Returns the duration, in nanoseconds, of @num_frames frames at the
+ * current frame rate.
+ * NOTE(review): divides by tsg->fps_n — assumes pts_set_framerate() was
+ * called with a non-zero numerator first; confirm callers guarantee it. */
+static inline GstClockTime
+pts_get_duration(PTSGenerator *tsg, guint num_frames)
+{
+    return gst_util_uint64_scale(num_frames,
+        GST_SECOND * tsg->fps_d, tsg->fps_n);
+}
+
+/* Returns the picture order count of the last evaluated picture: the
+ * absolute GOP base TSN, plus 1024 per TSN overflow recorded by
+ * pts_eval() (TSN wraps after 1023), plus the last picture TSN. */
+static inline guint
+pts_get_poc(PTSGenerator *tsg)
+{
+    return tsg->gop_tsn + tsg->ovl_tsn * 1024 + tsg->lst_tsn;
+}
+
+/* Records the frame rate (@fps_n/@fps_d) used by pts_get_duration() to
+ * convert frame counts into clock-time durations. */
+static void
+pts_set_framerate(PTSGenerator *tsg, guint fps_n, guint fps_d)
+{
+    tsg->fps_n = fps_n;
+    tsg->fps_d = fps_d;
+}
+
+/* Starts a new GOP at @gop_pts. If @gop_pts is invalid, or does not
+ * advance past the highest picture PTS seen so far, a replacement PTS is
+ * interpolated from the last known picture (or forced to 0 when nothing
+ * was seen yet). The per-GOP counters (max/ovl/lst TSN) are reset in all
+ * cases. */
+static void
+pts_sync(PTSGenerator *tsg, GstClockTime gop_pts)
+{
+    guint gop_tsn;
+
+    if (!GST_CLOCK_TIME_IS_VALID(gop_pts) ||
+        (GST_CLOCK_TIME_IS_VALID(tsg->max_pts) && tsg->max_pts >= gop_pts)) {
+        /* Invalid GOP PTS, interpolate from the last known picture PTS */
+        if (GST_CLOCK_TIME_IS_VALID(tsg->max_pts)) {
+            gop_pts = tsg->max_pts + pts_get_duration(tsg, 1);
+            gop_tsn = tsg->gop_tsn + tsg->ovl_tsn * 1024 + tsg->max_tsn + 1;
+        }
+        else {
+            gop_pts = 0;
+            gop_tsn = 0;
+        }
+    }
+    else {
+        /* Interpolate GOP TSN from this valid PTS */
+        if (GST_CLOCK_TIME_IS_VALID(tsg->gop_pts))
+            /* Round up: the "+ duration(1) - 1" term makes the scale
+               behave as a ceiling division in frame units */
+            gop_tsn = tsg->gop_tsn + gst_util_uint64_scale(
+                gop_pts - tsg->gop_pts + pts_get_duration(tsg, 1) - 1,
+                tsg->fps_n, GST_SECOND * tsg->fps_d);
+        else
+            gop_tsn = 0;
+    }
+
+    tsg->gop_pts = gop_pts;
+    tsg->gop_tsn = gop_tsn;
+    tsg->max_tsn = 0;
+    tsg->ovl_tsn = 0;
+    tsg->lst_tsn = 0;
+}
+
+/* Returns the PTS for a picture with upstream timestamp @pic_pts and
+ * temporal sequence number @pic_tsn. When @pic_pts is invalid, the PTS is
+ * interpolated as the GOP PTS plus @pic_tsn (and any TSN overflows) frame
+ * durations. Also updates the running max PTS/TSN and detects TSN
+ * wraparound (1023 -> small value) by bumping ovl_tsn. */
+static GstClockTime
+pts_eval(PTSGenerator *tsg, GstClockTime pic_pts, guint pic_tsn)
+{
+    GstClockTime pts;
+
+    if (!GST_CLOCK_TIME_IS_VALID(tsg->gop_pts))
+        tsg->gop_pts = 0;
+
+    pts = pic_pts;
+    if (!GST_CLOCK_TIME_IS_VALID (pts))
+        pts = tsg->gop_pts + pts_get_duration(tsg, tsg->ovl_tsn * 1024 + pic_tsn);
+
+    if (!GST_CLOCK_TIME_IS_VALID(tsg->max_pts) || tsg->max_pts < pts)
+        tsg->max_pts = pts;
+
+    if (tsg->max_tsn < pic_tsn)
+        tsg->max_tsn = pic_tsn;
+    else if (tsg->max_tsn == 1023 && pic_tsn < tsg->lst_tsn) { /* TSN wrapped */
+        tsg->max_tsn = pic_tsn;
+        tsg->ovl_tsn++;
+    }
+    tsg->lst_tsn = pic_tsn;
+
+    return pts;
+}
+
+/* ------------------------------------------------------------------------- */
+/* --- MPEG-2 Parser Info --- */
+/* ------------------------------------------------------------------------- */
+
+/* Reference-counted (GstVaapiMiniObject) container holding one parsed
+ * MPEG-2 packet together with its decoded header union, so parse results
+ * can be attached to decoder units and cached in the decoder private
+ * data. */
+typedef struct _GstVaapiParserInfoMpeg2 GstVaapiParserInfoMpeg2;
+struct _GstVaapiParserInfoMpeg2 {
+    GstVaapiMiniObject parent_instance;
+    GstMpegVideoPacket packet;
+    guint8 extension_type; /* for Extension packets */
+    union {
+        GstMpegVideoSequenceHdr seq_hdr;
+        GstMpegVideoSequenceExt seq_ext;
+        GstMpegVideoSequenceDisplayExt seq_display_ext;
+        GstMpegVideoSequenceScalableExt seq_scalable_ext;
+        GstMpegVideoGop gop;
+        GstMpegVideoQuantMatrixExt quant_matrix;
+        GstMpegVideoPictureHdr pic_hdr;
+        GstMpegVideoPictureExt pic_ext;
+        GstMpegVideoSliceHdr slice_hdr;
+    } data;
+};
+
+/* Returns the static mini-object class for GstVaapiParserInfoMpeg2. Only
+ * the instance size is set; no per-instance cleanup function is provided
+ * (NULL), since the struct holds no owned pointers. */
+static inline const GstVaapiMiniObjectClass *
+gst_vaapi_parser_info_mpeg2_class(void)
+{
+    static const GstVaapiMiniObjectClass GstVaapiParserInfoMpeg2Class = {
+        sizeof(GstVaapiParserInfoMpeg2),
+        NULL
+    };
+    return &GstVaapiParserInfoMpeg2Class;
+}
+
+/* Allocates a new parser info object; callers treat a NULL return as an
+ * allocation failure (see gst_vaapi_parser_info_mpeg2_ensure()). */
+static inline GstVaapiParserInfoMpeg2 *
+gst_vaapi_parser_info_mpeg2_new(void)
+{
+    return (GstVaapiParserInfoMpeg2 *)
+        gst_vaapi_mini_object_new(gst_vaapi_parser_info_mpeg2_class());
+}
+
+/* Returns *pi_ptr, lazily allocating and storing a new parser info object
+ * when it is still NULL. Returns NULL only if that allocation fails. */
+static inline GstVaapiParserInfoMpeg2 *
+gst_vaapi_parser_info_mpeg2_ensure(GstVaapiParserInfoMpeg2 **pi_ptr)
+{
+    GstVaapiParserInfoMpeg2 *pi = *pi_ptr;
+
+    if (G_LIKELY(pi != NULL))
+        return pi;
+
+    *pi_ptr = pi = gst_vaapi_parser_info_mpeg2_new();
+    return pi;
+}
+
+/* Takes an extra reference on @pi */
+#define gst_vaapi_parser_info_mpeg2_ref(pi) \
+    gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
+
+/* Drops one reference on @pi */
+#define gst_vaapi_parser_info_mpeg2_unref(pi) \
+    gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
+
+/* Replaces the object at @old_pi_ptr with @new_pi, with refcounts managed
+ * by gst_vaapi_mini_object_replace(); used with NULL to release (see
+ * gst_vaapi_decoder_mpeg2_close()) */
+#define gst_vaapi_parser_info_mpeg2_replace(old_pi_ptr, new_pi) \
+    gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
+        (GstVaapiMiniObject *)(new_pi))
+
+/* ------------------------------------------------------------------------- */
+/* --- MPEG-2 Decoder --- */
+/* ------------------------------------------------------------------------- */
+
+/* Unchecked cast from a base GstVaapiDecoder pointer to the MPEG-2
+ * decoder subtype */
+#define GST_VAAPI_DECODER_MPEG2_CAST(decoder) \
+    ((GstVaapiDecoderMpeg2 *)(decoder))
+
+typedef struct _GstVaapiDecoderMpeg2Private GstVaapiDecoderMpeg2Private;
+typedef struct _GstVaapiDecoderMpeg2Class GstVaapiDecoderMpeg2Class;
+
+/* Bitmask of headers seen so far in the stream; decode_current_picture()
+ * requires the VALID_PICTURE combination (sequence + picture headers and
+ * at least one slice) before submitting a frame, and keeps only the
+ * sequence-level bits across pictures. */
+typedef enum {
+    GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR = 1 << 0,
+    GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT = 1 << 1,
+    GST_MPEG_VIDEO_STATE_GOT_PIC_HDR = 1 << 2,
+    GST_MPEG_VIDEO_STATE_GOT_PIC_EXT = 1 << 3,
+    GST_MPEG_VIDEO_STATE_GOT_SLICE = 1 << 4,
+
+    GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS = (
+        GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
+        GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT),
+    GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS = (
+        GST_MPEG_VIDEO_STATE_GOT_PIC_HDR|
+        GST_MPEG_VIDEO_STATE_GOT_PIC_EXT),
+    GST_MPEG_VIDEO_STATE_VALID_PICTURE = (
+        GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS|
+        GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS|
+        GST_MPEG_VIDEO_STATE_GOT_SLICE)
+} GstMpegVideoState;
struct _GstVaapiDecoderMpeg2Private {
GstVaapiProfile profile;
+    GstVaapiProfile hw_profile; /* profile actually usable by the VA
+                                   display, chosen by get_profile() */
guint width;
guint height;
guint fps_n;
guint fps_d;
-    GstMpegVideoSequenceHdr seq_hdr;
-    GstMpegVideoSequenceExt seq_ext;
-    GstMpegVideoPictureHdr pic_hdr;
-    GstMpegVideoPictureExt pic_ext;
-    GstMpegVideoQuantMatrixExt quant_matrix_ext;
+    guint state; /* bitmask of GstMpegVideoState header flags */
+    GstVaapiRectangle crop_rect; /* from sequence_display_extension */
+    /* Cached parser info objects for the most recent header of each
+       kind; NULL until first seen, released in close() */
+    GstVaapiParserInfoMpeg2 *seq_hdr;
+    GstVaapiParserInfoMpeg2 *seq_ext;
+    GstVaapiParserInfoMpeg2 *seq_display_ext;
+    GstVaapiParserInfoMpeg2 *seq_scalable_ext;
+    GstVaapiParserInfoMpeg2 *gop;
+    GstVaapiParserInfoMpeg2 *pic_hdr;
+    GstVaapiParserInfoMpeg2 *pic_ext;
+    GstVaapiParserInfoMpeg2 *pic_display_ext;
+    GstVaapiParserInfoMpeg2 *quant_matrix;
+    GstVaapiParserInfoMpeg2 *slice_hdr;
GstVaapiPicture *current_picture;
GstVaapiDpb *dpb;
-    GstAdapter *adapter;
-    GstClockTime seq_pts;
-    GstClockTime gop_pts;
-    GstClockTime pts_diff;
-    guint is_constructed : 1;
+    PTSGenerator tsg; /* per-stream PTS interpolator */
guint is_opened : 1;
-    guint has_seq_ext : 1;
-    guint has_seq_scalable_ext : 1;
-    guint has_pic_ext : 1;
-    guint has_quant_matrix_ext : 1;
guint size_changed : 1;
guint profile_changed : 1;
guint quant_matrix_changed : 1;
guint broken_link : 1;
};
-/* VLC decoder from gst-plugins-bad */
-typedef struct _VLCTable VLCTable;
-struct _VLCTable {
- gint value;
- guint cword;
- guint cbits;
-};
-
-static gboolean
-decode_vlc(GstBitReader *br, gint *res, const VLCTable *table, guint length)
-{
- guint8 i;
- guint cbits = 0;
- guint32 value = 0;
-
- for (i = 0; i < length; i++) {
- if (cbits != table[i].cbits) {
- cbits = table[i].cbits;
- if (!gst_bit_reader_peek_bits_uint32(br, &value, cbits)) {
- goto failed;
- }
- }
-
- if (value == table[i].cword) {
- SKIP(br, cbits);
- if (res)
- *res = table[i].value;
- return TRUE;
- }
- }
- GST_DEBUG("failed to find VLC code");
-
-failed:
- GST_WARNING("failed to decode VLC, returning");
- return FALSE;
-}
-
-enum {
- GST_MPEG_VIDEO_MACROBLOCK_ESCAPE = -1,
+/**
+ * GstVaapiDecoderMpeg2:
+ *
+ * A VA-API based MPEG-2 video decoder instance.
+ */
+struct _GstVaapiDecoderMpeg2 {
+    /*< private >*/
+    GstVaapiDecoder parent_instance;
+    GstVaapiDecoderMpeg2Private priv;
};
-/* Table B-1: Variable length codes for macroblock_address_increment */
-static const VLCTable mpeg2_mbaddr_vlc_table[] = {
- { 1, 0x01, 1 },
- { 2, 0x03, 3 },
- { 3, 0x02, 3 },
- { 4, 0x03, 4 },
- { 5, 0x02, 4 },
- { 6, 0x03, 5 },
- { 7, 0x02, 5 },
- { 8, 0x07, 7 },
- { 9, 0x06, 7 },
- { 10, 0x0b, 8 },
- { 11, 0x0a, 8 },
- { 12, 0x09, 8 },
- { 13, 0x08, 8 },
- { 14, 0x07, 8 },
- { 15, 0x06, 8 },
- { 16, 0x17, 10 },
- { 17, 0x16, 10 },
- { 18, 0x15, 10 },
- { 19, 0x14, 10 },
- { 20, 0x13, 10 },
- { 21, 0x12, 10 },
- { 22, 0x23, 11 },
- { 23, 0x22, 11 },
- { 24, 0x21, 11 },
- { 25, 0x20, 11 },
- { 26, 0x1f, 11 },
- { 27, 0x1e, 11 },
- { 28, 0x1d, 11 },
- { 29, 0x1c, 11 },
- { 30, 0x1b, 11 },
- { 31, 0x1a, 11 },
- { 32, 0x19, 11 },
- { 33, 0x18, 11 },
- { GST_MPEG_VIDEO_MACROBLOCK_ESCAPE, 0x08, 11 }
+/**
+ * GstVaapiDecoderMpeg2Class:
+ *
+ * The class structure for #GstVaapiDecoderMpeg2.
+ */
+struct _GstVaapiDecoderMpeg2Class {
+    /*< private >*/
+    GstVaapiDecoderClass parent_class;
};
+/* Releases all per-stream state: the picture being decoded, every cached
+ * parser info object and the DPB. Safe to call repeatedly (replace with
+ * NULL is idempotent). */
static void
gst_vaapi_decoder_mpeg2_close(GstVaapiDecoderMpeg2 *decoder)
{
-    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
gst_vaapi_picture_replace(&priv->current_picture, NULL);
-    if (priv->dpb) {
-        gst_vaapi_dpb_unref(priv->dpb);
-        priv->dpb = NULL;
-    }
-
-    if (priv->adapter) {
-        gst_adapter_clear(priv->adapter);
-        g_object_unref(priv->adapter);
-        priv->adapter = NULL;
-    }
+    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_hdr, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_ext, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_display_ext, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_scalable_ext, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->gop, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->pic_hdr, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->pic_ext, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->pic_display_ext, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->quant_matrix, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->slice_hdr, NULL);
+
+    gst_vaapi_dpb_replace(&priv->dpb, NULL);
}
+/* (Re)initializes per-stream state: closes any previous stream, creates a
+ * fresh DPB holding up to 2 reference frames and resets the PTS
+ * generator. Returns FALSE on DPB allocation failure. */
static gboolean
-gst_vaapi_decoder_mpeg2_open(GstVaapiDecoderMpeg2 *decoder, GstBuffer *buffer)
+gst_vaapi_decoder_mpeg2_open(GstVaapiDecoderMpeg2 *decoder)
{
-    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
gst_vaapi_decoder_mpeg2_close(decoder);
-    priv->adapter = gst_adapter_new();
-    if (!priv->adapter)
-        return FALSE;
-
-    priv->dpb = gst_vaapi_dpb_mpeg2_new();
+    priv->dpb = gst_vaapi_dpb_new(2);
if (!priv->dpb)
return FALSE;
+
+    pts_init(&priv->tsg);
return TRUE;
}
+/* vmethod: tears down the decoder by delegating to close(). */
static void
-gst_vaapi_decoder_mpeg2_destroy(GstVaapiDecoderMpeg2 *decoder)
+gst_vaapi_decoder_mpeg2_destroy(GstVaapiDecoder *base_decoder)
{
+    GstVaapiDecoderMpeg2 * const decoder =
+        GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+
gst_vaapi_decoder_mpeg2_close(decoder);
}
+/* vmethod: one-time construction. Starts from the lowest (simple)
+ * profile with no hardware profile selected yet; profile_changed forces
+ * ensure_context() to run profile negotiation on the first picture. */
static gboolean
-gst_vaapi_decoder_mpeg2_create(GstVaapiDecoderMpeg2 *decoder)
+gst_vaapi_decoder_mpeg2_create(GstVaapiDecoder *base_decoder)
{
-    if (!GST_VAAPI_DECODER_CODEC(decoder))
-        return FALSE;
+    GstVaapiDecoderMpeg2 * const decoder =
+        GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+
+    priv->hw_profile = GST_VAAPI_PROFILE_UNKNOWN;
+    priv->profile = GST_VAAPI_PROFILE_MPEG2_SIMPLE;
+    priv->profile_changed = TRUE; /* Allow fallbacks to work */
return TRUE;
}
memcpy(dst, src, 64);
}
+/* Returns a human-readable name for an MPEG-2 VA profile, for logging
+ * only ("<unknown>" for anything unrecognized). */
+static const char *
+get_profile_str(GstVaapiProfile profile)
+{
+    char *str;
+
+    switch (profile) {
+    case GST_VAAPI_PROFILE_MPEG2_SIMPLE: str = "simple"; break;
+    case GST_VAAPI_PROFILE_MPEG2_MAIN: str = "main"; break;
+    case GST_VAAPI_PROFILE_MPEG2_HIGH: str = "high"; break;
+    default: str = "<unknown>"; break;
+    }
+    return str;
+}
+
+/* Maps the stream profile (priv->profile) onto a profile the VA display
+ * actually supports for @entrypoint, walking upwards
+ * simple -> main -> high. As a special case, a high-profile stream that
+ * uses no high-profile-only tools (no sequence scalable extension, and
+ * chroma_format == 1 i.e. 4:2:0) may be demoted to main. Returns
+ * GST_VAAPI_PROFILE_UNKNOWN when no supported profile is found. */
+static GstVaapiProfile
+get_profile(GstVaapiDecoderMpeg2 *decoder, GstVaapiEntrypoint entrypoint)
+{
+    GstVaapiDisplay * const va_display = GST_VAAPI_DECODER_DISPLAY(decoder);
+    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+    GstVaapiProfile profile = priv->profile;
+
+    do {
+        /* Return immediately if the exact same profile was found */
+        if (gst_vaapi_display_has_decoder(va_display, profile, entrypoint))
+            break;
+
+        /* Otherwise, try to map to a higher profile */
+        switch (profile) {
+        case GST_VAAPI_PROFILE_MPEG2_SIMPLE:
+            profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
+            break;
+        case GST_VAAPI_PROFILE_MPEG2_MAIN:
+            profile = GST_VAAPI_PROFILE_MPEG2_HIGH;
+            break;
+        case GST_VAAPI_PROFILE_MPEG2_HIGH:
+            // Try to map to main profile if no high profile specific bits used
+            /* priv->profile == profile guards against looping back to main
+               a second time after the main -> high promotion above */
+            if (priv->profile == profile &&
+                !priv->seq_scalable_ext &&
+                (priv->seq_ext &&
+                 priv->seq_ext->data.seq_ext.chroma_format == 1)) {
+                profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
+                break;
+            }
+            // fall-through
+        default:
+            profile = GST_VAAPI_PROFILE_UNKNOWN;
+            break;
+        }
+    } while (profile != GST_VAAPI_PROFILE_UNKNOWN);
+
+    if (profile != priv->profile)
+        GST_INFO("forced %s profile to %s profile",
+            get_profile_str(priv->profile), get_profile_str(profile));
+    return profile;
+}
+
static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderMpeg2 *decoder)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GstVaapiProfile profiles[2];
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
- guint i, n_profiles = 0;
gboolean reset_context = FALSE;
if (priv->profile_changed) {
priv->profile_changed = FALSE;
reset_context = TRUE;
- profiles[n_profiles++] = priv->profile;
- if (priv->profile == GST_VAAPI_PROFILE_MPEG2_SIMPLE)
- profiles[n_profiles++] = GST_VAAPI_PROFILE_MPEG2_MAIN;
-
- for (i = 0; i < n_profiles; i++) {
- if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
- profiles[i], entrypoint))
- break;
- }
- if (i == n_profiles)
+ priv->hw_profile = get_profile(decoder, entrypoint);
+ if (priv->hw_profile == GST_VAAPI_PROFILE_UNKNOWN)
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
- priv->profile = profiles[i];
}
if (priv->size_changed) {
}
if (reset_context) {
- reset_context = gst_vaapi_decoder_ensure_context(
- GST_VAAPI_DECODER(decoder),
- priv->profile,
- entrypoint,
- priv->width, priv->height
+ GstVaapiContextInfo info;
+
+ info.profile = priv->hw_profile;
+ info.entrypoint = entrypoint;
+ info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
+ info.width = priv->width;
+ info.height = priv->height;
+ info.ref_frames = 2;
+ reset_context = gst_vaapi_decoder_ensure_context(
+ GST_VAAPI_DECODER_CAST(decoder),
+ &info
);
if (!reset_context)
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
static GstVaapiDecoderStatus
ensure_quant_matrix(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceHdr * const seq_hdr = &priv->seq_hdr->data.seq_hdr;
VAIQMatrixBufferMPEG2 *iq_matrix;
guint8 *intra_quant_matrix = NULL;
guint8 *non_intra_quant_matrix = NULL;
}
iq_matrix = picture->iq_matrix->param;
- intra_quant_matrix = priv->seq_hdr.intra_quantizer_matrix;
- non_intra_quant_matrix = priv->seq_hdr.non_intra_quantizer_matrix;
- if (priv->has_quant_matrix_ext) {
- if (priv->quant_matrix_ext.load_intra_quantiser_matrix)
- intra_quant_matrix = priv->quant_matrix_ext.intra_quantiser_matrix;
- if (priv->quant_matrix_ext.load_non_intra_quantiser_matrix)
- non_intra_quant_matrix = priv->quant_matrix_ext.non_intra_quantiser_matrix;
- if (priv->quant_matrix_ext.load_chroma_intra_quantiser_matrix)
- chroma_intra_quant_matrix = priv->quant_matrix_ext.chroma_intra_quantiser_matrix;
- if (priv->quant_matrix_ext.load_chroma_non_intra_quantiser_matrix)
- chroma_non_intra_quant_matrix = priv->quant_matrix_ext.chroma_non_intra_quantiser_matrix;
+ intra_quant_matrix = seq_hdr->intra_quantizer_matrix;
+ non_intra_quant_matrix = seq_hdr->non_intra_quantizer_matrix;
+
+ if (priv->quant_matrix) {
+ GstMpegVideoQuantMatrixExt * const quant_matrix =
+ &priv->quant_matrix->data.quant_matrix;
+ if (quant_matrix->load_intra_quantiser_matrix)
+ intra_quant_matrix = quant_matrix->intra_quantiser_matrix;
+ if (quant_matrix->load_non_intra_quantiser_matrix)
+ non_intra_quant_matrix = quant_matrix->non_intra_quantiser_matrix;
+ if (quant_matrix->load_chroma_intra_quantiser_matrix)
+ chroma_intra_quant_matrix = quant_matrix->chroma_intra_quantiser_matrix;
+ if (quant_matrix->load_chroma_non_intra_quantiser_matrix)
+ chroma_non_intra_quant_matrix = quant_matrix->chroma_non_intra_quantiser_matrix;
}
iq_matrix->load_intra_quantiser_matrix = intra_quant_matrix != NULL;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
-static gboolean
+/* Returns TRUE when every header bit in @state has been seen, i.e.
+ * @state is a subset of priv->state. */
+static inline gboolean
+is_valid_state(GstVaapiDecoderMpeg2 *decoder, guint state)
+{
+    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+
+    return (priv->state & state) == state;
+}
+
+/* Submits the current picture to the hardware and, once the picture is
+ * complete (both fields for interlaced content), moves it into the DPB.
+ * Frames lacking a full header set (VALID_PICTURE) are dropped. Only the
+ * sequence-level state bits survive across pictures. */
+static GstVaapiDecoderStatus
decode_current_picture(GstVaapiDecoderMpeg2 *decoder)
{
-    GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
GstVaapiPicture * const picture = priv->current_picture;
-    if (picture) {
-        if (!gst_vaapi_picture_decode(picture))
-            return FALSE;
-        if (GST_VAAPI_PICTURE_IS_COMPLETE(picture)) {
-            if (!gst_vaapi_dpb_add(priv->dpb, picture))
-                return FALSE;
-            gst_vaapi_picture_replace(&priv->current_picture, NULL);
-        }
+    if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_PICTURE))
+        goto drop_frame;
+    /* Keep only the sequence-header bits for the next picture */
+    priv->state &= GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS;
+
+    if (!picture)
+        return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+    if (!gst_vaapi_picture_decode(picture))
+        goto error;
+    if (GST_VAAPI_PICTURE_IS_COMPLETE(picture)) {
+        if (!gst_vaapi_dpb_add(priv->dpb, picture))
+            goto error;
+        gst_vaapi_picture_replace(&priv->current_picture, NULL);
}
-    return TRUE;
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+error:
+    /* XXX: fix for cases where first field failed to be decoded */
+    gst_vaapi_picture_replace(&priv->current_picture, NULL);
+    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+
+drop_frame:
+    priv->state &= GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS;
+    return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
}
static GstVaapiDecoderStatus
-decode_sequence(GstVaapiDecoderMpeg2 *decoder, guchar *buf, guint buf_size)
+parse_sequence(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
{
- GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GstMpegVideoSequenceHdr * const seq_hdr = &priv->seq_hdr;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceHdr *seq_hdr;
+
+ priv->state = 0;
+
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->seq_hdr)) {
+ GST_ERROR("failed to allocate parser info for sequence header");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
- if (!gst_mpeg_video_parse_sequence_header(seq_hdr, buf, buf_size, 0)) {
+ seq_hdr = &priv->seq_hdr->data.seq_hdr;
+
+ if (!gst_mpeg_video_packet_parse_sequence_header(packet, seq_hdr)) {
GST_ERROR("failed to parse sequence header");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
+ gst_vaapi_decoder_unit_set_parsed_info(unit, seq_hdr, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+/* Applies a parsed sequence header: invalidates all sequence-scoped
+ * cached headers, propagates frame rate and dimensions to the base
+ * decoder and the PTS generator, and marks the GOT_SEQ_HDR state bit. */
+static GstVaapiDecoderStatus
+decode_sequence(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
+{
+    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
+    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+    GstMpegVideoSequenceHdr * const seq_hdr = unit->parsed_info;
+
+    /* A new sequence invalidates every cached sequence-scoped header */
+    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_ext, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_display_ext, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->seq_scalable_ext, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->quant_matrix, NULL);
+    gst_vaapi_parser_info_mpeg2_replace(&priv->pic_display_ext, NULL);
+
priv->fps_n = seq_hdr->fps_n;
priv->fps_d = seq_hdr->fps_d;
+    pts_set_framerate(&priv->tsg, priv->fps_n, priv->fps_d);
gst_vaapi_decoder_set_framerate(base_decoder, priv->fps_n, priv->fps_d);
-    priv->seq_pts = gst_adapter_prev_timestamp(priv->adapter, NULL);
-
priv->width = seq_hdr->width;
priv->height = seq_hdr->height;
-    priv->has_seq_ext = FALSE;
priv->size_changed = TRUE;
priv->quant_matrix_changed = TRUE;
priv->progressive_sequence = TRUE;
+
+    priv->state |= GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-decode_sequence_ext(GstVaapiDecoderMpeg2 *decoder, guchar *buf, guint buf_size)
+parse_sequence_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
{
- GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GstMpegVideoSequenceExt * const seq_ext = &priv->seq_ext;
- GstVaapiProfile profile;
- guint width, height;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceExt *seq_ext;
+
+ priv->state &= GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR;
+
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->seq_ext)) {
+ GST_ERROR("failed to allocate parser info for sequence extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
- if (!gst_mpeg_video_parse_sequence_extension(seq_ext, buf, buf_size, 0)) {
+ seq_ext = &priv->seq_ext->data.seq_ext;
+
+ if (!gst_mpeg_video_packet_parse_sequence_extension(packet, seq_ext)) {
GST_ERROR("failed to parse sequence-extension");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
- priv->has_seq_ext = TRUE;
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, seq_ext, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+decode_sequence_ext(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceExt * const seq_ext = unit->parsed_info;
+ GstVaapiProfile profile;
+ guint width, height;
+
+ if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR))
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
priv->progressive_sequence = seq_ext->progressive;
gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
if (seq_ext->fps_n_ext && seq_ext->fps_d_ext) {
priv->fps_n *= seq_ext->fps_n_ext + 1;
priv->fps_d *= seq_ext->fps_d_ext + 1;
+ pts_set_framerate(&priv->tsg, priv->fps_n, priv->fps_d);
gst_vaapi_decoder_set_framerate(base_decoder, priv->fps_n, priv->fps_d);
}
case GST_MPEG_VIDEO_PROFILE_MAIN:
profile = GST_VAAPI_PROFILE_MPEG2_MAIN;
break;
+ case GST_MPEG_VIDEO_PROFILE_HIGH:
+ profile = GST_VAAPI_PROFILE_MPEG2_HIGH;
+ break;
default:
GST_ERROR("unsupported profile %d", seq_ext->profile);
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
priv->profile = profile;
priv->profile_changed = TRUE;
}
+
+ priv->state |= GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-decode_sequence_end(GstVaapiDecoderMpeg2 *decoder)
+parse_sequence_display_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceDisplayExt *seq_display_ext;
- if (priv->current_picture && !decode_current_picture(decoder))
- return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->seq_display_ext)) {
+ GST_ERROR("failed to allocate parser info for sequence display extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
- gst_vaapi_dpb_flush(priv->dpb);
- return GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
+ seq_display_ext = &priv->seq_display_ext->data.seq_display_ext;
+
+ if (!gst_mpeg_video_packet_parse_sequence_display_extension(packet,
+ seq_display_ext)) {
+ GST_ERROR("failed to parse sequence-display-extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ }
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, seq_display_ext, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-decode_quant_matrix_ext(GstVaapiDecoderMpeg2 *decoder, guchar *buf, guint buf_size)
+decode_sequence_display_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GstMpegVideoQuantMatrixExt * const quant_matrix_ext = &priv->quant_matrix_ext;
-
- if (!gst_mpeg_video_parse_quant_matrix_extension(quant_matrix_ext, buf, buf_size, 0)) {
- GST_ERROR("failed to parse quant-matrix-extension");
- return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceDisplayExt *seq_display_ext;
+
+ seq_display_ext = priv->seq_display_ext ?
+ &priv->seq_display_ext->data.seq_display_ext : NULL;
+
+ /* Update cropping rectangle */
+ if (seq_display_ext) {
+ GstVaapiRectangle * const crop_rect = &priv->crop_rect;
+ crop_rect->x = 0;
+ crop_rect->y = 0;
+ crop_rect->width = seq_display_ext->display_horizontal_size;
+ crop_rect->height = seq_display_ext->display_vertical_size;
}
- priv->has_quant_matrix_ext = TRUE;
- priv->quant_matrix_changed = TRUE;
+
+ /* XXX: handle color primaries */
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-decode_gop(GstVaapiDecoderMpeg2 *decoder, guchar *buf, guint buf_size)
+parse_sequence_scalable_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GstMpegVideoGop gop;
- GstClockTime pts;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceScalableExt *seq_scalable_ext;
- if (!gst_mpeg_video_parse_gop(&gop, buf, buf_size, 0)) {
- GST_ERROR("failed to parse GOP");
- return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->seq_scalable_ext)) {
+ GST_ERROR("failed to allocate parser info for sequence scalable extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
- priv->closed_gop = gop.closed_gop;
- priv->broken_link = gop.broken_link;
+ seq_scalable_ext = &priv->seq_scalable_ext->data.seq_scalable_ext;
- GST_DEBUG("GOP %02u:%02u:%02u:%02u (closed_gop %d, broken_link %d)",
- gop.hour, gop.minute, gop.second, gop.frame,
- priv->closed_gop, priv->broken_link);
+ if (!gst_mpeg_video_packet_parse_sequence_scalable_extension(packet,
+ seq_scalable_ext)) {
+ GST_ERROR("failed to parse sequence-scalable-extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ }
- pts = GST_SECOND * (gop.hour * 3600 + gop.minute * 60 + gop.second);
- pts += gst_util_uint64_scale(gop.frame, GST_SECOND * priv->fps_d, priv->fps_n);
- if (priv->gop_pts != GST_CLOCK_TIME_NONE && pts <= priv->gop_pts) {
- /* Try to fix GOP timestamps, based on demux timestamps */
- pts = gst_adapter_prev_timestamp(priv->adapter, NULL);
- if (pts != GST_CLOCK_TIME_NONE)
- pts -= priv->pts_diff;
- }
- priv->gop_pts = pts;
- if (!priv->pts_diff)
- priv->pts_diff = priv->seq_pts - priv->gop_pts;
+ gst_vaapi_decoder_unit_set_parsed_info(unit, seq_scalable_ext, NULL);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-decode_picture(GstVaapiDecoderMpeg2 *decoder, guchar *buf, guint buf_size)
+decode_sequence_scalable_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr;
- GstVaapiPicture *picture;
- GstVaapiDecoderStatus status;
- GstClockTime pts;
+ /* XXX: unsupported header -- ignore */
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
- status = ensure_context(decoder);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
- GST_ERROR("failed to reset context");
- return status;
- }
+/* Handles the sequence_end_code: flushes any frames still held in the
+ * DPB so they are output. */
+static GstVaapiDecoderStatus
+decode_sequence_end(GstVaapiDecoderMpeg2 *decoder)
+{
+    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
-    if (priv->current_picture && !decode_current_picture(decoder))
-        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+    gst_vaapi_dpb_flush(priv->dpb);
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
- if (priv->current_picture) {
- /* Re-use current picture where the first field was decoded */
- picture = gst_vaapi_picture_new_field(priv->current_picture);
- if (!picture) {
- GST_ERROR("failed to allocate field picture");
- return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
- }
- }
- else {
- /* Create new picture */
- picture = GST_VAAPI_PICTURE_NEW(MPEG2, decoder);
- if (!picture) {
- GST_ERROR("failed to allocate picture");
- return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
- }
- }
- gst_vaapi_picture_replace(&priv->current_picture, picture);
- gst_vaapi_picture_unref(picture);
+static GstVaapiDecoderStatus
+parse_quant_matrix_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoQuantMatrixExt *quant_matrix;
- status = ensure_quant_matrix(decoder, picture);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
- GST_ERROR("failed to reset quantizer matrix");
- return status;
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->quant_matrix)) {
+ GST_ERROR("failed to allocate parser info for quantization matrix");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
- if (!gst_mpeg_video_parse_picture_header(pic_hdr, buf, buf_size, 0)) {
- GST_ERROR("failed to parse picture header");
+ quant_matrix = &priv->quant_matrix->data.quant_matrix;
+
+ if (!gst_mpeg_video_packet_parse_quant_matrix_extension(packet,
+ quant_matrix)) {
+ GST_ERROR("failed to parse quant-matrix-extension");
return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
- priv->has_pic_ext = FALSE;
- switch (pic_hdr->pic_type) {
- case GST_MPEG_VIDEO_PICTURE_TYPE_I:
- GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
- picture->type = GST_VAAPI_PICTURE_TYPE_I;
- break;
- case GST_MPEG_VIDEO_PICTURE_TYPE_P:
- GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
- picture->type = GST_VAAPI_PICTURE_TYPE_P;
- break;
- case GST_MPEG_VIDEO_PICTURE_TYPE_B:
- picture->type = GST_VAAPI_PICTURE_TYPE_B;
- break;
- default:
- GST_ERROR("unsupported picture type %d", pic_hdr->pic_type);
- return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
- }
+ gst_vaapi_decoder_unit_set_parsed_info(unit, quant_matrix, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
- /* Update presentation time */
- pts = priv->gop_pts;
- pts += gst_util_uint64_scale(pic_hdr->tsn, GST_SECOND * priv->fps_d, priv->fps_n);
- picture->pts = pts + priv->pts_diff;
- return status;
+/* Applies a parsed quant matrix extension: the matrix itself was already
+ * stored in priv->quant_matrix by parse_quant_matrix_ext(), so only flag
+ * that ensure_quant_matrix() must rebuild the VA IQ matrix. */
+static GstVaapiDecoderStatus
+decode_quant_matrix_ext(GstVaapiDecoderMpeg2 *decoder,
+    GstVaapiDecoderUnit *unit)
+{
+    GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+
+    priv->quant_matrix_changed = TRUE;
+    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-decode_picture_ext(GstVaapiDecoderMpeg2 *decoder, guchar *buf, guint buf_size)
+parse_gop(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext;
- GstVaapiPicture * const picture = priv->current_picture;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoGop *gop;
- if (!gst_mpeg_video_parse_picture_extension(pic_ext, buf, buf_size, 0)) {
- GST_ERROR("failed to parse picture-extension");
- return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->gop)) {
+ GST_ERROR("failed to allocate parser info for GOP");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
- priv->has_pic_ext = TRUE;
- if (priv->progressive_sequence && !pic_ext->progressive_frame) {
- GST_WARNING("invalid interlaced frame in progressive sequence, fixing");
- pic_ext->progressive_frame = 1;
+ gop = &priv->gop->data.gop;
+
+ if (!gst_mpeg_video_packet_parse_gop(packet, gop)) {
+ GST_ERROR("failed to parse GOP");
+ return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
}
- if (pic_ext->picture_structure == 0 ||
- (pic_ext->progressive_frame &&
- pic_ext->picture_structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME)) {
+ gst_vaapi_decoder_unit_set_parsed_info(unit, gop, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+decode_gop(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoGop * const gop = unit->parsed_info;
+
+ priv->closed_gop = gop->closed_gop;
+ priv->broken_link = gop->broken_link;
+
+ GST_DEBUG("GOP %02u:%02u:%02u:%02u (closed_gop %d, broken_link %d)",
+ gop->hour, gop->minute, gop->second, gop->frame,
+ priv->closed_gop, priv->broken_link);
+
+ pts_sync(&priv->tsg, GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+parse_picture(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoPictureHdr *pic_hdr;
+
+ priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
+ GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT);
+
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->pic_hdr)) {
+ GST_ERROR("failed to allocate parser info for picture header");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+
+ pic_hdr = &priv->pic_hdr->data.pic_hdr;
+
+ if (!gst_mpeg_video_packet_parse_picture_header(packet, pic_hdr)) {
+ GST_ERROR("failed to parse picture header");
+ return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ }
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, pic_hdr, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+decode_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+
+ if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_SEQ_HEADERS))
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+ gst_vaapi_parser_info_mpeg2_replace(&priv->pic_ext, NULL);
+
+ priv->state |= GST_MPEG_VIDEO_STATE_GOT_PIC_HDR;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+parse_picture_ext(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoPictureExt *pic_ext;
+
+ priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
+ GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT|
+ GST_MPEG_VIDEO_STATE_GOT_PIC_HDR);
+
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->pic_ext)) {
+ GST_ERROR("failed to allocate parser info for picture extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+
+ pic_ext = &priv->pic_ext->data.pic_ext;
+
+ if (!gst_mpeg_video_packet_parse_picture_extension(packet, pic_ext)) {
+ GST_ERROR("failed to parse picture-extension");
+ return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ }
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, pic_ext, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+decode_picture_ext(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoPictureExt * const pic_ext = unit->parsed_info;
+
+ if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_GOT_PIC_HDR))
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+ if (priv->progressive_sequence && !pic_ext->progressive_frame) {
+ GST_WARNING("invalid interlaced frame in progressive sequence, fixing");
+ pic_ext->progressive_frame = 1;
+ }
+
+ if (pic_ext->picture_structure == 0 ||
+ (pic_ext->progressive_frame &&
+ pic_ext->picture_structure != GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME)) {
GST_WARNING("invalid picture_structure %d, replacing with \"frame\"",
pic_ext->picture_structure);
pic_ext->picture_structure = GST_MPEG_VIDEO_PICTURE_STRUCTURE_FRAME;
}
+ priv->state |= GST_MPEG_VIDEO_STATE_GOT_PIC_EXT;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static inline guint32
+pack_f_code(guint8 f_code[2][2])
+{
+ return (((guint32)f_code[0][0] << 12) |
+ ((guint32)f_code[0][1] << 8) |
+ ((guint32)f_code[1][0] << 4) |
+ ( f_code[1][1] ));
+}
+
+static GstVaapiDecoderStatus
+init_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr->data.pic_hdr;
+ GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext->data.pic_ext;
+
+ switch (pic_hdr->pic_type) {
+ case GST_MPEG_VIDEO_PICTURE_TYPE_I:
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
+ picture->type = GST_VAAPI_PICTURE_TYPE_I;
+ break;
+ case GST_MPEG_VIDEO_PICTURE_TYPE_P:
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
+ picture->type = GST_VAAPI_PICTURE_TYPE_P;
+ break;
+ case GST_MPEG_VIDEO_PICTURE_TYPE_B:
+ picture->type = GST_VAAPI_PICTURE_TYPE_B;
+ break;
+ default:
+ GST_ERROR("unsupported picture type %d", pic_hdr->pic_type);
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ }
+
if (!priv->progressive_sequence && !pic_ext->progressive_frame) {
GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
if (pic_ext->top_field_first)
picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
break;
}
- return GST_VAAPI_DECODER_STATUS_SUCCESS;
-}
-static inline guint32
-pack_f_code(guint8 f_code[2][2])
-{
- return (((guint32)f_code[0][0] << 12) |
- ((guint32)f_code[0][1] << 8) |
- ((guint32)f_code[1][0] << 4) |
- ( f_code[1][1] ));
+ /* Allocate dummy picture for first field based I-frame */
+ if (picture->type == GST_VAAPI_PICTURE_TYPE_I &&
+ !GST_VAAPI_PICTURE_IS_FRAME(picture) &&
+ gst_vaapi_dpb_size(priv->dpb) == 0) {
+ GstVaapiPicture *dummy_picture;
+ gboolean success;
+
+ dummy_picture = GST_VAAPI_PICTURE_NEW(MPEG2, decoder);
+ if (!dummy_picture) {
+ GST_ERROR("failed to allocate dummy picture");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+
+ dummy_picture->type = GST_VAAPI_PICTURE_TYPE_I;
+ dummy_picture->pts = GST_CLOCK_TIME_NONE;
+ dummy_picture->poc = -1;
+ dummy_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+
+ GST_VAAPI_PICTURE_FLAG_SET(
+ dummy_picture,
+ (GST_VAAPI_PICTURE_FLAG_SKIPPED |
+ GST_VAAPI_PICTURE_FLAG_OUTPUT |
+ GST_VAAPI_PICTURE_FLAG_REFERENCE)
+ );
+
+ success = gst_vaapi_dpb_add(priv->dpb, dummy_picture);
+ gst_vaapi_picture_unref(dummy_picture);
+ if (!success) {
+ GST_ERROR("failed to add dummy picture into DPB");
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ }
+ GST_INFO("allocated dummy picture for first field based I-frame");
+ }
+
+ /* Update presentation time */
+ picture->pts = pts_eval(&priv->tsg,
+ GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts, pic_hdr->tsn);
+ picture->poc = pts_get_poc(&priv->tsg);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
-static gboolean
+static void
fill_picture(GstVaapiDecoderMpeg2 *decoder, GstVaapiPicture *picture)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
VAPictureParameterBufferMPEG2 * const pic_param = picture->param;
- GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr;
- GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext;
+ GstMpegVideoPictureHdr * const pic_hdr = &priv->pic_hdr->data.pic_hdr;
+ GstMpegVideoPictureExt * const pic_ext = &priv->pic_ext->data.pic_ext;
GstVaapiPicture *prev_picture, *next_picture;
- if (!priv->has_pic_ext)
- return FALSE;
-
/* Fill in VAPictureParameterBufferMPEG2 */
- pic_param->horizontal_size = priv->width;
- pic_param->vertical_size = priv->height;
- pic_param->forward_reference_picture = VA_INVALID_ID;
- pic_param->backward_reference_picture = VA_INVALID_ID;
- pic_param->picture_coding_type = pic_hdr->pic_type;
- pic_param->f_code = pack_f_code(pic_ext->f_code);
+ pic_param->horizontal_size = priv->width;
+ pic_param->vertical_size = priv->height;
+ pic_param->forward_reference_picture = VA_INVALID_ID;
+ pic_param->backward_reference_picture = VA_INVALID_ID;
+ pic_param->picture_coding_type = pic_hdr->pic_type;
+ pic_param->f_code = pack_f_code(pic_ext->f_code);
#define COPY_FIELD(a, b, f) \
pic_param->a.b.f = pic_ext->f
- pic_param->picture_coding_extension.value = 0;
- pic_param->picture_coding_extension.bits.is_first_field = GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture);
+ pic_param->picture_coding_extension.value = 0;
+ pic_param->picture_coding_extension.bits.is_first_field =
+ GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture);
COPY_FIELD(picture_coding_extension, bits, intra_dc_precision);
COPY_FIELD(picture_coding_extension, bits, picture_structure);
COPY_FIELD(picture_coding_extension, bits, top_field_first);
COPY_FIELD(picture_coding_extension, bits, repeat_first_field);
COPY_FIELD(picture_coding_extension, bits, progressive_frame);
- gst_vaapi_dpb_mpeg2_get_references(
- priv->dpb,
- picture,
- &prev_picture,
- &next_picture
- );
+ gst_vaapi_dpb_get_neighbours(priv->dpb, picture,
+ &prev_picture, &next_picture);
switch (pic_hdr->pic_type) {
case GST_MPEG_VIDEO_PICTURE_TYPE_B:
pic_param->forward_reference_picture = prev_picture->surface_id;
break;
}
- return TRUE;
}
static GstVaapiDecoderStatus
-decode_slice(
- GstVaapiDecoderMpeg2 *decoder,
- int slice_no,
- guchar *buf,
- guint buf_size
-)
-{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+parse_slice(GstVaapiDecoderMpeg2 *decoder,
+ GstVaapiDecoderUnit *unit, const GstMpegVideoPacket *packet)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSliceHdr *slice_hdr;
+ GstMpegVideoSequenceHdr *seq_hdr;
+ GstMpegVideoSequenceScalableExt *seq_scalable_ext;
+
+ priv->state &= (GST_MPEG_VIDEO_STATE_GOT_SEQ_HDR|
+ GST_MPEG_VIDEO_STATE_GOT_SEQ_EXT|
+ GST_MPEG_VIDEO_STATE_GOT_PIC_HDR|
+ GST_MPEG_VIDEO_STATE_GOT_PIC_EXT);
+
+ if (!gst_vaapi_parser_info_mpeg2_ensure(&priv->slice_hdr)) {
+ GST_ERROR("failed to allocate parser info for slice header");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+
+ slice_hdr = &priv->slice_hdr->data.slice_hdr;
+ seq_hdr = &priv->seq_hdr->data.seq_hdr;
+ seq_scalable_ext = priv->seq_scalable_ext ?
+ &priv->seq_scalable_ext->data.seq_scalable_ext : NULL;
+
+ if (!gst_mpeg_video_packet_parse_slice_header(packet, slice_hdr,
+ seq_hdr, seq_scalable_ext)) {
+ GST_ERROR("failed to parse slice header");
+ return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ }
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit, slice_hdr, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+decode_slice(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
GstVaapiPicture * const picture = priv->current_picture;
GstVaapiSlice *slice;
VASliceParameterBufferMPEG2 *slice_param;
- GstBitReader br;
- gint mb_x, mb_y, mb_inc;
- guint macroblock_offset;
- guint8 slice_vertical_position_extension;
- guint8 quantiser_scale_code;
- guint8 intra_slice_flag, intra_slice = 0;
- guint8 extra_bit_slice, junk8;
+ GstMpegVideoSliceHdr * const slice_hdr = unit->parsed_info;
+ GstBuffer * const buffer =
+ GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
+ GstMapInfo map_info;
+
+ GST_DEBUG("slice %d (%u bytes)", slice_hdr->mb_row, unit->size);
- GST_DEBUG("slice %d @ %p, %u bytes)", slice_no, buf, buf_size);
+ if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS))
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
- if (picture->slices->len == 0 && !fill_picture(decoder, picture))
+ if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
+ GST_ERROR("failed to map buffer");
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ }
- slice = GST_VAAPI_SLICE_NEW(MPEG2, decoder, buf, buf_size);
+ slice = GST_VAAPI_SLICE_NEW(MPEG2, decoder,
+ (map_info.data + unit->offset), unit->size);
+ gst_buffer_unmap(buffer, &map_info);
if (!slice) {
GST_ERROR("failed to allocate slice");
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
gst_vaapi_picture_add_slice(picture, slice);
- /* Parse slice */
- gst_bit_reader_init(&br, buf, buf_size);
- if (priv->height > 2800)
- READ_UINT8(&br, slice_vertical_position_extension, 3);
- if (priv->has_seq_scalable_ext) {
- GST_ERROR("failed to parse slice %d. Unsupported sequence_scalable_extension()", slice_no);
- return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
- }
- READ_UINT8(&br, quantiser_scale_code, 5);
- READ_UINT8(&br, extra_bit_slice, 1);
- if (extra_bit_slice == 1) {
- READ_UINT8(&br, intra_slice_flag, 1);
- if (intra_slice_flag) {
- READ_UINT8(&br, intra_slice, 1);
- READ_UINT8(&br, junk8, 7);
- }
- READ_UINT8(&br, extra_bit_slice, 1);
- while (extra_bit_slice == 1) {
- READ_UINT8(&br, junk8, 8);
- READ_UINT8(&br, extra_bit_slice, 1);
- }
- }
- macroblock_offset = gst_bit_reader_get_pos(&br);
-
- mb_y = slice_no;
- mb_x = -1;
- do {
- if (!decode_vlc(&br, &mb_inc, mpeg2_mbaddr_vlc_table,
- G_N_ELEMENTS(mpeg2_mbaddr_vlc_table))) {
- GST_WARNING("failed to decode first macroblock_address_increment");
- goto failed;
- }
- mb_x += mb_inc == GST_MPEG_VIDEO_MACROBLOCK_ESCAPE ? 33 : mb_inc;
- } while (mb_inc == GST_MPEG_VIDEO_MACROBLOCK_ESCAPE);
-
/* Fill in VASliceParameterBufferMPEG2 */
slice_param = slice->param;
- slice_param->macroblock_offset = macroblock_offset;
- slice_param->slice_horizontal_position = mb_x;
- slice_param->slice_vertical_position = mb_y;
- slice_param->quantiser_scale_code = quantiser_scale_code;
- slice_param->intra_slice_flag = intra_slice;
- return GST_VAAPI_DECODER_STATUS_SUCCESS;
+ slice_param->macroblock_offset = slice_hdr->header_size + 32;
+ slice_param->slice_horizontal_position = slice_hdr->mb_column;
+ slice_param->slice_vertical_position = slice_hdr->mb_row;
+ slice_param->quantiser_scale_code = slice_hdr->quantiser_scale_code;
+ slice_param->intra_slice_flag = slice_hdr->intra_slice;
-failed:
- return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ priv->state |= GST_MPEG_VIDEO_STATE_GOT_SLICE;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static inline gint
-scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
+scan_for_start_code(const guchar *buf, guint buf_size,
+ GstMpegVideoPacketTypeCode *type_ptr)
{
- return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
- 0xffffff00, 0x00000100,
- ofs, size,
- scp);
+ guint i = 0;
+
+ while (i <= (buf_size - 4)) {
+ if (buf[i + 2] > 1)
+ i += 3;
+ else if (buf[i + 1])
+ i += 2;
+ else if (buf[i] || buf[i + 2] != 1)
+ i++;
+ else
+ break;
+ }
+
+ if (i <= (buf_size - 4)) {
+ if (type_ptr)
+ *type_ptr = buf[i + 3];
+ return i;
+ }
+ return -1;
}
static GstVaapiDecoderStatus
-decode_buffer(GstVaapiDecoderMpeg2 *decoder, GstBuffer *buffer)
+parse_unit(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit,
+ GstMpegVideoPacket *packet)
{
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
+ GstMpegVideoPacketTypeCode type;
+ GstMpegVideoPacketExtensionCode ext_type;
GstVaapiDecoderStatus status;
- guchar *buf;
- guint buf_size, size;
- guint32 start_code;
- guint8 type;
- gint ofs;
-
- buf = GST_BUFFER_DATA(buffer);
- buf_size = GST_BUFFER_SIZE(buffer);
- if (!buf && buf_size == 0)
- return decode_sequence_end(decoder);
-
- gst_adapter_push(priv->adapter, gst_buffer_ref(buffer));
- size = gst_adapter_available(priv->adapter);
- status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
- do {
- if (size < 8)
+ type = packet->type;
+ switch (type) {
+ case GST_MPEG_VIDEO_PACKET_PICTURE:
+ status = parse_picture(decoder, unit, packet);
+ break;
+ case GST_MPEG_VIDEO_PACKET_SEQUENCE:
+ status = parse_sequence(decoder, unit, packet);
+ break;
+ case GST_MPEG_VIDEO_PACKET_EXTENSION:
+ ext_type = packet->data[4] >> 4;
+ switch (ext_type) {
+ case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE:
+ status = parse_sequence_ext(decoder, unit, packet);
break;
- ofs = scan_for_start_code(priv->adapter, 0, size, &start_code);
- if (ofs < 0)
+ case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_DISPLAY:
+ status = parse_sequence_display_ext(decoder, unit, packet);
break;
- gst_adapter_flush(priv->adapter, ofs);
- size -= ofs;
-
- status = gst_vaapi_decoder_check_status(GST_VAAPI_DECODER(decoder));
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_SCALABLE:
+ status = parse_sequence_scalable_ext(decoder, unit, packet);
break;
-
- if (size < 8)
+ case GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX:
+ status = parse_quant_matrix_ext(decoder, unit, packet);
break;
- ofs = scan_for_start_code(priv->adapter, 4, size - 4, NULL);
- if (ofs < 0)
+ case GST_MPEG_VIDEO_PACKET_EXT_PICTURE:
+ status = parse_picture_ext(decoder, unit, packet);
break;
- gst_adapter_flush(priv->adapter, 4);
- size -= ofs;
-
- if (ofs == 4) {
- // Ignore empty user-data packets
- if ((start_code & 0xff) == GST_MPEG_VIDEO_PACKET_USER_DATA)
- continue;
- GST_ERROR("failed to get a valid packet (SC: 0x%08x)", start_code);
- status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ default:
+ status = GST_VAAPI_DECODER_STATUS_SUCCESS;
break;
}
+ break;
+ case GST_MPEG_VIDEO_PACKET_GOP:
+ status = parse_gop(decoder, unit, packet);
+ break;
+ default:
+ if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
+ type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
+ status = parse_slice(decoder, unit, packet);
+ break;
+ }
+ status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+ break;
+ }
+ return status;
+}
- buffer = gst_adapter_take_buffer(priv->adapter, ofs - 4);
- buf = GST_BUFFER_DATA(buffer);
- buf_size = GST_BUFFER_SIZE(buffer);
+static GstVaapiDecoderStatus
+decode_unit(GstVaapiDecoderMpeg2 *decoder, GstVaapiDecoderUnit *unit,
+ GstMpegVideoPacket *packet)
+{
+ GstMpegVideoPacketTypeCode type;
+ GstMpegVideoPacketExtensionCode ext_type;
+ GstVaapiDecoderStatus status;
- type = start_code & 0xff;
- switch (type) {
- case GST_MPEG_VIDEO_PACKET_PICTURE:
- if (!priv->width || !priv->height)
- break;
- status = decode_picture(decoder, buf, buf_size);
+ type = packet->type;
+ switch (type) {
+ case GST_MPEG_VIDEO_PACKET_PICTURE:
+ status = decode_picture(decoder, unit);
+ break;
+ case GST_MPEG_VIDEO_PACKET_SEQUENCE:
+ status = decode_sequence(decoder, unit);
+ break;
+ case GST_MPEG_VIDEO_PACKET_EXTENSION:
+ ext_type = packet->data[4] >> 4;
+ switch (ext_type) {
+ case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE:
+ status = decode_sequence_ext(decoder, unit);
break;
- case GST_MPEG_VIDEO_PACKET_SEQUENCE:
- status = decode_sequence(decoder, buf, buf_size);
+ case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_DISPLAY:
+ status = decode_sequence_display_ext(decoder, unit);
break;
- case GST_MPEG_VIDEO_PACKET_EXTENSION: {
- const guchar id = buf[0] >> 4;
- switch (id) {
- case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE:
- status = decode_sequence_ext(decoder, buf, buf_size);
- break;
- case GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX:
- status = decode_quant_matrix_ext(decoder, buf, buf_size);
- break;
- case GST_MPEG_VIDEO_PACKET_EXT_PICTURE:
- if (!priv->width || !priv->height)
- break;
- status = decode_picture_ext(decoder, buf, buf_size);
- break;
- default:
- // Ignore unknown extensions
- GST_WARNING("unsupported start-code extension (0x%02x)", id);
- break;
- }
+ case GST_MPEG_VIDEO_PACKET_EXT_SEQUENCE_SCALABLE:
+ status = decode_sequence_scalable_ext(decoder, unit);
break;
- }
- case GST_MPEG_VIDEO_PACKET_SEQUENCE_END:
- status = decode_sequence_end(decoder);
+ case GST_MPEG_VIDEO_PACKET_EXT_QUANT_MATRIX:
+ status = decode_quant_matrix_ext(decoder, unit);
break;
- case GST_MPEG_VIDEO_PACKET_GOP:
- status = decode_gop(decoder, buf, buf_size);
+ case GST_MPEG_VIDEO_PACKET_EXT_PICTURE:
+ status = decode_picture_ext(decoder, unit);
break;
- case GST_MPEG_VIDEO_PACKET_USER_DATA:
- // Ignore user-data packets
+ default:
+ // Ignore unknown start-code extensions
+ GST_WARNING("unsupported packet extension type 0x%02x", ext_type);
status = GST_VAAPI_DECODER_STATUS_SUCCESS;
break;
- default:
- if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
- type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
- if (!priv->current_picture)
- break;
- status = decode_slice(
- decoder,
- type - GST_MPEG_VIDEO_PACKET_SLICE_MIN,
- buf, buf_size
- );
- break;
- }
- else if (type >= 0xb9 && type <= 0xff) {
- // Ignore system start codes (PES headers)
- status = GST_VAAPI_DECODER_STATUS_SUCCESS;
- break;
- }
- GST_WARNING("unsupported start code (0x%02x)", type);
- status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ }
+ break;
+ case GST_MPEG_VIDEO_PACKET_SEQUENCE_END:
+ status = decode_sequence_end(decoder);
+ break;
+ case GST_MPEG_VIDEO_PACKET_GOP:
+ status = decode_gop(decoder, unit);
+ break;
+ default:
+ if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
+ type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
+ status = decode_slice(decoder, unit);
break;
}
- gst_buffer_unref(buffer);
- } while (status == GST_VAAPI_DECODER_STATUS_SUCCESS);
+ GST_WARNING("unsupported packet type 0x%02x", type);
+ status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ break;
+ }
return status;
}
-GstVaapiDecoderStatus
-gst_vaapi_decoder_mpeg2_decode(GstVaapiDecoder *base, GstBuffer *buffer)
+static GstVaapiDecoderStatus
+ensure_decoder(GstVaapiDecoderMpeg2 *decoder)
{
- GstVaapiDecoderMpeg2 * const decoder = GST_VAAPI_DECODER_MPEG2(base);
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
-
- g_return_val_if_fail(priv->is_constructed,
- GST_VAAPI_DECODER_STATUS_ERROR_INIT_FAILED);
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
if (!priv->is_opened) {
- priv->is_opened = gst_vaapi_decoder_mpeg2_open(decoder, buffer);
+ priv->is_opened = gst_vaapi_decoder_mpeg2_open(decoder);
if (!priv->is_opened)
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
}
- return decode_buffer(decoder, buffer);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
-static void
-gst_vaapi_decoder_mpeg2_finalize(GObject *object)
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_mpeg2_parse(GstVaapiDecoder *base_decoder,
+ GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderMpeg2 * const decoder = GST_VAAPI_DECODER_MPEG2(object);
+ GstVaapiDecoderMpeg2 * const decoder =
+ GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+ GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
+ GstVaapiDecoderStatus status;
+ GstMpegVideoPacketTypeCode type, type2 = GST_MPEG_VIDEO_PACKET_NONE;
+ const guchar *buf;
+ guint buf_size, flags;
+ gint ofs, ofs1, ofs2;
- gst_vaapi_decoder_mpeg2_destroy(decoder);
+ status = ensure_decoder(decoder);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
+
+ buf_size = gst_adapter_available(adapter);
+ if (buf_size < 4)
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+ buf = gst_adapter_map(adapter, buf_size);
+ if (!buf)
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+ ofs = scan_for_start_code(buf, buf_size, &type);
+ if (ofs < 0)
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+ ofs1 = ofs;
+
+ ofs2 = ps->input_offset2 - 4;
+ if (ofs2 < ofs1 + 4)
+ ofs2 = ofs1 + 4;
+
+ ofs = G_UNLIKELY(buf_size < ofs2 + 4) ? -1 :
+ scan_for_start_code(&buf[ofs2], buf_size - ofs2, &type2);
+ if (ofs < 0) {
+ // Assume the whole packet is present if end-of-stream
+ if (!at_eos) {
+ ps->input_offset2 = buf_size;
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+ }
+ ofs = buf_size - ofs2;
+ }
+ ofs2 += ofs;
+
+ unit->size = ofs2 - ofs1;
+ gst_adapter_flush(adapter, ofs1);
+ ps->input_offset2 = 4;
+
+ /* Check for start of new picture */
+ flags = 0;
+ switch (type) {
+ case GST_MPEG_VIDEO_PACKET_SEQUENCE_END:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
+ break;
+ case GST_MPEG_VIDEO_PACKET_USER_DATA:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+ /* fall-through */
+ case GST_MPEG_VIDEO_PACKET_SEQUENCE:
+ case GST_MPEG_VIDEO_PACKET_GOP:
+ case GST_MPEG_VIDEO_PACKET_PICTURE:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ break;
+ case GST_MPEG_VIDEO_PACKET_EXTENSION:
+ if (G_UNLIKELY(unit->size < 5))
+ return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ break;
+ default:
+ if (type >= GST_MPEG_VIDEO_PACKET_SLICE_MIN &&
+ type <= GST_MPEG_VIDEO_PACKET_SLICE_MAX) {
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
+ switch (type2) {
+ case GST_MPEG_VIDEO_PACKET_USER_DATA:
+ case GST_MPEG_VIDEO_PACKET_SEQUENCE:
+ case GST_MPEG_VIDEO_PACKET_GOP:
+ case GST_MPEG_VIDEO_PACKET_PICTURE:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+ break;
+ default:
+ break;
+ }
+ }
- G_OBJECT_CLASS(gst_vaapi_decoder_mpeg2_parent_class)->finalize(object);
+ // Ignore system start codes (PES headers)
+ else if (type >= 0xb9 && type <= 0xff)
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+ break;
+ }
+ GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
-static void
-gst_vaapi_decoder_mpeg2_constructed(GObject *object)
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_mpeg2_decode(GstVaapiDecoder *base_decoder,
+ GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderMpeg2 * const decoder = GST_VAAPI_DECODER_MPEG2(object);
- GstVaapiDecoderMpeg2Private * const priv = decoder->priv;
- GObjectClass *parent_class;
+ GstVaapiDecoderMpeg2 * const decoder =
+ GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+ GstVaapiDecoderStatus status;
+ GstMpegVideoPacket packet;
+ GstBuffer * const buffer =
+ GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
+ GstMapInfo map_info;
+
+ status = ensure_decoder(decoder);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
+
+ if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
+ GST_ERROR("failed to map buffer");
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ }
+
+ packet.data = map_info.data + unit->offset;
+ packet.size = unit->size;
+ packet.type = packet.data[3];
+ packet.offset = 4;
+
+ status = parse_unit(decoder, unit, &packet);
+ gst_buffer_unmap(buffer, &map_info);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
+ return decode_unit(decoder, unit, &packet);
+}
+
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_mpeg2_start_frame(GstVaapiDecoder *base_decoder,
+ GstVaapiDecoderUnit *base_unit)
+{
+ GstVaapiDecoderMpeg2 * const decoder =
+ GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+ GstMpegVideoSequenceHdr *seq_hdr;
+ GstMpegVideoSequenceExt *seq_ext;
+ GstMpegVideoSequenceDisplayExt *seq_display_ext;
+ GstVaapiPicture *picture;
+ GstVaapiDecoderStatus status;
+
+ if (!is_valid_state(decoder, GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS))
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+ priv->state &= ~GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS;
+
+ seq_hdr = &priv->seq_hdr->data.seq_hdr;
+ seq_ext = priv->seq_ext ? &priv->seq_ext->data.seq_ext : NULL;
+ seq_display_ext = priv->seq_display_ext ?
+ &priv->seq_display_ext->data.seq_display_ext : NULL;
+ if (gst_mpeg_video_finalise_mpeg2_sequence_header(seq_hdr, seq_ext,
+ seq_display_ext))
+ gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder,
+ seq_hdr->par_w, seq_hdr->par_h);
+
+ status = ensure_context(decoder);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
+ GST_ERROR("failed to reset context");
+ return status;
+ }
+
+ if (priv->current_picture) {
+ /* Re-use current picture where the first field was decoded */
+ picture = gst_vaapi_picture_new_field(priv->current_picture);
+ if (!picture) {
+ GST_ERROR("failed to allocate field picture");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+ }
+ else {
+ /* Create new picture */
+ picture = GST_VAAPI_PICTURE_NEW(MPEG2, decoder);
+ if (!picture) {
+ GST_ERROR("failed to allocate picture");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+ }
+ gst_vaapi_picture_replace(&priv->current_picture, picture);
+ gst_vaapi_picture_unref(picture);
- parent_class = G_OBJECT_CLASS(gst_vaapi_decoder_mpeg2_parent_class);
- if (parent_class->constructed)
- parent_class->constructed(object);
+ /* Update cropping rectangle */
+ /* XXX: handle picture_display_extension() */
+ if (seq_display_ext && priv->pic_display_ext) {
+ GstVaapiRectangle * const crop_rect = &priv->crop_rect;
+ if (crop_rect->x + crop_rect->width <= priv->width &&
+ crop_rect->y + crop_rect->height <= priv->height)
+ gst_vaapi_picture_set_crop_rect(picture, crop_rect);
+ }
- priv->is_constructed = gst_vaapi_decoder_mpeg2_create(decoder);
+ status = ensure_quant_matrix(decoder, picture);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
+ GST_ERROR("failed to reset quantizer matrix");
+ return status;
+ }
+
+ status = init_picture(decoder, picture);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
+
+ fill_picture(decoder, picture);
+
+ priv->state |= GST_MPEG_VIDEO_STATE_VALID_PIC_HEADERS;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_mpeg2_end_frame(GstVaapiDecoder *base_decoder)
+{
+ GstVaapiDecoderMpeg2 * const decoder =
+ GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+
+ return decode_current_picture(decoder);
+}
+
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_mpeg2_flush(GstVaapiDecoder *base_decoder)
+{
+ GstVaapiDecoderMpeg2 * const decoder =
+ GST_VAAPI_DECODER_MPEG2_CAST(base_decoder);
+ GstVaapiDecoderMpeg2Private * const priv = &decoder->priv;
+
+ gst_vaapi_dpb_flush(priv->dpb);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* Populates the MPEG-2 decoder class structure: mini-object bookkeeping
 * (instance size, finalizer) plus the decoder virtual function table. */
static void
gst_vaapi_decoder_mpeg2_class_init(GstVaapiDecoderMpeg2Class *klass)
{
    GstVaapiMiniObjectClass * const mini_object_class =
        GST_VAAPI_MINI_OBJECT_CLASS(klass);
    GstVaapiDecoderClass * const dec_class = GST_VAAPI_DECODER_CLASS(klass);

    /* Mini-object machinery: how big an instance is and how to tear
     * it down (the generic base-decoder finalizer is sufficient). */
    mini_object_class->size = sizeof(GstVaapiDecoderMpeg2);
    mini_object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;

    /* MPEG-2 specific implementations of the decoder vfuncs. */
    dec_class->create = gst_vaapi_decoder_mpeg2_create;
    dec_class->destroy = gst_vaapi_decoder_mpeg2_destroy;
    dec_class->parse = gst_vaapi_decoder_mpeg2_parse;
    dec_class->decode = gst_vaapi_decoder_mpeg2_decode;
    dec_class->start_frame = gst_vaapi_decoder_mpeg2_start_frame;
    dec_class->end_frame = gst_vaapi_decoder_mpeg2_end_frame;
    dec_class->flush = gst_vaapi_decoder_mpeg2_flush;
}
/* Returns the (lazily initialized) singleton class vtable for the
 * MPEG-2 decoder.  Thread-safe: g_once_init_enter()/g_once_init_leave()
 * guarantee gst_vaapi_decoder_mpeg2_class_init() runs exactly once even
 * if several threads race to get the class concurrently. */
static inline const GstVaapiDecoderClass *
gst_vaapi_decoder_mpeg2_class(void)
{
    static GstVaapiDecoderMpeg2Class g_class;
    static gsize g_class_init = FALSE;

    /* First caller through the gate initializes g_class; everyone else
     * blocks in g_once_init_enter() until initialization completes. */
    if (g_once_init_enter(&g_class_init)) {
        gst_vaapi_decoder_mpeg2_class_init(&g_class);
        g_once_init_leave(&g_class_init, TRUE);
    }
    return GST_VAAPI_DECODER_CLASS(&g_class);
}
/**
GstVaapiDecoder *
gst_vaapi_decoder_mpeg2_new(GstVaapiDisplay *display, GstCaps *caps)
{
    /* All allocation, argument validation, and setup are delegated to
     * the generic base-decoder constructor, parameterized with the
     * MPEG-2 class vtable. */
    const GstVaapiDecoderClass * const klass =
        gst_vaapi_decoder_mpeg2_class();

    return gst_vaapi_decoder_new(klass, display, caps);
}