mpeg4: fix size argument to gst_adapter_flush().
profile/ivi/gstreamer-vaapi.git: gst-libs/gst/vaapi/gstvaapidecoder_mpeg4.c
/*
 *  gstvaapidecoder_mpeg4.c - MPEG-4 decoder
 *
 *  Copyright (C) 2011 Intel Corporation
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public License
 *  as published by the Free Software Foundation; either version 2.1
 *  of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free
 *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 *  Boston, MA 02110-1301 USA
 */

/**
 * SECTION:gstvaapidecoder_mpeg4
 * @short_description: MPEG-4 decoder, with H.263/DivX/XviD support
 */

#include "sysdeps.h"
#include <string.h>
#include <gst/base/gstbitreader.h>
#include <gst/codecparsers/gstmpeg4parser.h>
#include "gstvaapidecoder_mpeg4.h"
#include "gstvaapidecoder_objects.h"
#include "gstvaapidecoder_priv.h"
#include "gstvaapidisplay_priv.h"
#include "gstvaapiobject_priv.h"

#define DEBUG 1
#include "gstvaapidebug.h"

G_DEFINE_TYPE(GstVaapiDecoderMpeg4,
              gst_vaapi_decoder_mpeg4,
              GST_VAAPI_TYPE_DECODER)

#define GST_VAAPI_DECODER_MPEG4_GET_PRIVATE(obj)                \
    (G_TYPE_INSTANCE_GET_PRIVATE((obj),                         \
                                 GST_VAAPI_TYPE_DECODER_MPEG4,  \
                                 GstVaapiDecoderMpeg4Private))

struct _GstVaapiDecoderMpeg4Private {
    GstVaapiProfile                 profile;
    guint                           level;
    guint                           width;
    guint                           height;
    guint                           fps_n;
    guint                           fps_d;
    guint                           coding_type;
    GstMpeg4VisualObjectSequence    vos_hdr;
    GstMpeg4VisualObject            vo_hdr;
    GstMpeg4VideoSignalType         signal_type;
    GstMpeg4VideoObjectLayer        vol_hdr;
    GstMpeg4VideoObjectPlane        vop_hdr;
    GstMpeg4VideoPlaneShortHdr      svh_hdr;
    GstMpeg4VideoPacketHdr          packet_hdr;
    GstMpeg4SpriteTrajectory        sprite_trajectory;
    VAIQMatrixBufferMPEG4           iq_matrix;
    GstVaapiPicture                *curr_picture;
    // backward reference pic (most recent I/P/S, future reference for B frames)
    GstVaapiPicture                *next_picture;
    // forward reference pic (older I/P/S, past reference for B frames)
    GstVaapiPicture                *prev_picture;
    GstAdapter                     *adapter;
    GstBuffer                      *sub_buffer;
    GstClockTime                    seq_pts;
    GstClockTime                    gop_pts;
    GstClockTime                    pts_diff;
    GstClockTime                    max_pts;
    // anchor sync time base for any picture type;
    // it is the time base of the forward (past) reference frame of a B frame
    GstClockTime                    last_sync_time;
    // time base of the most recent I/P/S frame;
    // it is the time base of the backward (future) reference frame of a B frame
    GstClockTime                    sync_time;

    /* last non-B-frame time, in vop_time_increment_resolution units */
    GstClockTime                    last_non_b_scale_time;
    GstClockTime                    non_b_scale_time;
    GstClockTime                    trb;
    GstClockTime                    trd;
    // temporal_reference of the previous short-video-header frame
    guint8                          prev_t_ref;
    guint                           is_constructed          : 1;
    guint                           is_opened               : 1;
    guint                           is_first_field          : 1;
    guint                           size_changed            : 1;
    guint                           profile_changed         : 1;
    guint                           progressive_sequence    : 1;
    guint                           closed_gop              : 1;
    guint                           broken_link             : 1;
    guint                           calculate_pts_diff      : 1;
    guint                           is_svh                  : 1;
};
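
/* Timing bookkeeping overview (worked example with made-up numbers):
 * sync_time/last_sync_time hold the time base, in seconds, of the two most
 * recently decoded non-B (I/P/S) frames, while non_b_scale_time and
 * last_non_b_scale_time hold the same instants scaled to
 * vop_time_increment_resolution ticks.  With a resolution of 25 ticks/s and
 * decode order I0 (tick 0), P1 (tick 4), B (tick 2), decode_picture() ends up
 * with trd = 4 (distance between the two reference frames) and trb = 2
 * (distance from the past reference to the B frame); fill_picture() then
 * copies them into pic_param->TRD and pic_param->TRB for the driver.
 */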

static void
gst_vaapi_decoder_mpeg4_close(GstVaapiDecoderMpeg4 *decoder)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;

    gst_vaapi_picture_replace(&priv->curr_picture, NULL);
    gst_vaapi_picture_replace(&priv->next_picture, NULL);
    gst_vaapi_picture_replace(&priv->prev_picture, NULL);

    if (priv->sub_buffer) {
        gst_buffer_unref(priv->sub_buffer);
        priv->sub_buffer = NULL;
    }

    if (priv->adapter) {
        gst_adapter_clear(priv->adapter);
        g_object_unref(priv->adapter);
        priv->adapter = NULL;
    }
}

static gboolean
gst_vaapi_decoder_mpeg4_open(GstVaapiDecoderMpeg4 *decoder, GstBuffer *buffer)
{
    GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER(decoder);
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstCaps *caps = NULL;
    GstStructure *structure = NULL;

    gst_vaapi_decoder_mpeg4_close(decoder);

    priv->adapter = gst_adapter_new();
    if (!priv->adapter)
        return FALSE;

    priv->is_svh = 0;
    caps = gst_vaapi_decoder_get_caps(base_decoder);
    if (caps) {
        structure = gst_caps_get_structure(caps, 0);
        if (structure) {
            if (gst_structure_has_name(structure, "video/x-h263")) {
                priv->is_svh = 1;
                priv->profile = GST_VAAPI_PROFILE_MPEG4_SIMPLE;
                priv->prev_t_ref = -1;
            }
        }
    }
    return TRUE;
}
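
/* For reference: short-video-header (H.263 baseline) mode is selected purely
 * from the sink caps checked above.  For example, caps built along these
 * lines (illustrative only, the values are hypothetical) would enable it:
 *
 *   caps = gst_caps_new_simple("video/x-h263",
 *                              "width",  G_TYPE_INT, 352,
 *                              "height", G_TYPE_INT, 288,
 *                              NULL);
 *
 * whereas "video/mpeg, mpegversion=4" caps keep the regular MPEG-4 path.
 */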

static void
gst_vaapi_decoder_mpeg4_destroy(GstVaapiDecoderMpeg4 *decoder)
{
    gst_vaapi_decoder_mpeg4_close(decoder);
}

static gboolean
gst_vaapi_decoder_mpeg4_create(GstVaapiDecoderMpeg4 *decoder)
{
    if (!GST_VAAPI_DECODER_CODEC(decoder))
        return FALSE;
    return TRUE;
}

static inline void
copy_quant_matrix(guint8 dst[64], const guint8 src[64])
{
    memcpy(dst, src, 64);
}

static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderMpeg4 *decoder)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstVaapiProfile profiles[2];
    GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
    guint i, n_profiles = 0;
    gboolean reset_context = FALSE;

    if (priv->profile_changed) {
        GST_DEBUG("profile changed");
        priv->profile_changed = FALSE;
        reset_context         = TRUE;

        profiles[n_profiles++] = priv->profile;
        if (priv->profile == GST_VAAPI_PROFILE_MPEG4_SIMPLE)
            profiles[n_profiles++] = GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE;

        for (i = 0; i < n_profiles; i++) {
            if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
                                              profiles[i], entrypoint))
                break;
        }
        if (i == n_profiles)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
        priv->profile = profiles[i];
    }

    if (priv->size_changed) {
        GST_DEBUG("size changed");
        priv->size_changed = FALSE;
        reset_context      = TRUE;
    }

    if (reset_context) {
        GstVaapiContextInfo info;

        info.profile    = priv->profile;
        info.entrypoint = entrypoint;
        info.width      = priv->width;
        info.height     = priv->height;
        info.ref_frames = 2;
        reset_context   = gst_vaapi_decoder_ensure_context(
            GST_VAAPI_DECODER(decoder),
            &info
        );
        if (!reset_context)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
ensure_quant_matrix(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    VAIQMatrixBufferMPEG4 *iq_matrix;

    if (!priv->vol_hdr.load_intra_quant_mat && !priv->vol_hdr.load_non_intra_quant_mat) {
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
    }

    picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(MPEG4, decoder);
    if (!picture->iq_matrix) {
        GST_DEBUG("failed to allocate IQ matrix");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    iq_matrix = picture->iq_matrix->param;

    if (priv->vol_hdr.load_intra_quant_mat) {
        iq_matrix->load_intra_quant_mat = 1;
        copy_quant_matrix(iq_matrix->intra_quant_mat,
                          priv->vol_hdr.intra_quant_mat);
    }
    else
        iq_matrix->load_intra_quant_mat = 0;

    if (priv->vol_hdr.load_non_intra_quant_mat) {
        iq_matrix->load_non_intra_quant_mat = 1;
        copy_quant_matrix(iq_matrix->non_intra_quant_mat,
                          priv->vol_hdr.non_intra_quant_mat);
    }
    else
        iq_matrix->load_non_intra_quant_mat = 0;

    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static inline GstVaapiDecoderStatus
render_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
{
    if (!gst_vaapi_picture_output(picture))
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

/* decode_picture() starts decoding a frame/picture.
 * decode_current_picture() finishes decoding a frame/picture
 * (commits the buffers to the driver for decoding).
 */
static GstVaapiDecoderStatus
decode_current_picture(GstVaapiDecoderMpeg4 *decoder)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstVaapiPicture * const picture = priv->curr_picture;
    GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_SUCCESS;

    if (picture) {
        if (!gst_vaapi_picture_decode(picture))
            status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
        if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
            if ((priv->prev_picture && priv->next_picture) ||
                (priv->closed_gop && priv->next_picture))
                status = render_picture(decoder, picture);
        }
        gst_vaapi_picture_replace(&priv->curr_picture, NULL);
    }
    return status;
}

static GstVaapiDecoderStatus
decode_sequence(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstMpeg4VisualObjectSequence * const vos_hdr = &priv->vos_hdr;
    GstVaapiProfile profile;

    if (gst_mpeg4_parse_visual_object_sequence(vos_hdr, buf, buf_size) != GST_MPEG4_PARSER_OK) {
        GST_DEBUG("failed to parse sequence header");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }

    priv->level = vos_hdr->level;
    switch (vos_hdr->profile) {
    case GST_MPEG4_PROFILE_SIMPLE:
        profile = GST_VAAPI_PROFILE_MPEG4_SIMPLE;
        break;
    case GST_MPEG4_PROFILE_ADVANCED_SIMPLE:
    case GST_MPEG4_PROFILE_SIMPLE_SCALABLE: /* shared profile with ADVANCED_SIMPLE */
        profile = GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE;
        break;
    default:
        GST_DEBUG("unsupported profile %d", vos_hdr->profile);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
    }
    if (priv->profile != profile) {
        priv->profile = profile;
        priv->profile_changed = TRUE;
    }
    priv->seq_pts = gst_adapter_prev_timestamp(priv->adapter, NULL);
    priv->size_changed = TRUE;

    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_sequence_end(GstVaapiDecoderMpeg4 *decoder)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstVaapiDecoderStatus status;

    if (priv->curr_picture) {
        status = decode_current_picture(decoder);
        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
            return status;
        status = render_picture(decoder, priv->curr_picture);
        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
            return status;
    }

    if (priv->next_picture) {
        status = render_picture(decoder, priv->next_picture);
        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
            return status;
    }
    return GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
}

static GstVaapiDecoderStatus
decode_visual_object(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstMpeg4VisualObject * vo_hdr = &priv->vo_hdr;
    GstMpeg4VideoSignalType * signal_type = &priv->signal_type;

    if (gst_mpeg4_parse_visual_object (vo_hdr, signal_type, buf, buf_size) != GST_MPEG4_PARSER_OK) {
        GST_DEBUG("failed to parse visual object");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }

    /* XXX: video_signal_type isn't used for decoding */
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_video_object_layer(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
{
    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstMpeg4VisualObject * vo_hdr = &priv->vo_hdr;
    GstMpeg4VideoObjectLayer * vol_hdr = &priv->vol_hdr;

    if (gst_mpeg4_parse_video_object_layer (vol_hdr, vo_hdr, buf, buf_size) != GST_MPEG4_PARSER_OK) {
        GST_DEBUG("failed to parse video object layer");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }

    priv->width  = vol_hdr->width;
    priv->height = vol_hdr->height;

    priv->progressive_sequence  = !vol_hdr->interlaced;

    if (vol_hdr->fixed_vop_rate) {
        priv->fps_n = vol_hdr->vop_time_increment_resolution;
        priv->fps_d = vol_hdr->fixed_vop_time_increment;
        gst_vaapi_decoder_set_framerate(base_decoder, priv->fps_n, priv->fps_d);
    }

    gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder, priv->vol_hdr.par_width, priv->vol_hdr.par_height);
    gst_vaapi_decoder_set_picture_size(base_decoder, priv->width, priv->height);

    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_gop(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstMpeg4GroupOfVOP gop;
    GstClockTime gop_time;

    if (buf_size > 4) {
        if (gst_mpeg4_parse_group_of_vop(&gop, buf, buf_size) != GST_MPEG4_PARSER_OK) {
            GST_DEBUG("failed to parse GOP");
            return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
        }
    }
    else {
        gop.closed          = 1;
        gop.broken_link     = 0;
        gop.hours           = 0;
        gop.minutes         = 0;
        gop.seconds         = 0;
    }

    priv->closed_gop  = gop.closed;
    priv->broken_link = gop.broken_link;

    GST_DEBUG("GOP %02u:%02u:%02u (closed_gop %d, broken_link %d)",
              gop.hours, gop.minutes, gop.seconds,
              priv->closed_gop, priv->broken_link);

    gop_time             = gop.hours * 3600 + gop.minutes * 60 + gop.seconds;
    priv->last_sync_time = gop_time;
    priv->sync_time      = gop_time;

    if (priv->gop_pts != GST_CLOCK_TIME_NONE)
        priv->pts_diff += gop_time * GST_SECOND - priv->gop_pts;
    priv->gop_pts = gop_time * GST_SECOND;
    priv->calculate_pts_diff = TRUE;
    priv->is_first_field = TRUE;

    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static void
calculate_pts_diff(GstVaapiDecoderMpeg4 *decoder,
                   GstMpeg4VideoObjectLayer *vol_hdr,
                   GstMpeg4VideoObjectPlane *vop_hdr)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstClockTime frame_timestamp;

    frame_timestamp = gst_adapter_prev_timestamp(priv->adapter, NULL);
    if (frame_timestamp && frame_timestamp != GST_CLOCK_TIME_NONE) {
        /* Buffer with timestamp */
        if (priv->max_pts != GST_CLOCK_TIME_NONE &&
            frame_timestamp < priv->max_pts) {
            frame_timestamp = priv->max_pts +
                gst_util_uint64_scale((vol_hdr->fixed_vop_rate ?
                                       vol_hdr->fixed_vop_time_increment : 1),
                                      GST_SECOND,
                                      vol_hdr->vop_time_increment_resolution);
        }
    } else {
        /* Buffer without timestamp set */
        if (priv->max_pts == GST_CLOCK_TIME_NONE) /* first buffer */
            frame_timestamp = 0;
        else {
            GstClockTime tmp_pts;
            tmp_pts = priv->pts_diff + priv->gop_pts +
                vop_hdr->modulo_time_base * GST_SECOND +
                gst_util_uint64_scale(vop_hdr->time_increment,
                                      GST_SECOND,
                                      vol_hdr->vop_time_increment_resolution);
            if (tmp_pts > priv->max_pts)
                frame_timestamp = tmp_pts;
            else
                frame_timestamp = priv->max_pts +
                    gst_util_uint64_scale((vol_hdr->fixed_vop_rate ?
                                           vol_hdr->fixed_vop_time_increment : 1),
                                          GST_SECOND,
                                          vol_hdr->vop_time_increment_resolution);
        }
    }

    priv->pts_diff = frame_timestamp -
        (priv->gop_pts + vop_hdr->modulo_time_base * GST_SECOND +
         gst_util_uint64_scale(vop_hdr->time_increment, GST_SECOND,
                               vol_hdr->vop_time_increment_resolution));
}
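
/* Worked example (hypothetical numbers, for illustration only):
 * with vop_time_increment_resolution = 30000, time_increment = 3003,
 * modulo_time_base = 0 and gop_pts = 0, the bitstream time of the VOP is
 * 3003/30000 s ~= 100.1 ms.  If the buffer carried an upstream timestamp of
 * 1 s, pts_diff becomes ~899.9 ms; decode_picture() later adds this offset
 * to every bitstream-derived time (picture->pts = pts + priv->pts_diff),
 * so output timestamps stay aligned with the container timeline.
 */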

static GstVaapiDecoderStatus
decode_picture(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
{
    GstMpeg4ParseResult parser_result = GST_MPEG4_PARSER_OK;
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstMpeg4VideoObjectPlane * const vop_hdr = &priv->vop_hdr;
    GstMpeg4VideoObjectLayer * const vol_hdr = &priv->vol_hdr;
    GstMpeg4SpriteTrajectory * const sprite_trajectory = &priv->sprite_trajectory;
    GstVaapiPicture *picture;
    GstVaapiDecoderStatus status;
    GstClockTime pts;

    // the context depends on priv->width and priv->height, so the VOP header
    // is parsed a little earlier than the rest of the picture setup
    if (priv->is_svh) {
        parser_result = gst_mpeg4_parse_video_plane_short_header(&priv->svh_hdr, buf, buf_size);
    }
    else {
        parser_result = gst_mpeg4_parse_video_object_plane(vop_hdr, sprite_trajectory, vol_hdr, buf, buf_size);
        /* Need to skip this frame if VOP was not coded */
        if (GST_MPEG4_PARSER_OK == parser_result && !vop_hdr->coded)
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    }

    if (parser_result != GST_MPEG4_PARSER_OK) {
        GST_DEBUG("failed to parse picture header");
        return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
    }

    if (priv->is_svh) {
        priv->width = priv->svh_hdr.vop_width;
        priv->height = priv->svh_hdr.vop_height;
    }
    else {
        if (!vop_hdr->width && !vop_hdr->height) {
            vop_hdr->width = vol_hdr->width;
            vop_hdr->height = vol_hdr->height;
        }
        priv->width = vop_hdr->width;
        priv->height = vop_hdr->height;
    }

    status = ensure_context(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_DEBUG("failed to reset context");
        return status;
    }

    if (priv->curr_picture) {
        status = decode_current_picture(decoder);
        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
            return status;
    }

    priv->curr_picture = GST_VAAPI_PICTURE_NEW(MPEG4, decoder);
    if (!priv->curr_picture) {
        GST_DEBUG("failed to allocate picture");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    picture = priv->curr_picture;

    status = ensure_quant_matrix(decoder, picture);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_DEBUG("failed to reset quantizer matrix");
        return status;
    }

    /* 7.6.7 Temporal prediction structure
     * forward reference frame     B B B B B B      backward reference frame
     *            |                                              |
     *  nearest I/P/S in the past with vop_coded == 1            |
     *                         nearest I/P/S in the future with any vop_coded
     * FIXME: the spec says a B frame should not use the backward reference
     *        frame when that reference frame's vop_coded is 0
     */
    if (priv->is_svh) {
        priv->coding_type = priv->svh_hdr.picture_coding_type;
    }
    else {
        priv->coding_type = priv->vop_hdr.coding_type;
    }
    switch (priv->coding_type) {
    case GST_MPEG4_I_VOP:
        picture->type = GST_VAAPI_PICTURE_TYPE_I;
        if (priv->is_svh || vop_hdr->coded)
            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        break;
    case GST_MPEG4_P_VOP:
        picture->type = GST_VAAPI_PICTURE_TYPE_P;
        if (priv->is_svh || vop_hdr->coded)
            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        break;
    case GST_MPEG4_B_VOP:
        picture->type = GST_VAAPI_PICTURE_TYPE_B;
        break;
    case GST_MPEG4_S_VOP:
        picture->type = GST_VAAPI_PICTURE_TYPE_S;
        // see 3.175 reference VOP
        if (vop_hdr->coded)
            GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
        break;
    default:
        GST_DEBUG("unsupported picture type %d", priv->coding_type);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    if (!priv->is_svh && !vop_hdr->coded) {
        status = render_picture(decoder, priv->prev_picture);
        return status;
    }

    if (priv->is_svh) {
        guint temp_ref = priv->svh_hdr.temporal_reference;
        if (temp_ref < priv->prev_t_ref) {
            temp_ref += 256;
        }
        guint delta_ref = temp_ref - priv->prev_t_ref;

        pts = priv->sync_time;
        // see the temporal_reference definition in the spec: 30000/1001 Hz
        pts += gst_util_uint64_scale(delta_ref, GST_SECOND*1001, 30000);
        priv->sync_time = pts;
        priv->prev_t_ref = priv->svh_hdr.temporal_reference;
    }
    else {
        /* Update priv->pts_diff */
        if (priv->calculate_pts_diff) {
            calculate_pts_diff(decoder, vol_hdr, vop_hdr);
            priv->calculate_pts_diff = FALSE;
        }

        /* Update presentation time, 6.3.5 */
        if (vop_hdr->coding_type != GST_MPEG4_B_VOP) {
            // increment based on decoding order
            priv->last_sync_time = priv->sync_time;
            priv->sync_time = priv->last_sync_time + vop_hdr->modulo_time_base;
            pts = priv->sync_time * GST_SECOND;
            pts += gst_util_uint64_scale(vop_hdr->time_increment, GST_SECOND, vol_hdr->vop_time_increment_resolution);
            priv->last_non_b_scale_time = priv->non_b_scale_time;
            priv->non_b_scale_time = priv->sync_time * vol_hdr->vop_time_increment_resolution + vop_hdr->time_increment;
            priv->trd  = priv->non_b_scale_time - priv->last_non_b_scale_time;
        }
        else {
            // increment based on display order
            pts = (priv->last_sync_time + vop_hdr->modulo_time_base) * GST_SECOND;
            pts += gst_util_uint64_scale(vop_hdr->time_increment, GST_SECOND, vol_hdr->vop_time_increment_resolution);
            priv->trb = (priv->last_sync_time + vop_hdr->modulo_time_base) * vol_hdr->vop_time_increment_resolution +
                vop_hdr->time_increment - priv->last_non_b_scale_time;
        }
    }
    picture->pts = pts + priv->pts_diff;
    if (priv->max_pts == GST_CLOCK_TIME_NONE || priv->max_pts < picture->pts)
        priv->max_pts = picture->pts;

    /* Update reference pictures */
    /* XXX: consider priv->vol_hdr.low_delay, consider packed video frames for DivX/XviD */
    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
        if (priv->next_picture)
            status = render_picture(decoder, priv->next_picture);
        gst_vaapi_picture_replace(&priv->prev_picture, priv->next_picture);
        gst_vaapi_picture_replace(&priv->next_picture, picture);
    }
    return status;
}

static inline guint
get_vop_coding_type(GstVaapiPicture *picture)
{
    return picture->type - GST_VAAPI_PICTURE_TYPE_I;
}

static gboolean
fill_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    VAPictureParameterBufferMPEG4 * const pic_param = picture->param;
    GstMpeg4VideoObjectPlane * const vop_hdr = &priv->vop_hdr;

    /* Fill in VAPictureParameterBufferMPEG4 */
    pic_param->forward_reference_picture                        = VA_INVALID_ID;
    pic_param->backward_reference_picture                       = VA_INVALID_ID;

    pic_param->vol_fields.value                                 = 0;
    pic_param->vop_fields.value                                 = 0;
    if (priv->is_svh) {
        // VOL parameters (hard-coded for short video header)
        pic_param->vol_fields.bits.short_video_header           = 1;
        // do the following VOL parameters matter for short video header?
        pic_param->vol_fields.bits.chroma_format                = 1; // I420, see table 6-15.
        pic_param->vol_fields.bits.interlaced                   = 0;
        pic_param->vol_fields.bits.obmc_disable                 = 1;
        pic_param->vol_fields.bits.sprite_enable                = 0;
        pic_param->vol_fields.bits.sprite_warping_accuracy      = 0;
        pic_param->vol_fields.bits.quant_type                   = 0; // method 1; $7.4.4
        pic_param->vol_fields.bits.quarter_sample               = 0;
        pic_param->vol_fields.bits.data_partitioned             = 0;
        pic_param->vol_fields.bits.reversible_vlc               = 0;
        pic_param->vol_fields.bits.resync_marker_disable        = 1;
        pic_param->no_of_sprite_warping_points                  = 0;
        pic_param->quant_precision                              = 5;
        // VOP parameters
        pic_param->vop_width                                    = priv->svh_hdr.vop_width;
        pic_param->vop_height                                   = priv->svh_hdr.vop_height;
        pic_param->vop_fields.bits.vop_coding_type              = priv->svh_hdr.picture_coding_type;
        pic_param->vop_time_increment_resolution                = priv->vol_hdr.vop_time_increment_resolution;

        pic_param->num_gobs_in_vop                              = priv->svh_hdr.num_gobs_in_vop;
        pic_param->num_macroblocks_in_gob                       = priv->svh_hdr.num_macroblocks_in_gob;
    }
    else {
        // VOL parameters
        pic_param->vol_fields.bits.short_video_header           = 0;
        pic_param->vol_fields.bits.chroma_format                = priv->vol_hdr.chroma_format;
        pic_param->vol_fields.bits.interlaced                   = priv->vol_hdr.interlaced;
        pic_param->vol_fields.bits.obmc_disable                 = priv->vol_hdr.obmc_disable;
        pic_param->vol_fields.bits.sprite_enable                = priv->vol_hdr.sprite_enable;
        pic_param->vol_fields.bits.sprite_warping_accuracy      = priv->vol_hdr.sprite_warping_accuracy;
        pic_param->vol_fields.bits.quant_type                   = priv->vol_hdr.quant_type;
        pic_param->vol_fields.bits.quarter_sample               = priv->vol_hdr.quarter_sample;
        pic_param->vol_fields.bits.data_partitioned             = priv->vol_hdr.data_partitioned;
        pic_param->vol_fields.bits.reversible_vlc               = priv->vol_hdr.reversible_vlc;
        pic_param->vol_fields.bits.resync_marker_disable        = priv->vol_hdr.resync_marker_disable;
        pic_param->no_of_sprite_warping_points                  = priv->vol_hdr.no_of_sprite_warping_points;

        int i;
        for (i = 0; i < 3 && i < priv->vol_hdr.no_of_sprite_warping_points; i++) {
            pic_param->sprite_trajectory_du[i]                  = priv->sprite_trajectory.vop_ref_points[i];
            pic_param->sprite_trajectory_dv[i]                  = priv->sprite_trajectory.sprite_ref_points[i];
        }
        pic_param->quant_precision                              = priv->vol_hdr.quant_precision;

        // VOP parameters
        pic_param->vop_width                                    = vop_hdr->width;
        pic_param->vop_height                                   = vop_hdr->height;
        pic_param->vop_fields.bits.vop_coding_type              = vop_hdr->coding_type;
        pic_param->vop_fields.bits.vop_rounding_type            = vop_hdr->rounding_type;
        pic_param->vop_fields.bits.intra_dc_vlc_thr             = vop_hdr->intra_dc_vlc_thr;
        pic_param->vop_fields.bits.top_field_first              = vop_hdr->top_field_first;
        pic_param->vop_fields.bits.alternate_vertical_scan_flag = vop_hdr->alternate_vertical_scan_flag;

        pic_param->vop_fcode_forward                            = vop_hdr->fcode_forward;
        pic_param->vop_fcode_backward                           = vop_hdr->fcode_backward;
        pic_param->vop_time_increment_resolution                = priv->vol_hdr.vop_time_increment_resolution;
    }

    pic_param->TRB = 0;
    pic_param->TRD = 0;
    switch (priv->coding_type) {
    case GST_MPEG4_B_VOP:
        pic_param->TRB                                          = priv->trb;
        pic_param->backward_reference_picture                   = priv->next_picture->surface_id;
        pic_param->vop_fields.bits.backward_reference_vop_coding_type = get_vop_coding_type(priv->next_picture);
        // fall-through
    case GST_MPEG4_P_VOP:
        pic_param->TRD                                          = priv->trd;
        if (priv->prev_picture)
            pic_param->forward_reference_picture                = priv->prev_picture->surface_id;
        break;
    }

    if (priv->vol_hdr.interlaced) {
        priv->is_first_field ^= 1;
    }
    return TRUE;
}

static GstVaapiDecoderStatus
decode_slice(
    GstVaapiDecoderMpeg4 *decoder,
    const guint8          *buf,
    guint                 buf_size,
    gboolean              has_packet_header
)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstVaapiPicture * const picture = priv->curr_picture;
    GstVaapiSlice *slice;
    VASliceParameterBufferMPEG4 *slice_param;

    GST_DEBUG("decode slice: %p, %u bytes", buf, buf_size);

    // has_packet_header is true for the second and subsequent slices
    if (!has_packet_header && !fill_picture(decoder, picture))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    slice = GST_VAAPI_SLICE_NEW(MPEG4, decoder, buf, buf_size);
    if (!slice) {
        GST_DEBUG("failed to allocate slice");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }
    gst_vaapi_picture_add_slice(picture, slice);

    /* Fill in VASliceParameterBufferMPEG4 */
    slice_param = slice->param;
    if (priv->is_svh) {
        slice_param->macroblock_offset         = (priv->svh_hdr.size) % 8;
        slice_param->macroblock_number         = 0;
        // the header of the first gob_layer is empty (gob_header_empty = 1), use vop_quant
        slice_param->quant_scale               = priv->svh_hdr.vop_quant;
    }
    else {
        if (has_packet_header) {
            slice_param->macroblock_offset     = priv->packet_hdr.size % 8;
            slice_param->macroblock_number     = priv->packet_hdr.macroblock_number;
            slice_param->quant_scale           = priv->packet_hdr.quant_scale;
        }
        else {
            slice_param->macroblock_offset     = priv->vop_hdr.size % 8;
            slice_param->macroblock_number     = 0;
            slice_param->quant_scale           = priv->vop_hdr.quant;
        }
    }
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}

static GstVaapiDecoderStatus
decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstMpeg4Packet *tos = &packet;
    GstVaapiDecoderStatus status;

    if (tos->size < 0)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

    status = gst_vaapi_decoder_check_status(GST_VAAPI_DECODER(decoder));
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    // packet.size is the size from the current marker to the next one
    if (tos->type == GST_MPEG4_VISUAL_OBJ_SEQ_START) {
        status = decode_sequence(decoder, packet.data + packet.offset, packet.size);
    }
    else if (tos->type == GST_MPEG4_VISUAL_OBJ_SEQ_END) {
        status = decode_sequence_end(decoder);
    }
    else if (tos->type == GST_MPEG4_VISUAL_OBJ) {
        status = decode_visual_object(decoder, packet.data + packet.offset, packet.size);
    }
    else if (tos->type >= GST_MPEG4_VIDEO_OBJ_FIRST && tos->type <= GST_MPEG4_VIDEO_OBJ_LAST) {
        GST_WARNING("unexpected marker: (GST_MPEG4_VIDEO_OBJ_FIRST, GST_MPEG4_VIDEO_OBJ_LAST)");
        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
    }
    else if (tos->type >= GST_MPEG4_VIDEO_LAYER_FIRST && tos->type <= GST_MPEG4_VIDEO_LAYER_LAST) {
        status = decode_video_object_layer(decoder, packet.data + packet.offset, packet.size);
    }
    else if (tos->type == GST_MPEG4_GROUP_OF_VOP) {
        status = decode_gop(decoder, packet.data + packet.offset, packet.size);
    }
    else if (tos->type == GST_MPEG4_VIDEO_OBJ_PLANE) {
        status = decode_picture(decoder, packet.data + packet.offset, packet.size);
        if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
            return status;

        /* decode slice
         * A resync marker shall only be located immediately before a macroblock
         * (or a video packet header, if present) and aligned with a byte.
         * The start_code and resync_marker are scanned/measured in bytes,
         * while the headers themselves are parsed/measured in bits.
         * That means the resync_marker (video_packet_header) starts on a byte
         * boundary, while a macroblock need not -- which is what
         * 'macroblock_offset' in the slice parameters refers to.
         */
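        /* Worked example (illustrative numbers): if the parsed VOP header is
         * priv->vop_hdr.size = 45 bits, the first slice's data pointer below
         * is advanced by 45 / 8 = 5 whole bytes, and decode_slice() reports
         * the remaining 45 % 8 = 5 bits as slice_param->macroblock_offset so
         * the driver knows where the first macroblock starts within that byte.
         */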
        const guint8 *_data = packet.data + packet.offset + priv->vop_hdr.size/8;
        gint  _data_size = packet.size - (priv->vop_hdr.size/8);
        GstMpeg4Packet video_packet;

        if (priv->vol_hdr.resync_marker_disable) {
            status = decode_slice(decoder, _data, _data_size, FALSE);
            if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
                return status;
        }
        else {
            // next start_code is required to determine the end of last slice
            _data_size += 4;
            GstMpeg4ParseResult ret = GST_MPEG4_PARSER_OK;

            gboolean first_slice = TRUE;
            while (_data_size > 0) {
                // we can skip user data here
                ret = gst_mpeg4_parse(&video_packet, TRUE, &priv->vop_hdr, _data, 0,  _data_size);
                if (ret != GST_MPEG4_PARSER_OK) {
                    break;
                }

                if (first_slice) {
                    status = decode_slice(decoder, _data, video_packet.size, FALSE);
                    first_slice = FALSE;
                }
                else {
                    _data += video_packet.offset;
                    _data_size -= video_packet.offset;

                    ret = gst_mpeg4_parse_video_packet_header (&priv->packet_hdr, &priv->vol_hdr, &priv->vop_hdr, &priv->sprite_trajectory, _data, _data_size);
                    status = decode_slice(decoder, _data + priv->packet_hdr.size/8, video_packet.size - priv->packet_hdr.size/8, TRUE);
                }

                _data += video_packet.size;
                _data_size -= video_packet.size;
            }
        }
        status = decode_current_picture(decoder);
    }
    else if (tos->type == GST_MPEG4_USER_DATA
          || tos->type == GST_MPEG4_VIDEO_SESSION_ERR
          || tos->type == GST_MPEG4_FBA
          || tos->type == GST_MPEG4_FBA_PLAN
          || tos->type == GST_MPEG4_MESH
          || tos->type == GST_MPEG4_MESH_PLAN
          || tos->type == GST_MPEG4_STILL_TEXTURE_OBJ
          || tos->type == GST_MPEG4_TEXTURE_SPATIAL
          || tos->type == GST_MPEG4_TEXTURE_SNR_LAYER
          || tos->type == GST_MPEG4_TEXTURE_TILE
          || tos->type == GST_MPEG4_SHAPE_LAYER
          || tos->type == GST_MPEG4_STUFFING
          || tos->type == GST_MPEG4_SYSTEM_FIRST
          || tos->type == GST_MPEG4_SYSTEM_LAST) {
        GST_WARNING("Ignore marker: %x", tos->type);
        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
    }

    return status;
}

static GstVaapiDecoderStatus
decode_buffer(GstVaapiDecoderMpeg4 *decoder, GstBuffer *buffer)
{
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    guchar *buf;
    guint pos, buf_size;

    buf      = GST_BUFFER_DATA(buffer);
    buf_size = GST_BUFFER_SIZE(buffer);

    // visual object sequence end
    if (!buf && buf_size == 0)
        return decode_sequence_end(decoder);

    gst_buffer_ref(buffer);
    gst_adapter_push(priv->adapter, buffer);

    if (priv->sub_buffer) {
        buffer = gst_buffer_merge(priv->sub_buffer, buffer);
        if (!buffer)
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        gst_buffer_unref(priv->sub_buffer);
        priv->sub_buffer = NULL;
    }

    buf      = GST_BUFFER_DATA(buffer);
    buf_size = GST_BUFFER_SIZE(buffer);
    pos = 0;

    GstMpeg4Packet packet;
    GstMpeg4ParseResult result = GST_MPEG4_PARSER_OK;
    guint consumed_size = 0;

    if (priv->is_svh) {
        while (result == GST_MPEG4_PARSER_OK && pos < buf_size) {
            result = gst_h263_parse (&packet, buf, pos, buf_size);
            if (result != GST_MPEG4_PARSER_OK) {
                break;
            }
            status = decode_picture(decoder, packet.data+packet.offset, packet.size);
            if (GST_VAAPI_DECODER_STATUS_SUCCESS == status) {
                // MBs are not byte aligned, so we pass a byte-aligned start address
                // and carry the remaining bits in the MB offset, (priv->svh_hdr.size) % 8
                status = decode_slice(decoder, packet.data+packet.offset+(priv->svh_hdr.size)/8,
                        packet.size - (priv->svh_hdr.size)/8, FALSE);
                status = decode_current_picture(decoder);

                consumed_size = packet.offset + packet.size;
                pos += consumed_size;
                if (gst_adapter_available(priv->adapter) >= consumed_size)
                    gst_adapter_flush(priv->adapter, consumed_size);
            }
            else {
                GST_WARNING("decode h263 packet failed");
                break;
            }
        }
    }
    else {
        while (pos < buf_size) {
            // don't skip user data, we need its size to flush the right amount from the adapter
            result = gst_mpeg4_parse(&packet, FALSE, NULL, buf, pos, buf_size);
            if (result != GST_MPEG4_PARSER_OK) {
                break;
            }
            status = decode_packet(decoder, packet);
            if (GST_VAAPI_DECODER_STATUS_SUCCESS == status ||
                GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA == status) {
                consumed_size = packet.offset + packet.size - pos;
                pos = packet.offset + packet.size;
                if (gst_adapter_available(priv->adapter) >= consumed_size)
                    gst_adapter_flush(priv->adapter, consumed_size);
            }
            else {
                GST_WARNING("decode mp4 packet failed");
                break;
            }
        }
    }

    if ((result == GST_MPEG4_PARSER_NO_PACKET ||
         result == GST_MPEG4_PARSER_NO_PACKET_END ||
         status == GST_VAAPI_DECODER_STATUS_ERROR_NO_SURFACE) &&
        pos < buf_size) {
        priv->sub_buffer = gst_buffer_create_sub(buffer, pos, buf_size-pos);
        status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    }
    return status;
}

static GstVaapiDecoderStatus
decode_codec_data(GstVaapiDecoderMpeg4 *decoder, GstBuffer *buffer)
{
    GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_SUCCESS;
    guchar *buf, *_buf;
    guint pos, buf_size, _buf_size;

    _buf      = GST_BUFFER_DATA(buffer);
    _buf_size = GST_BUFFER_SIZE(buffer);
    // append an additional 0x000001b2 start code to enclose the last header
    buf_size = _buf_size + 4;
    buf = malloc(buf_size);
    memcpy(buf, _buf, _buf_size);
    buf[buf_size-4] = 0;
    buf[buf_size-3] = 0;
    buf[buf_size-2] = 1;
    buf[buf_size-1] = 0xb2;

    pos = 0;
    GstMpeg4Packet packet;
    GstMpeg4ParseResult result = GST_MPEG4_PARSER_OK;

    while (result == GST_MPEG4_PARSER_OK && pos < buf_size) {
        result = gst_mpeg4_parse(&packet, FALSE, NULL, buf, pos, buf_size);
        if (result != GST_MPEG4_PARSER_OK) {
            break;
        }
        status = decode_packet(decoder, packet);
        if (GST_VAAPI_DECODER_STATUS_SUCCESS == status) {
            pos = packet.offset + packet.size;
        }
        else {
            GST_WARNING("decode mp4 packet failed when decoding codec data");
            break;
        }
    }
    free(buf);
    return status;
}

GstVaapiDecoderStatus
gst_vaapi_decoder_mpeg4_decode(GstVaapiDecoder *base, GstBuffer *buffer)
{
    GstVaapiDecoderMpeg4 * const decoder = GST_VAAPI_DECODER_MPEG4(base);
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GstBuffer *codec_data = NULL;
    GstVaapiDecoderStatus status;

    g_return_val_if_fail(priv->is_constructed,
                         GST_VAAPI_DECODER_STATUS_ERROR_INIT_FAILED);

    if (!priv->is_opened) {
        priv->is_opened = gst_vaapi_decoder_mpeg4_open(decoder, buffer);
        if (!priv->is_opened)
            return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;

        codec_data = GST_VAAPI_DECODER_CODEC_DATA(decoder);
        if (codec_data) {
            status = decode_codec_data(decoder, codec_data);
            if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
                return status;
        }
    }
    return decode_buffer(decoder, buffer);
}

static void
gst_vaapi_decoder_mpeg4_finalize(GObject *object)
{
    GstVaapiDecoderMpeg4 * const decoder = GST_VAAPI_DECODER_MPEG4(object);

    gst_vaapi_decoder_mpeg4_destroy(decoder);

    G_OBJECT_CLASS(gst_vaapi_decoder_mpeg4_parent_class)->finalize(object);
}

static void
gst_vaapi_decoder_mpeg4_constructed(GObject *object)
{
    GstVaapiDecoderMpeg4 * const decoder = GST_VAAPI_DECODER_MPEG4(object);
    GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
    GObjectClass *parent_class;

    parent_class = G_OBJECT_CLASS(gst_vaapi_decoder_mpeg4_parent_class);
    if (parent_class->constructed)
        parent_class->constructed(object);

    priv->is_constructed = gst_vaapi_decoder_mpeg4_create(decoder);
}

static void
gst_vaapi_decoder_mpeg4_class_init(GstVaapiDecoderMpeg4Class *klass)
{
    GObjectClass * const object_class = G_OBJECT_CLASS(klass);
    GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);

    g_type_class_add_private(klass, sizeof(GstVaapiDecoderMpeg4Private));

    object_class->finalize      = gst_vaapi_decoder_mpeg4_finalize;
    object_class->constructed   = gst_vaapi_decoder_mpeg4_constructed;

    decoder_class->decode       = gst_vaapi_decoder_mpeg4_decode;
}

static void
gst_vaapi_decoder_mpeg4_init(GstVaapiDecoderMpeg4 *decoder)
{
    GstVaapiDecoderMpeg4Private *priv;

    priv                        = GST_VAAPI_DECODER_MPEG4_GET_PRIVATE(decoder);
    decoder->priv               = priv;
    priv->width                 = 0;
    priv->height                = 0;
    priv->fps_n                 = 0;
    priv->fps_d                 = 0;
    priv->profile               = GST_VAAPI_PROFILE_MPEG4_SIMPLE;
    priv->curr_picture          = NULL;
    priv->next_picture          = NULL;
    priv->prev_picture          = NULL;
    priv->adapter               = NULL;
    priv->sub_buffer            = NULL;
    priv->seq_pts               = GST_CLOCK_TIME_NONE;
    priv->gop_pts               = GST_CLOCK_TIME_NONE;
    priv->max_pts               = GST_CLOCK_TIME_NONE;
    priv->pts_diff              = 0;
    priv->calculate_pts_diff    = TRUE;
    priv->is_constructed        = FALSE;
    priv->is_opened             = FALSE;
    priv->is_first_field        = FALSE;
    priv->size_changed          = TRUE;
    priv->profile_changed       = TRUE;
    priv->progressive_sequence  = FALSE;
    priv->closed_gop            = FALSE;
    priv->broken_link           = FALSE;
    priv->last_non_b_scale_time = 0;
    priv->non_b_scale_time      = 0;
    priv->trb                   = 0;
    priv->trd                   = 0;
}

/**
 * gst_vaapi_decoder_mpeg4_new:
 * @display: a #GstVaapiDisplay
 * @caps: a #GstCaps holding codec information
 *
 * Creates a new #GstVaapiDecoder for MPEG-4 decoding.  The @caps can
 * hold extra information like codec-data and picture coded size.
 *
 * Return value: the newly allocated #GstVaapiDecoder object
 */
GstVaapiDecoder *
gst_vaapi_decoder_mpeg4_new(GstVaapiDisplay *display, GstCaps *caps)
{
    GstVaapiDecoderMpeg4 *decoder;

    g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
    g_return_val_if_fail(GST_IS_CAPS(caps), NULL);

    decoder = g_object_new(
        GST_VAAPI_TYPE_DECODER_MPEG4,
        "display",      display,
        "caps",         caps,
        NULL
    );
    if (!decoder->priv->is_constructed) {
        g_object_unref(decoder);
        return NULL;
    }
    return GST_VAAPI_DECODER_CAST(decoder);
}
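
/* Minimal usage sketch (illustrative only; creating the display, the caps and
 * the input GstBuffer is elided and assumed to follow the usual GStreamer
 * 0.10-style APIs used elsewhere in this library):
 *
 *   GstCaps *caps = gst_caps_new_simple("video/mpeg",
 *                                       "mpegversion", G_TYPE_INT, 4,
 *                                       NULL);
 *   GstVaapiDecoder *decoder = gst_vaapi_decoder_mpeg4_new(display, caps);
 *   GstVaapiDecoderStatus status;
 *
 *   if (decoder)
 *       status = gst_vaapi_decoder_mpeg4_decode(decoder, buffer);
 *
 *   gst_caps_unref(caps);
 *   if (decoder)
 *       g_object_unref(decoder);
 */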