2 * gstvaapidecoder_mpeg4.c - MPEG-4 decoder
4 * Copyright (C) 2011 Intel Corporation
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public License
8 * as published by the Free Software Foundation; either version 2.1
9 * of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free
18 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
19 * Boston, MA 02110-1301 USA
23 * SECTION:gstvaapidecoder_mpeg4
24 * @short_description: MPEG-4 decoder, including H.263/DivX/XviD support
29 #include <gst/base/gstbitreader.h>
30 #include <gst/codecparsers/gstmpeg4parser.h>
31 #include "gstvaapidecoder_mpeg4.h"
32 #include "gstvaapidecoder_priv.h"
33 #include "gstvaapidisplay_priv.h"
34 #include "gstvaapiobject_priv.h"
37 #include "gstvaapidebug.h"
39 G_DEFINE_TYPE(GstVaapiDecoderMpeg4,
40 gst_vaapi_decoder_mpeg4,
41 GST_VAAPI_TYPE_DECODER);
43 #define GST_VAAPI_DECODER_MPEG4_GET_PRIVATE(obj) \
44 (G_TYPE_INSTANCE_GET_PRIVATE((obj), \
45 GST_VAAPI_TYPE_DECODER_MPEG4, \
46 GstVaapiDecoderMpeg4Private))
48 struct _GstVaapiDecoderMpeg4Private {
49 GstVaapiProfile profile;
56 GstMpeg4VisualObjectSequence vos_hdr;
57 GstMpeg4VisualObject vo_hdr;
58 GstMpeg4VideoSignalType signal_type;
59 GstMpeg4VideoObjectLayer vol_hdr;
60 GstMpeg4VideoObjectPlane vop_hdr;
61 GstMpeg4VideoPlaneShortHdr svh_hdr;
62 GstMpeg4VideoPacketHdr packet_hdr;
63 GstMpeg4SpriteTrajectory sprite_trajectory;
64 VAIQMatrixBufferMPEG4 iq_matrix;
65 GstVaapiPicture *curr_picture;
66 // backward reference picture (nearest I/P/S in the future)
67 GstVaapiPicture *next_picture;
68 // forward reference picture (nearest I/P/S in the past)
69 GstVaapiPicture *prev_picture;
71 GstBuffer *sub_buffer;
74 GstClockTime pts_diff;
75 // anchor sync time base for any picture type,
76 // it is the time base of the forward reference frame for B frames
77 GstClockTime last_sync_time;
78 // time base of the most recent I/P/S frame,
79 // it is the time base of the backward reference frame for B frames
80 GstClockTime sync_time;
81 // temporal_reference of the previous short video header frame
83 guint is_constructed : 1;
85 guint is_first_field : 1;
86 guint size_changed : 1;
87 guint profile_changed : 1;
88 guint progressive_sequence : 1;
90 guint broken_link : 1;
91 guint calculate_pts_diff : 1;
96 gst_vaapi_decoder_mpeg4_close(GstVaapiDecoderMpeg4 *decoder)
98 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
99 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
101 if (priv->curr_picture) {
102 gst_vaapi_decoder_free_picture(base_decoder, priv->curr_picture);
103 priv->curr_picture = NULL;
106 if (priv->next_picture) {
107 gst_vaapi_decoder_free_picture(base_decoder, priv->next_picture);
108 priv->next_picture = NULL;
111 if (priv->prev_picture) {
112 gst_vaapi_decoder_free_picture(base_decoder, priv->prev_picture);
113 priv->prev_picture = NULL;
116 if (priv->sub_buffer) {
117 gst_buffer_unref(priv->sub_buffer);
118 priv->sub_buffer = NULL;
122 gst_adapter_clear(priv->adapter);
123 g_object_unref(priv->adapter);
124 priv->adapter = NULL;
129 gst_vaapi_decoder_mpeg4_open(GstVaapiDecoderMpeg4 *decoder, GstBuffer *buffer)
131 GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER(decoder);
132 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
133 GstCaps *caps = NULL;
134 GstStructure *structure = NULL;
136 gst_vaapi_decoder_mpeg4_close(decoder);
138 priv->adapter = gst_adapter_new();
143 caps = gst_vaapi_decoder_get_caps(base_decoder);
145 structure = gst_caps_get_structure(caps, 0);
147 if (gst_structure_has_name(structure, "video/x-h263")) {
149 priv->profile = GST_VAAPI_PROFILE_MPEG4_SIMPLE;
150 priv->prev_t_ref = -1;
158 gst_vaapi_decoder_mpeg4_destroy(GstVaapiDecoderMpeg4 *decoder)
160 gst_vaapi_decoder_mpeg4_close(decoder);
164 gst_vaapi_decoder_mpeg4_create(GstVaapiDecoderMpeg4 *decoder)
166 if (!GST_VAAPI_DECODER_CODEC(decoder))
172 copy_quant_matrix(guint8 dst[64], const guint8 src[64])
174 memcpy(dst, src, 64);
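/* (Re)creates the VA decoder context whenever the profile or the coded
 * size changes.  If the current profile is MPEG-4 Simple but the display
 * has no decoder for it, Advanced Simple is tried as a fallback. */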
177 static GstVaapiDecoderStatus
178 ensure_context(GstVaapiDecoderMpeg4 *decoder)
180 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
181 GstVaapiProfile profiles[2];
182 GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
183 guint i, n_profiles = 0;
184 gboolean reset_context = FALSE;
186 if (priv->profile_changed) {
187 GST_DEBUG("profile changed");
188 priv->profile_changed = FALSE;
189 reset_context = TRUE;
191 profiles[n_profiles++] = priv->profile;
192 if (priv->profile == GST_VAAPI_PROFILE_MPEG4_SIMPLE)
193 profiles[n_profiles++] = GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE;
195 for (i = 0; i < n_profiles; i++) {
196 if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
197 profiles[i], entrypoint))
201 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
202 priv->profile = profiles[i];
205 if (priv->size_changed) {
206 GST_DEBUG("size changed");
207 priv->size_changed = FALSE;
208 reset_context = TRUE;
212 reset_context = gst_vaapi_decoder_ensure_context(
213 GST_VAAPI_DECODER(decoder),
216 priv->width, priv->height
219 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
221 return GST_VAAPI_DECODER_STATUS_SUCCESS;
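/* Allocates the picture's VAIQMatrixBufferMPEG4 and copies the intra and
 * non-intra quantization matrices from the VOL header, but only when the
 * corresponding load_*_quant_mat flags are set in the bitstream. */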
224 static GstVaapiDecoderStatus
225 ensure_quant_matrix(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
227 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
228 VAIQMatrixBufferMPEG4 *iq_matrix;
230 if (!priv->vol_hdr.load_intra_quant_mat && !priv->vol_hdr.load_non_intra_quant_mat) {
231 return GST_VAAPI_DECODER_STATUS_SUCCESS;
234 picture->iq_matrix = gst_vaapi_decoder_new_iq_matrix(GST_VAAPI_DECODER(decoder));
235 if (!picture->iq_matrix) {
236 GST_DEBUG("failed to allocate IQ matrix");
237 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
239 iq_matrix = picture->iq_matrix->param;
241 if (priv->vol_hdr.load_intra_quant_mat) {
242 iq_matrix->load_intra_quant_mat = 1;
243 copy_quant_matrix(iq_matrix->intra_quant_mat,
244 priv->vol_hdr.intra_quant_mat);
247 iq_matrix->load_intra_quant_mat = 0;
249 if (priv->vol_hdr.load_non_intra_quant_mat) {
250 iq_matrix->load_non_intra_quant_mat = 1;
251 copy_quant_matrix(iq_matrix->non_intra_quant_mat,
252 priv->vol_hdr.non_intra_quant_mat);
255 iq_matrix->load_non_intra_quant_mat = 0;
258 return GST_VAAPI_DECODER_STATUS_SUCCESS;
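/* Pushes the picture's surface to the decoder output queue. */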
261 static inline GstVaapiDecoderStatus
262 render_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
264 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
266 if (!gst_vaapi_decoder_push_surface(base_decoder,
269 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
270 return GST_VAAPI_DECODER_STATUS_SUCCESS;
273 /* decode_picture() starts decoding a frame/picture,
274 * decode_current_picture() finishes decoding it
275 * (commits the buffers to the driver for decoding)
277 static GstVaapiDecoderStatus
278 decode_current_picture(GstVaapiDecoderMpeg4 *decoder)
280 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
281 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
282 GstVaapiPicture * const picture = priv->curr_picture;
283 GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_SUCCESS;
286 if (!gst_vaapi_decoder_decode_picture(base_decoder, picture))
287 status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
288 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
289 if ((priv->prev_picture && priv->next_picture) ||
290 (priv->closed_gop && priv->next_picture))
291 status = render_picture(decoder, picture);
292 gst_vaapi_decoder_free_picture(base_decoder, picture);
294 priv->curr_picture = NULL;
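/* Parses a Visual Object Sequence header, maps the bitstream profile/level
 * to a GstVaapiProfile and records the buffer timestamp so that the PTS
 * offset can be computed against the first GOP time code. */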
299 static GstVaapiDecoderStatus
300 decode_sequence(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
302 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
303 GstMpeg4VisualObjectSequence * const vos_hdr = &priv->vos_hdr;
304 GstVaapiProfile profile;
306 if (gst_mpeg4_parse_visual_object_sequence(vos_hdr, buf, buf_size) != GST_MPEG4_PARSER_OK) {
307 GST_DEBUG("failed to parse sequence header");
308 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
311 priv->level = vos_hdr->level;
312 switch (vos_hdr->profile) {
313 case GST_MPEG4_PROFILE_SIMPLE:
314 profile = GST_VAAPI_PROFILE_MPEG4_SIMPLE;
316 case GST_MPEG4_PROFILE_ADVANCED_SIMPLE:
317 profile = GST_VAAPI_PROFILE_MPEG4_ADVANCED_SIMPLE;
320 GST_DEBUG("unsupported profile %d", profile);
321 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
323 if (priv->profile != profile) {
324 priv->profile = profile;
325 priv->profile_changed = TRUE;
327 priv->seq_pts = gst_adapter_prev_timestamp(priv->adapter, NULL);
328 priv->calculate_pts_diff = TRUE;
330 priv->size_changed = TRUE;
332 return GST_VAAPI_DECODER_STATUS_SUCCESS;
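/* Finishes and renders any pending pictures when a Visual Object Sequence
 * end code is met, then signals end of stream. */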
335 static GstVaapiDecoderStatus
336 decode_sequence_end(GstVaapiDecoderMpeg4 *decoder)
338 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
339 GstVaapiDecoderStatus status;
341 if (priv->curr_picture) {
342 status = decode_current_picture(decoder);
343 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
345 status = render_picture(decoder, priv->curr_picture);
346 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
350 if (priv->next_picture) {
351 status = render_picture(decoder, priv->next_picture);
352 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
355 return GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
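/* Parses the Visual Object header and the optional video_signal_type,
 * which is stored but not needed for decoding. */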
358 static GstVaapiDecoderStatus
359 decode_visual_object(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
361 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
362 GstMpeg4VisualObject * vo_hdr = &priv->vo_hdr;
363 GstMpeg4VideoSignalType * signal_type = &priv->signal_type;
365 if (gst_mpeg4_parse_visual_object (vo_hdr, signal_type, buf, buf_size) != GST_MPEG4_PARSER_OK) {
366 GST_DEBUG("failed to parse visual object");
367 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
370 /* XXX: video_signal_type isn't used for decoding */
371 return GST_VAAPI_DECODER_STATUS_SUCCESS;
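/* Parses the Video Object Layer header and propagates picture size,
 * interlacing, frame rate and pixel aspect ratio to the base decoder. */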
374 static GstVaapiDecoderStatus
375 decode_video_object_layer(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
377 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
378 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
379 GstMpeg4VisualObject * vo_hdr = &priv->vo_hdr;
380 GstMpeg4VideoObjectLayer * vol_hdr = &priv->vol_hdr;
382 if (gst_mpeg4_parse_video_object_layer (vol_hdr, vo_hdr, buf, buf_size) != GST_MPEG4_PARSER_OK) {
383 GST_DEBUG("failed to parse video object layer");
384 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
387 priv->width = vol_hdr->width;
388 priv->height = vol_hdr->height;
390 priv->progressive_sequence = !vol_hdr->interlaced;
392 if (vol_hdr->fixed_vop_rate) {
393 priv->fps_n = vol_hdr->vop_time_increment_resolution;
394 priv->fps_d = vol_hdr->fixed_vop_time_increment;
395 gst_vaapi_decoder_set_framerate(base_decoder, priv->fps_n, priv->fps_d);
398 gst_vaapi_decoder_set_pixel_aspect_ratio(base_decoder, priv->vol_hdr.par_width, priv->vol_hdr.par_height);
399 gst_vaapi_decoder_set_picture_size(base_decoder, priv->width, priv->height);
401 return GST_VAAPI_DECODER_STATUS_SUCCESS;
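/* Parses a Group of VOP header and resets the sync time bases (and, if
 * still pending, the PTS offset) from the GOP time code. */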
404 static GstVaapiDecoderStatus
405 decode_gop(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
407 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
408 GstMpeg4GroupOfVOP gop;
412 if (gst_mpeg4_parse_group_of_vop(&gop, buf, buf_size) != GST_MPEG4_PARSER_OK) {
413 GST_DEBUG("failed to parse GOP");
414 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
425 priv->closed_gop = gop.closed;
426 priv->broken_link = gop.broken_link;
428 GST_DEBUG("GOP %02u:%02u:%02u (closed_gop %d, broken_link %d)",
429 gop.hours, gop.minutes, gop.seconds,
430 priv->closed_gop, priv->broken_link);
432 pts = GST_SECOND * (gop.hours * 3600 + gop.minutes * 60 + gop.seconds);
433 priv->gop_pts = pts;
434 priv->last_sync_time = priv->gop_pts;
435 priv->sync_time = priv->gop_pts;
437 if (priv->calculate_pts_diff) {
438 priv->pts_diff = priv->seq_pts - priv->gop_pts;
439 priv->calculate_pts_diff = FALSE;
442 priv->is_first_field = TRUE;
444 return GST_VAAPI_DECODER_STATUS_SUCCESS;
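/* Parses a VOP header (or short video header) and starts a new picture:
 * the previous picture is finished, the context is re-created if needed,
 * the picture type and reference flags are set, and its PTS is derived
 * from modulo_time_base/time_increment (6.3.5) adjusted by pts_diff. */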
447 static GstVaapiDecoderStatus
448 decode_picture(GstVaapiDecoderMpeg4 *decoder, const guint8 *buf, guint buf_size)
450 GstMpeg4ParseResult parser_result = GST_MPEG4_PARSER_OK;
451 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER(decoder);
452 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
453 GstMpeg4VideoObjectPlane * const vop_hdr = &priv->vop_hdr;
454 GstMpeg4VideoObjectLayer * const vol_hdr = &priv->vol_hdr;
455 GstMpeg4SpriteTrajectory * const sprite_trajectory = &priv->sprite_trajectory;
456 GstVaapiPicture *picture;
457 GstVaapiDecoderStatus status;
460 // context depends on priv->width and priv->height, so we move parse_vop a little earlier
462 parser_result = gst_mpeg4_parse_video_plane_short_header(&priv->svh_hdr, buf, buf_size);
466 parser_result = gst_mpeg4_parse_video_object_plane(vop_hdr, sprite_trajectory, vol_hdr, buf, buf_size);
469 if (parser_result != GST_MPEG4_PARSER_OK) {
470 GST_DEBUG("failed to parse picture header");
471 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
475 priv->width = priv->svh_hdr.vop_width;
476 priv->height = priv->svh_hdr.vop_height;
479 if (!vop_hdr->width && !vop_hdr->height) {
480 vop_hdr->width = vol_hdr->width;
481 vop_hdr->height = vol_hdr->height;
483 priv->width = vop_hdr->width;
484 priv->height = vop_hdr->height;
487 status = ensure_context(decoder);
488 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
489 GST_DEBUG("failed to reset context");
493 if (priv->curr_picture) {
494 status = decode_current_picture(decoder);
495 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
499 priv->curr_picture = gst_vaapi_decoder_new_picture(base_decoder);
500 if (!priv->curr_picture) {
501 GST_DEBUG("failed to allocate picture");
502 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
504 picture = priv->curr_picture;
506 status = ensure_quant_matrix(decoder, picture);
507 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
508 GST_DEBUG("failed to reset quantizer matrix");
512 /* 7.6.7 Temporal prediction structure
513 * forward reference frame   B B B B B B   backward reference frame
515 * forward reference  = nearest I/P/S in the past with vop_coded == 1
516 * backward reference = nearest I/P/S in the future with any vop_coded value
517 * FIXME: the spec says a B frame should not use the backward reference frame
518 * when that reference frame has vop_coded == 0
521 priv->coding_type = priv->svh_hdr.picture_coding_type;
524 priv->coding_type = priv->vop_hdr.coding_type;
526 switch (priv->coding_type) {
527 case GST_MPEG4_I_VOP:
528 picture->type = GST_VAAPI_PICTURE_TYPE_I;
529 if (priv->is_svh || vop_hdr->coded)
530 picture->flags |= GST_VAAPI_PICTURE_REFERENCE;
532 case GST_MPEG4_P_VOP:
533 picture->type = GST_VAAPI_PICTURE_TYPE_P;
534 if (priv->is_svh || vop_hdr->coded)
535 picture->flags |= GST_VAAPI_PICTURE_REFERENCE;
537 case GST_MPEG4_B_VOP:
538 picture->type = GST_VAAPI_PICTURE_TYPE_B;
540 case GST_MPEG4_S_VOP:
541 picture->type = GST_VAAPI_PICTURE_TYPE_S;
542 // see 3.175 reference VOP
544 picture->flags |= GST_VAAPI_PICTURE_REFERENCE;
547 GST_DEBUG("unsupported picture type %d", priv->coding_type);
548 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
551 if (!priv->is_svh && !vop_hdr->coded) {
552 status = render_picture(decoder, priv->prev_picture);
557 guint temp_ref = priv->svh_hdr.temporal_reference;
558 if (temp_ref < priv->prev_t_ref) {
561 guint delta_ref = temp_ref - priv->prev_t_ref;
563 pts = priv->sync_time;
564 // see temporal_reference definition in spec, 30000/1001Hz
565 pts += gst_util_uint64_scale(delta_ref, GST_SECOND*1001, 30000);
566 priv->sync_time = pts;
567 priv->prev_t_ref = priv->svh_hdr.temporal_reference;
570 /* Update presentation time, 6.3.5 */
571 if (vop_hdr->coding_type != GST_MPEG4_B_VOP) {
572 // advance based on decoding order
573 priv->last_sync_time = priv->sync_time;
574 priv->sync_time = priv->last_sync_time + vop_hdr->modulo_time_base;
575 pts = priv->sync_time * GST_SECOND;
576 pts += gst_util_uint64_scale(vop_hdr->time_increment, GST_SECOND, vol_hdr->vop_time_increment_resolution);
579 // advance based on display order
580 pts = (priv->last_sync_time + vop_hdr->modulo_time_base) * GST_SECOND;
581 pts += gst_util_uint64_scale(vop_hdr->time_increment, GST_SECOND, vol_hdr->vop_time_increment_resolution);
584 picture->pts = pts + priv->pts_diff;
586 /* Update reference pictures */
587 /* XXX: consider priv->vol_hdr.low_delay, consider packed video frames for DivX/XviD */
588 if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
589 picture->flags |= GST_VAAPI_PICTURE_REFERENCE;
590 if (priv->prev_picture) {
591 gst_vaapi_decoder_free_picture(base_decoder, priv->prev_picture);
592 priv->prev_picture = NULL;
594 if (priv->next_picture) {
595 priv->prev_picture = priv->next_picture;
596 priv->next_picture = NULL;
597 status = render_picture(decoder, priv->prev_picture);
599 priv->next_picture = picture;
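/* Fills in the VAPictureParameterBufferMPEG4 for the current picture from
 * either the short video header or the VOL/VOP headers, including the
 * forward/backward reference surfaces for P and B VOPs. */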
605 fill_picture(GstVaapiDecoderMpeg4 *decoder, GstVaapiPicture *picture)
607 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
608 VAPictureParameterBufferMPEG4 * const pic_param = picture->param;
609 GstMpeg4VideoObjectPlane * const vop_hdr = &priv->vop_hdr;
611 /* Fill in VAPictureParameterBufferMPEG4 */
612 pic_param->forward_reference_picture = VA_INVALID_ID;
613 pic_param->backward_reference_picture = VA_INVALID_ID;
615 pic_param->vol_fields.value = 0;
616 pic_param->vop_fields.value = 0;
618 // vol_hdr Parameters
619 pic_param->vol_fields.bits.short_video_header = 1;
620 // do the following vol_hdr parameters matter for the short video header?
621 pic_param->vol_fields.bits.chroma_format = 1; // I420, see table 6-15.
622 pic_param->vol_fields.bits.interlaced = 0;
623 pic_param->vol_fields.bits.obmc_disable = 1;
624 pic_param->vol_fields.bits.sprite_enable = 0;
625 pic_param->vol_fields.bits.sprite_warping_accuracy = 0;
626 pic_param->vol_fields.bits.quant_type = 0; //method 1; $7.4.4
627 pic_param->vol_fields.bits.quarter_sample = 0;
628 pic_param->vol_fields.bits.data_partitioned = 0;
629 pic_param->vol_fields.bits.reversible_vlc = 0;
630 pic_param->vol_fields.bits.resync_marker_disable = 1;
631 pic_param->no_of_sprite_warping_points = 0;
632 pic_param->quant_precision = 5;
634 pic_param->vop_width = priv->svh_hdr.vop_width;
635 pic_param->vop_height = priv->svh_hdr.vop_height;
636 pic_param->vop_fields.bits.vop_coding_type = priv->svh_hdr.picture_coding_type;
637 pic_param->vop_time_increment_resolution = priv->vol_hdr.vop_time_increment_resolution;
639 pic_param->num_gobs_in_vop = priv->svh_hdr.num_gobs_in_vop;
640 pic_param->num_macroblocks_in_gob = priv->svh_hdr.num_macroblocks_in_gob;
644 pic_param->vol_fields.bits.short_video_header = 0;
645 pic_param->vol_fields.bits.chroma_format = priv->vol_hdr.chroma_format;
646 pic_param->vol_fields.bits.interlaced = priv->vol_hdr.interlaced;
647 pic_param->vol_fields.bits.obmc_disable = priv->vol_hdr.obmc_disable;
648 pic_param->vol_fields.bits.sprite_enable = priv->vol_hdr.sprite_enable;
649 pic_param->vol_fields.bits.sprite_warping_accuracy = priv->vol_hdr.sprite_warping_accuracy;
650 pic_param->vol_fields.bits.quant_type = priv->vol_hdr.quant_type;
651 pic_param->vol_fields.bits.quarter_sample = priv->vol_hdr.quarter_sample;
652 pic_param->vol_fields.bits.data_partitioned = priv->vol_hdr.data_partitioned;
653 pic_param->vol_fields.bits.reversible_vlc = priv->vol_hdr.reversible_vlc;
654 pic_param->vol_fields.bits.resync_marker_disable = priv->vol_hdr.resync_marker_disable;
655 pic_param->no_of_sprite_warping_points = priv->vol_hdr.no_of_sprite_warping_points;
657 for (i = 0; i < 3 && i < priv->vol_hdr.no_of_sprite_warping_points; i++) {
658 pic_param->sprite_trajectory_du[i] = priv->sprite_trajectory.vop_ref_points[i];
659 pic_param->sprite_trajectory_dv[i] = priv->sprite_trajectory.sprite_ref_points[i];
661 pic_param->quant_precision = priv->vol_hdr.quant_precision;
664 pic_param->vop_width = vop_hdr->width;
665 pic_param->vop_height = vop_hdr->height;
666 pic_param->vop_fields.bits.vop_coding_type = vop_hdr->coding_type;
667 pic_param->vop_fields.bits.vop_rounding_type = vop_hdr->rounding_type;
668 pic_param->vop_fields.bits.intra_dc_vlc_thr = vop_hdr->intra_dc_vlc_thr;
669 pic_param->vop_fields.bits.top_field_first = vop_hdr->top_field_first;
670 pic_param->vop_fields.bits.alternate_vertical_scan_flag = vop_hdr->alternate_vertical_scan_flag;
672 pic_param->vop_fcode_forward = vop_hdr->fcode_forward;
673 pic_param->vop_fcode_backward = vop_hdr->fcode_backward;
674 pic_param->vop_time_increment_resolution = priv->vol_hdr.vop_time_increment_resolution;
677 switch (priv->coding_type) {
678 case GST_MPEG4_B_VOP:
679 pic_param->TRB = priv->curr_picture->pts - priv->prev_picture->pts;
680 pic_param->TRD = priv->next_picture->pts - priv->prev_picture->pts;
681 pic_param->backward_reference_picture = priv->next_picture->surface_id;
682 pic_param->vop_fields.bits.backward_reference_vop_coding_type = priv->next_picture->type;
684 case GST_MPEG4_P_VOP:
685 if (priv->prev_picture)
686 pic_param->forward_reference_picture = priv->prev_picture->surface_id;
690 if (priv->vol_hdr.interlaced) {
691 priv->is_first_field ^= 1;
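/* decode_slice() creates a VA slice for the given data.  The macroblock
 * offset is the bit position of the first macroblock within its starting
 * byte, taken from the short video header, the video packet header or the
 * VOP header, depending on how the slice was located. */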
696 static GstVaapiDecoderStatus
698 GstVaapiDecoderMpeg4 *decoder,
701 gboolean has_packet_header
704 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
705 GstVaapiPicture * const picture = priv->curr_picture;
706 GstVaapiSlice *slice;
707 VASliceParameterBufferMPEG4 *slice_param;
709 GST_DEBUG("decoder silce: %p, %u bytes)", buf, buf_size);
711 // has_packet_header is ture for the 2+ slice
712 if (!has_packet_header && !fill_picture(decoder, picture))
713 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
715 slice = gst_vaapi_decoder_new_slice(
716 GST_VAAPI_DECODER(decoder),
718 (guchar*)buf, buf_size
721 GST_DEBUG("failed to allocate slice");
722 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
725 /* Fill in VASliceParameterBufferMPEG4 */
726 slice_param = slice->param;
728 slice_param->macroblock_offset = (priv->svh_hdr.size)%8;
729 slice_param->macroblock_number = 0;
730 // the header of first gob_layer is empty (gob_header_empty=1), use vop_quant
731 slice_param->quant_scale = priv->svh_hdr.vop_quant;
734 if (has_packet_header) {
735 slice_param->macroblock_offset = priv->packet_hdr.size % 8;
736 slice_param->macroblock_number = priv->packet_hdr.macroblock_number;
737 slice_param->quant_scale = priv->packet_hdr.quant_scale;
740 slice_param->macroblock_offset = priv->vop_hdr.size % 8;
741 slice_param->macroblock_number = 0;
742 slice_param->quant_scale = priv->vop_hdr.quant;
745 return GST_VAAPI_DECODER_STATUS_SUCCESS;
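/* Dispatches one start-code delimited unit to the matching decode_*()
 * helper.  A VOP is additionally split into slices at resync markers
 * (video packet headers) before the picture is submitted to the driver. */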
748 static GstVaapiDecoderStatus
749 decode_packet(GstVaapiDecoderMpeg4 *decoder, GstMpeg4Packet packet)
751 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
752 GstMpeg4Packet *tos = &packet;
753 GstVaapiDecoderStatus status;
755 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
757 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
759 // packet.size is the size from current marker to the next.
760 if (tos->type == GST_MPEG4_VISUAL_OBJ_SEQ_START) {
761 status = decode_sequence(decoder, packet.data + packet.offset, packet.size);
763 else if (tos->type == GST_MPEG4_VISUAL_OBJ_SEQ_END) {
764 status = decode_sequence_end(decoder);
766 else if (tos->type == GST_MPEG4_VISUAL_OBJ) {
767 status = decode_visual_object(decoder, packet.data + packet.offset, packet.size);
769 else if (tos->type >= GST_MPEG4_VIDEO_OBJ_FIRST && tos->type <= GST_MPEG4_VIDEO_OBJ_LAST) {
770 GST_WARNING("unexpected marker: (GST_MPEG4_VIDEO_OBJ_FIRST, GST_MPEG4_VIDEO_OBJ_LAST)");
771 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
773 else if (tos->type >= GST_MPEG4_VIDEO_LAYER_FIRST && tos->type <= GST_MPEG4_VIDEO_LAYER_LAST) {
774 status = decode_video_object_layer(decoder, packet.data + packet.offset, packet.size);
776 else if (tos->type == GST_MPEG4_GROUP_OF_VOP) {
777 status = decode_gop(decoder, packet.data + packet.offset, packet.size);
779 else if (tos->type == GST_MPEG4_VIDEO_OBJ_PLANE) {
780 status = decode_picture(decoder, packet.data + packet.offset, packet.size);
783 * A resync marker shall only be located immediately before a macroblock
784 * (or a video packet header, if present) and shall be byte-aligned.
785 * Start codes and resync markers are scanned/measured in bytes,
786 * while the headers themselves are parsed/measured in bits.
787 * That is, a resync marker (video packet header) starts on a byte boundary,
788 * while a MB does not -- which is what 'macroblock_offset' accounts for
791 const guint8 *_data = packet.data + packet.offset + priv->vop_hdr.size/8;
792 gint _data_size = packet.size - (priv->vop_hdr.size/8);
793 GstMpeg4Packet video_packet;
795 if (priv->vol_hdr.resync_marker_disable) {
796 status = decode_slice(decoder, _data, _data_size, FALSE);
799 // next start_code is required to determine the end of last slice
801 GstMpeg4ParseResult ret = GST_MPEG4_PARSER_OK;
803 gboolean first_slice = TRUE;
804 while (_data_size > 0) {
805 // we can skip user data here
806 ret = gst_mpeg4_parse(&video_packet, TRUE, &priv->vop_hdr, _data, 0, _data_size);
807 if (ret != GST_MPEG4_PARSER_OK) {
812 status = decode_slice(decoder, _data, video_packet.size, FALSE);
816 _data += video_packet.offset;
817 _data_size -= video_packet.offset;
819 ret = gst_mpeg4_parse_video_packet_header (&priv->packet_hdr, &priv->vol_hdr, &priv->vop_hdr, &priv->sprite_trajectory, _data, _data_size);
820 status = decode_slice(decoder, _data + priv->packet_hdr.size/8, video_packet.size - priv->packet_hdr.size/8, TRUE);
823 _data += video_packet.size;
824 _data_size -= video_packet.size;
827 status = decode_current_picture(decoder);
829 else if (tos->type == GST_MPEG4_USER_DATA
830 || tos->type == GST_MPEG4_VIDEO_SESSION_ERR
831 || tos->type == GST_MPEG4_FBA
832 || tos->type == GST_MPEG4_FBA_PLAN
833 || tos->type == GST_MPEG4_MESH
834 || tos->type == GST_MPEG4_MESH_PLAN
835 || tos->type == GST_MPEG4_STILL_TEXTURE_OBJ
836 || tos->type == GST_MPEG4_TEXTURE_SPATIAL
837 || tos->type == GST_MPEG4_TEXTURE_SNR_LAYER
838 || tos->type == GST_MPEG4_TEXTURE_TILE
839 || tos->type == GST_MPEG4_SHAPE_LAYER
840 || tos->type == GST_MPEG4_STUFFING
841 || tos->type == GST_MPEG4_SYSTEM_FIRST
842 || tos->type == GST_MPEG4_SYSTEM_LAST) {
843 GST_WARNING("Ignore marker: %x\n", tos->type);
844 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
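/* Decodes one input buffer.  Short-video-header (H.263) streams are walked
 * with gst_h263_parse(), other MPEG-4 streams with gst_mpeg4_parse(); any
 * trailing incomplete unit is kept in sub_buffer and merged with the next
 * input buffer. */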
850 static GstVaapiDecoderStatus
851 decode_buffer(GstVaapiDecoderMpeg4 *decoder, GstBuffer *buffer)
853 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
854 GstVaapiDecoderStatus status;
858 buf = GST_BUFFER_DATA(buffer);
859 buf_size = GST_BUFFER_SIZE(buffer);
861 // visual object sequence end
862 if (!buf && buf_size == 0)
863 return decode_sequence_end(decoder);
865 gst_buffer_ref(buffer);
866 gst_adapter_push(priv->adapter, buffer);
868 if (priv->sub_buffer) {
869 buffer = gst_buffer_merge(priv->sub_buffer, buffer);
871 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
872 gst_buffer_unref(priv->sub_buffer);
873 priv->sub_buffer = NULL;
876 buf = GST_BUFFER_DATA(buffer);
877 buf_size = GST_BUFFER_SIZE(buffer);
880 GstMpeg4Packet packet;
881 GstMpeg4ParseResult result = GST_MPEG4_PARSER_OK;
882 guint consumed_size = 0;
885 while (result == GST_MPEG4_PARSER_OK && pos < buf_size) {
886 result = gst_h263_parse(&packet, buf, pos, buf_size);
887 if (result != GST_MPEG4_PARSER_OK) {
890 status = decode_picture(decoder, packet.data+packet.offset, packet.size);
891 if (GST_VAAPI_DECODER_STATUS_SUCCESS == status) {
892 // MBs are not byte-aligned, so pass a byte-aligned start address and
893 // carry the remaining bit offset in macroblock_offset, i.e. (priv->svh_hdr.size) % 8
894 status = decode_slice(decoder, packet.data+packet.offset+(priv->svh_hdr.size)/8,
895 packet.size - (priv->svh_hdr.size)/8, FALSE);
896 status = decode_current_picture(decoder);
898 consumed_size = packet.offset + packet.size;
899 pos += consumed_size;
900 if (gst_adapter_available(priv->adapter) >= pos)
901 gst_adapter_flush(priv->adapter, pos);
904 GST_WARNING("decode h263 packet failed\n");
910 while (pos < buf_size) {
911 // don't skip user data, its size is needed to flush the right amount from the adapter
912 result = gst_mpeg4_parse(&packet, FALSE, NULL, buf, pos, buf_size);
913 if (result != GST_MPEG4_PARSER_OK) {
916 status = decode_packet(decoder, packet);
917 if (GST_VAAPI_DECODER_STATUS_SUCCESS == status) {
918 consumed_size = packet.offset + packet.size - pos;
919 pos = packet.offset + packet.size;
920 if (gst_adapter_available(priv->adapter) >= pos)
921 gst_adapter_flush(priv->adapter, pos);
924 GST_WARNING("decode mp4 packet failed\n");
930 if ((result == GST_MPEG4_PARSER_NO_PACKET || result == GST_MPEG4_PARSER_NO_PACKET_END) && pos < buf_size) {
931 priv->sub_buffer = gst_buffer_create_sub(buffer, pos, buf_size-pos);
932 status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
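/* Decodes the codec_data blob (configuration headers) provided through the
 * caps.  An extra 0x000001b2 (user data) start code is appended so the
 * parser can determine the size of the last header. */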
937 static GstVaapiDecoderStatus
938 decode_codec_data(GstVaapiDecoderMpeg4 *decoder, GstBuffer *buffer)
940 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
941 GstVaapiDecoderStatus status;
943 guint pos, buf_size, _buf_size;
945 _buf = GST_BUFFER_DATA(buffer);
946 _buf_size = GST_BUFFER_SIZE(buffer);
947 // add additional 0x000001b2 to enclose the last header
948 buf_size = _buf_size + 4;
949 buf = malloc(buf_size);
950 memcpy(buf, _buf, _buf_size);
954 buf[buf_size-1] = 0xb2;
957 GstMpeg4Packet packet;
958 GstMpeg4ParseResult result = GST_MPEG4_PARSER_OK;
960 while (result == GST_MPEG4_PARSER_OK && pos < buf_size) {
961 result = gst_mpeg4_parse(&packet, FALSE, NULL, buf, pos, buf_size);
962 if (result != GST_MPEG4_PARSER_OK) {
965 status = decode_packet(decoder, packet);
966 if (GST_VAAPI_DECODER_STATUS_SUCCESS == status) {
967 pos = packet.offset + packet.size;
970 GST_WARNING("decode mp4 packet failed when decoding codec data\n");
978 GstVaapiDecoderStatus
979 gst_vaapi_decoder_mpeg4_decode(GstVaapiDecoder *base, GstBuffer *buffer)
981 GstVaapiDecoderMpeg4 * const decoder = GST_VAAPI_DECODER_MPEG4(base);
982 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
983 GstBuffer *codec_data = NULL;
984 GstVaapiDecoderStatus status;
986 g_return_val_if_fail(priv->is_constructed,
987 GST_VAAPI_DECODER_STATUS_ERROR_INIT_FAILED);
989 if (!priv->is_opened) {
990 priv->is_opened = gst_vaapi_decoder_mpeg4_open(decoder, buffer);
991 if (!priv->is_opened)
992 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
994 codec_data = GST_VAAPI_DECODER_CODEC_DATA(decoder);
996 status = decode_codec_data(decoder, codec_data);
997 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1001 return decode_buffer(decoder, buffer);
1005 gst_vaapi_decoder_mpeg4_finalize(GObject *object)
1007 GstVaapiDecoderMpeg4 * const decoder = GST_VAAPI_DECODER_MPEG4(object);
1009 gst_vaapi_decoder_mpeg4_destroy(decoder);
1011 G_OBJECT_CLASS(gst_vaapi_decoder_mpeg4_parent_class)->finalize(object);
1015 gst_vaapi_decoder_mpeg4_constructed(GObject *object)
1017 GstVaapiDecoderMpeg4 * const decoder = GST_VAAPI_DECODER_MPEG4(object);
1018 GstVaapiDecoderMpeg4Private * const priv = decoder->priv;
1019 GObjectClass *parent_class;
1021 parent_class = G_OBJECT_CLASS(gst_vaapi_decoder_mpeg4_parent_class);
1022 if (parent_class->constructed)
1023 parent_class->constructed(object);
1025 priv->is_constructed = gst_vaapi_decoder_mpeg4_create(decoder);
1029 gst_vaapi_decoder_mpeg4_class_init(GstVaapiDecoderMpeg4Class *klass)
1031 GObjectClass * const object_class = G_OBJECT_CLASS(klass);
1032 GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
1034 g_type_class_add_private(klass, sizeof(GstVaapiDecoderMpeg4Private));
1036 object_class->finalize = gst_vaapi_decoder_mpeg4_finalize;
1037 object_class->constructed = gst_vaapi_decoder_mpeg4_constructed;
1039 decoder_class->decode = gst_vaapi_decoder_mpeg4_decode;
1043 gst_vaapi_decoder_mpeg4_init(GstVaapiDecoderMpeg4 *decoder)
1045 GstVaapiDecoderMpeg4Private *priv;
1047 priv = GST_VAAPI_DECODER_MPEG4_GET_PRIVATE(decoder);
1048 decoder->priv = priv;
1053 priv->profile = GST_VAAPI_PROFILE_MPEG4_SIMPLE;
1054 priv->curr_picture = NULL;
1055 priv->next_picture = NULL;
1056 priv->prev_picture = NULL;
1057 priv->adapter = NULL;
1058 priv->sub_buffer = NULL;
1059 priv->seq_pts = GST_CLOCK_TIME_NONE;
1060 priv->gop_pts = GST_CLOCK_TIME_NONE;
1062 priv->calculate_pts_diff = TRUE;
1063 priv->is_constructed = FALSE;
1064 priv->is_opened = FALSE;
1065 priv->is_first_field = FALSE;
1066 priv->size_changed = TRUE;
1067 priv->profile_changed = TRUE;
1068 priv->progressive_sequence = FALSE;
1069 priv->closed_gop = FALSE;
1070 priv->broken_link = FALSE;
1074 * gst_vaapi_decoder_mpeg4_new:
1075 * @display: a #GstVaapiDisplay
1076 * @caps: a #GstCaps holding codec information
1078 * Creates a new #GstVaapiDecoder for MPEG-4 decoding. The @caps can
1079 * hold extra information like codec-data and picture coded size.
1081 * Return value: the newly allocated #GstVaapiDecoder object
1084 gst_vaapi_decoder_mpeg4_new(GstVaapiDisplay *display, GstCaps *caps)
1086 GstVaapiDecoderMpeg4 *decoder;
1088 static const GstVaapiCodecInfo codec_info = {
1089 .pic_size = sizeof(GstVaapiPicture),
1090 .slice_size = sizeof(GstVaapiSlice),
1091 .pic_param_size = sizeof(VAPictureParameterBufferMPEG4),
1092 .slice_param_size = sizeof(VASliceParameterBufferMPEG4),
1093 .iq_matrix_size = sizeof(VAIQMatrixBufferMPEG4),
1096 g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
1097 g_return_val_if_fail(GST_IS_CAPS(caps), NULL);
1099 decoder = g_object_new(
1100 GST_VAAPI_TYPE_DECODER_MPEG4,
1103 "codec-info", &codec_info,
1106 if (!decoder->priv->is_constructed) {
1107 g_object_unref(decoder);
1110 return GST_VAAPI_DECODER_CAST(decoder);