2 * Copyright (C) 2006-2012 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
28 #include "intel_batchbuffer.h"
29 #include "i965_drv_video.h"
30 #include "i965_decoder_utils.h"
31 #include "i965_defines.h"
33 /* Set reference surface if backing store exists */
/*
 * Binds (va_surface, obj_surface) into @ref_frame only when the surface ID
 * is valid and the surface object has an allocated buffer object.
 * NOTE(review): the return type/value lines are elided in this extract;
 * callers add the result to a running count, so presumably it returns
 * 1 on success and 0 otherwise — confirm against the full source.
 */
36 struct i965_driver_data *i965,
37 GenFrameStore *ref_frame,
38 VASurfaceID va_surface,
39 struct object_surface *obj_surface
/* No surface supplied by the application: nothing to bind */
42 if (va_surface == VA_INVALID_ID)
/* Surface must exist and have a backing store (bo) to be usable as a reference */
45 if (!obj_surface || !obj_surface->bo)
48 ref_frame->surface_id = va_surface;
49 ref_frame->obj_surface = obj_surface;
53 /* Check whether codec layer incorrectly fills in slice_vertical_position */
/*
 * Scans all MPEG-2 slice parameters of a field-coded picture; if any slice's
 * slice_vertical_position is out of range (>= mb_height) or jumps by 2
 * relative to the previous slice, the upper layer is assumed to have used
 * frame-based (not field-based) numbering and a workaround is flagged.
 */
55 mpeg2_wa_slice_vertical_position(
56 struct decode_state *decode_state,
57 VAPictureParameterBufferMPEG2 *pic_param
60 unsigned int i, j, mb_height, vpos, last_vpos = 0;
62 /* Assume progressive sequence if we got a progressive frame */
63 if (pic_param->picture_coding_extension.bits.progressive_frame)
66 /* Wait for a field coded picture */
67 if (pic_param->picture_coding_extension.bits.picture_structure == MPEG_FRAME)
70 assert(decode_state && decode_state->slice_params);
/* Macroblock rows per field: 32 source lines per field MB row
   (16-line MBs over half-height fields) — presumably; confirm vs. spec */
72 mb_height = (pic_param->vertical_size + 31) / 32;
74 for (j = 0; j < decode_state->num_slice_params; j++) {
75 struct buffer_store * const buffer_store =
76 decode_state->slice_params[j];
/* Each buffer_store element is one VASliceParameterBufferMPEG2 */
78 for (i = 0; i < buffer_store->num_elements; i++) {
79 VASliceParameterBufferMPEG2 * const slice_param =
80 ((VASliceParameterBufferMPEG2 *)buffer_store->buffer) + i;
82 vpos = slice_param->slice_vertical_position;
/* Out-of-range, or stepping by 2, betrays frame-based slice numbering */
83 if (vpos >= mb_height || vpos == last_vpos + 2) {
84 WARN_ONCE("codec layer incorrectly fills in MPEG-2 slice_vertical_position. Workaround applied\n");
93 /* Build MPEG-2 reference frames array */
/*
 * Fills ref_frames[] for the hardware from the VA picture parameters:
 * slot 0/1 pairs cover the forward/backward references; for field pictures
 * decoding their second field, the just-decoded first field (the current
 * render target) is used as a reference as well. Missing entries are
 * replicated from earlier slots. NOTE(review): case labels/breaks are
 * elided in this extract — structure inferred from the switch bodies.
 */
95 mpeg2_set_reference_surfaces(
97 GenFrameStore ref_frames[MAX_GEN_REFERENCE_FRAMES],
98 struct decode_state *decode_state,
99 VAPictureParameterBufferMPEG2 *pic_param
102 struct i965_driver_data * const i965 = i965_driver_data(ctx);
103 VASurfaceID va_surface;
104 unsigned pic_structure, is_second_field, n = 0;
105 struct object_surface *obj_surface;
107 pic_structure = pic_param->picture_coding_extension.bits.picture_structure;
/* Second field of a field-coded picture: first field is already decoded */
108 is_second_field = pic_structure != MPEG_FRAME &&
109 !pic_param->picture_coding_extension.bits.is_first_field;
111 ref_frames[0].surface_id = VA_INVALID_ID;
112 ref_frames[0].obj_surface = NULL;
114 /* Reference frames are indexed by frame store ID (0:top, 1:bottom) */
115 switch (pic_param->picture_coding_type) {
/* P picture, bottom field second: top field of the current surface
   serves as the forward reference */
117 if (is_second_field && pic_structure == MPEG_BOTTOM_FIELD) {
118 va_surface = decode_state->current_render_target;
119 obj_surface = decode_state->render_object;
120 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
122 va_surface = pic_param->forward_reference_picture;
123 obj_surface = decode_state->reference_objects[0];
124 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
/* B picture: forward then backward reference */
128 va_surface = pic_param->forward_reference_picture;
129 obj_surface = decode_state->reference_objects[0];
130 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
131 va_surface = pic_param->backward_reference_picture;
132 obj_surface = decode_state->reference_objects[1];
133 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
/* Pad remaining top-field slots by replicating slot 0 */
138 ref_frames[n].obj_surface = ref_frames[0].obj_surface;
139 ref_frames[n++].surface_id = ref_frames[0].surface_id;
/* Progressive frame: no separate bottom-field references needed */
142 if (pic_param->picture_coding_extension.bits.progressive_frame)
145 ref_frames[2].surface_id = VA_INVALID_ID;
146 ref_frames[2].obj_surface = NULL;
148 /* Bottom field pictures used as reference */
149 switch (pic_param->picture_coding_type) {
/* P picture, top field second: bottom field of the current surface
   serves as the forward reference */
151 if (is_second_field && pic_structure == MPEG_TOP_FIELD) {
152 va_surface = decode_state->current_render_target;
153 obj_surface = decode_state->render_object;
154 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
156 va_surface = pic_param->forward_reference_picture;
157 obj_surface = decode_state->reference_objects[0];
158 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
/* B picture: forward then backward reference (bottom fields) */
162 va_surface = pic_param->forward_reference_picture;
163 obj_surface = decode_state->reference_objects[0];
164 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
165 va_surface = pic_param->backward_reference_picture;
166 obj_surface = decode_state->reference_objects[1];
167 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
/* Pad remaining bottom-field slots by replicating slot 2 */
172 ref_frames[n].obj_surface = ref_frames[2].obj_surface;
173 ref_frames[n++].surface_id = ref_frames[2].surface_id;
177 /* Generate flat scaling matrices for H.264 decoding */
/* Every scaling-list entry is set to 16, i.e. the "flat" default
   (Flat_4x4_16 / Flat_8x8_16 in the H.264 spec terminology) */
179 avc_gen_default_iq_matrix(VAIQMatrixBufferH264 *iq_matrix)
182 memset(&iq_matrix->ScalingList4x4, 16, sizeof(iq_matrix->ScalingList4x4));
185 memset(&iq_matrix->ScalingList8x8, 16, sizeof(iq_matrix->ScalingList8x8));
188 /* Get first macroblock bit offset for BSD, minus EPB count (AVC) */
189 /* XXX: slice_data_bit_offset does not account for EPB */
/*
 * Returns the bit offset of the first macroblock, taken directly from the
 * slice parameter. slice_data_bo is unused here (kept for signature parity
 * with the _with_epb variant below).
 */
191 avc_get_first_mb_bit_offset(
192 dri_bo *slice_data_bo,
193 VASliceParameterBufferH264 *slice_param,
194 unsigned int mode_flag
197 unsigned int slice_data_bit_offset = slice_param->slice_data_bit_offset;
/* CABAC slice data starts byte-aligned, so round the offset up to 8 bits */
199 if (mode_flag == ENTROPY_CABAC)
200 slice_data_bit_offset = ALIGN(slice_data_bit_offset, 0x8);
201 return slice_data_bit_offset;
204 /* Get first macroblock bit offset for BSD, with EPB count (AVC) */
205 /* XXX: slice_data_bit_offset does not account for EPB */
/*
 * Like avc_get_first_mb_bit_offset(), but re-adds the emulation prevention
 * bytes (EPB, the 0x03 in each 0x000003 sequence) that precede the first
 * macroblock: it reads the slice header bytes back from @slice_data_bo,
 * counts EPBs, and shifts the offset by 8 bits per EPB found.
 */
207 avc_get_first_mb_bit_offset_with_epb(
208 dri_bo *slice_data_bo,
209 VASliceParameterBufferH264 *slice_param,
210 unsigned int mode_flag
213 unsigned int in_slice_data_bit_offset = slice_param->slice_data_bit_offset;
214 unsigned int out_slice_data_bit_offset;
215 unsigned int i, n, buf_size, data_size;
/* Only the bytes before the first MB can contain relevant EPBs */
219 buf_size = slice_param->slice_data_bit_offset / 8;
220 data_size = slice_param->slice_data_size - slice_param->slice_data_offset;
221 if (buf_size > data_size)
222 buf_size = data_size;
/* NOTE(review): alloca() of a caller-controlled size — bounded by the
   slice header length, but worth confirming upstream validation */
224 buf = alloca(buf_size);
225 ret = dri_bo_get_subdata(
226 slice_data_bo, slice_param->slice_data_offset,
/* Count 0x00 0x00 0x03 emulation-prevention sequences */
231 for (i = 2, n = 0; i < buf_size; i++) {
232 if (!buf[i - 2] && !buf[i - 1] && buf[i] == 3)
/* Each EPB adds a full byte (8 bits) before the first MB */
235 out_slice_data_bit_offset = in_slice_data_bit_offset + n * 8;
237 if (mode_flag == ENTROPY_CABAC)
238 out_slice_data_bit_offset = ALIGN(out_slice_data_bit_offset, 0x8);
239 return out_slice_data_bit_offset;
/*
 * Packs one reference-list entry into the byte layout the MFX hardware
 * expects:
 *   bit 6    : long-term reference flag
 *   bit 5    : 1 when the picture is a frame (neither or both field flags)
 *   bits 4:1 : frame store index
 *   bit 0    : 1 only for a bottom-field-only picture
 */
242 static inline uint8_t
243 get_ref_idx_state_1(const VAPictureH264 *va_pic, unsigned int frame_store_id)
245 const unsigned int is_long_term =
246 !!(va_pic->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE);
247 const unsigned int is_top_field =
248 !!(va_pic->flags & VA_PICTURE_H264_TOP_FIELD);
249 const unsigned int is_bottom_field =
250 !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
252 return ((is_long_term << 6) |
253 ((is_top_field ^ is_bottom_field ^ 1) << 5) |
254 (frame_store_id << 1) |
255 ((is_top_field ^ 1) & is_bottom_field));
258 /* Fill in Reference List Entries (Gen5+: ILK, SNB, IVB) */
/*
 * Translates a VA reference picture list into packed hardware ref-idx
 * entries: each valid picture is matched to its frame store slot by
 * surface ID, then encoded via get_ref_idx_state_1(). Invalid entries
 * are skipped. NOTE(review): the output parameter declaration (state[])
 * is elided in this extract.
 */
260 gen5_fill_avc_ref_idx_state(
262 const VAPictureH264 ref_list[32],
263 unsigned int ref_list_count,
264 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
267 unsigned int i, n, frame_idx;
269 for (i = 0, n = 0; i < ref_list_count; i++) {
270 const VAPictureH264 * const va_pic = &ref_list[i];
/* Skip unused/invalid list entries */
272 if (va_pic->flags & VA_PICTURE_H264_INVALID)
/* Locate the frame store slot holding this picture's surface */
275 for (frame_idx = 0; frame_idx < MAX_GEN_REFERENCE_FRAMES; frame_idx++) {
276 const GenFrameStore * const fs = &frame_store[frame_idx];
277 if (fs->surface_id != VA_INVALID_ID &&
278 fs->surface_id == va_pic->picture_id) {
/* Slot index must match its recorded frame_store_id */
279 assert(frame_idx == fs->frame_store_id);
/* Every reference picture must be present in the frame store */
283 assert(frame_idx < MAX_GEN_REFERENCE_FRAMES);
284 state[n++] = get_ref_idx_state_1(va_pic, frame_idx);
291 /* Emit Reference List Entries (Gen6+: SNB, IVB) */
/*
 * Emits one MFX_AVC_REF_IDX_STATE command (10 dwords) for the given
 * reference list: header, list selector, then the 32 packed ref-idx
 * bytes produced by gen5_fill_avc_ref_idx_state().
 */
293 gen6_send_avc_ref_idx_state_1(
294 struct intel_batchbuffer *batch,
296 const VAPictureH264 *ref_list,
297 unsigned int ref_list_count,
298 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
/* 32 entries x 1 byte = 8 dwords of payload */
301 uint8_t ref_idx_state[32];
303 BEGIN_BCS_BATCH(batch, 10);
304 OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | (10 - 2));
/* RefPicList selector: 0 = L0, 1 = L1 — presumably; confirm vs. PRM */
305 OUT_BCS_BATCH(batch, list);
306 gen5_fill_avc_ref_idx_state(
308 ref_list, ref_list_count,
311 intel_batchbuffer_data(batch, ref_idx_state, sizeof(ref_idx_state));
312 ADVANCE_BCS_BATCH(batch);
/*
 * Emits the ref-idx state commands required by the slice type:
 * I/SI slices need none, P slices need list 0 only, B slices need
 * both list 0 and list 1.
 */
316 gen6_send_avc_ref_idx_state(
317 struct intel_batchbuffer *batch,
318 const VASliceParameterBufferH264 *slice_param,
319 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
/* Intra slices carry no reference lists */
322 if (slice_param->slice_type == SLICE_TYPE_I ||
323 slice_param->slice_type == SLICE_TYPE_SI)
/* List 0 (forward prediction) */
327 gen6_send_avc_ref_idx_state_1(
329 slice_param->RefPicList0, slice_param->num_ref_idx_l0_active_minus1 + 1,
/* Only B slices also use list 1 */
333 if (slice_param->slice_type != SLICE_TYPE_B)
337 gen6_send_avc_ref_idx_state_1(
339 slice_param->RefPicList1, slice_param->num_ref_idx_l1_active_minus1 + 1,
/*
 * Synchronizes the driver's internal DPB (frame_store[]) with the
 * reference frame list supplied in the picture parameters. Three phases:
 *  1) evict frame store entries no longer referenced (releasing display-
 *     done surfaces and private data),
 *  2) insert newly referenced surfaces into free slots, assigning each a
 *     free frame store index,
 *  3) reorder entries so that frame_store[i].frame_store_id == i.
 */
345 intel_update_avc_frame_store_index(VADriverContextP ctx,
346 struct decode_state *decode_state,
347 VAPictureParameterBufferH264 *pic_param,
348 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
352 assert(MAX_GEN_REFERENCE_FRAMES == ARRAY_ELEMS(pic_param->ReferenceFrames));
/* Phase 1: drop frame store entries not present in ReferenceFrames[] */
354 for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
357 if (frame_store[i].surface_id == VA_INVALID_ID ||
358 frame_store[i].obj_surface == NULL)
361 assert(frame_store[i].frame_store_id != -1);
363 for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
364 VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[j];
365 if (ref_pic->flags & VA_PICTURE_H264_INVALID)
/* Still referenced: keep this entry */
368 if (frame_store[i].surface_id == ref_pic->picture_id) {
374 /* remove it from the internal DPB */
376 struct object_surface *obj_surface = frame_store[i].obj_surface;
378 obj_surface->flags &= ~SURFACE_REFERENCED;
/* Surface already displayed and now unreferenced: free its bo */
380 if ((obj_surface->flags & SURFACE_ALL_MASK) == SURFACE_DISPLAYED) {
381 dri_bo_unreference(obj_surface->bo);
382 obj_surface->bo = NULL;
383 obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
386 if (obj_surface->free_private_data)
387 obj_surface->free_private_data(&obj_surface->private_data);
389 frame_store[i].surface_id = VA_INVALID_ID;
390 frame_store[i].frame_store_id = -1;
391 frame_store[i].obj_surface = NULL;
/* Phase 2: add reference frames that are not yet in the frame store */
395 for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
396 VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[i];
399 if (ref_pic->flags & VA_PICTURE_H264_INVALID ||
400 ref_pic->picture_id == VA_INVALID_SURFACE ||
401 decode_state->reference_objects[i] == NULL)
/* Already tracked? Then nothing to insert */
404 for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
405 if (frame_store[j].surface_id == ref_pic->picture_id) {
411 /* add the new reference frame into the internal DPB */
414 struct object_surface *obj_surface = decode_state->reference_objects[i];
417 * Sometimes a dummy frame comes from the upper layer library, call i965_check_alloc_surface_bo()
418 * to make sure the store buffer is allocated for this reference frame
420 i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N', 'V', '1', '2'), SUBSAMPLE_YUV420);
422 /* Find a free frame store index */
423 for (frame_idx = 0; frame_idx < MAX_GEN_REFERENCE_FRAMES; frame_idx++) {
424 for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
425 if (frame_store[j].surface_id == VA_INVALID_ID ||
426 frame_store[j].obj_surface == NULL)
429 if (frame_store[j].frame_store_id == frame_idx) /* the store index is in use */
/* No occupied slot claims frame_idx: it is free */
433 if (j == MAX_GEN_REFERENCE_FRAMES)
437 assert(frame_idx < MAX_GEN_REFERENCE_FRAMES);
/* Place the new reference in the first empty slot */
439 for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
440 if (frame_store[j].surface_id == VA_INVALID_ID ||
441 frame_store[j].obj_surface == NULL) {
442 frame_store[j].surface_id = ref_pic->picture_id;
443 frame_store[j].frame_store_id = frame_idx;
444 frame_store[j].obj_surface = obj_surface;
/* Phase 3: selection-style swap so slot index matches frame_store_id */
452 for (i = 0; i < MAX_GEN_REFERENCE_FRAMES - 1; i++) {
453 if (frame_store[i].surface_id != VA_INVALID_ID &&
454 frame_store[i].obj_surface != NULL &&
455 frame_store[i].frame_store_id == i)
458 for (j = i + 1; j < MAX_GEN_REFERENCE_FRAMES; j++) {
459 if (frame_store[j].surface_id != VA_INVALID_ID &&
460 frame_store[j].obj_surface != NULL &&
461 frame_store[j].frame_store_id == i) {
462 VASurfaceID id = frame_store[i].surface_id;
463 int frame_idx = frame_store[i].frame_store_id;
464 struct object_surface *obj_surface = frame_store[i].obj_surface;
466 frame_store[i].surface_id = frame_store[j].surface_id;
467 frame_store[i].frame_store_id = frame_store[j].frame_store_id;
468 frame_store[i].obj_surface = frame_store[j].obj_surface;
469 frame_store[j].surface_id = id;
470 frame_store[j].frame_store_id = frame_idx;
471 frame_store[j].obj_surface = obj_surface;
/*
 * Builds the VC-1 frame store: slot 0 = forward reference, slot 1 =
 * backward reference (falling back to slot 0 when absent), and all
 * remaining slots replicate slots 0/1 alternately so every entry is
 * populated for the hardware.
 */
479 intel_update_vc1_frame_store_index(VADriverContextP ctx,
480 struct decode_state *decode_state,
481 VAPictureParameterBufferVC1 *pic_param,
482 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
484 struct object_surface *obj_surface;
/* Forward reference -> slot 0 (invalid if missing) */
487 obj_surface = decode_state->reference_objects[0];
489 if (pic_param->forward_reference_picture == VA_INVALID_ID ||
492 frame_store[0].surface_id = VA_INVALID_ID;
493 frame_store[0].obj_surface = NULL;
495 frame_store[0].surface_id = pic_param->forward_reference_picture;
496 frame_store[0].obj_surface = obj_surface;
/* Backward reference -> slot 1; reuse slot 0 when absent */
499 obj_surface = decode_state->reference_objects[1];
501 if (pic_param->backward_reference_picture == VA_INVALID_ID ||
504 frame_store[1].surface_id = frame_store[0].surface_id;
505 frame_store[1].obj_surface = frame_store[0].obj_surface;
507 frame_store[1].surface_id = pic_param->backward_reference_picture;
508 frame_store[1].obj_surface = obj_surface;
/* Fill the rest of the table by alternating slots 0 and 1 */
510 for (i = 2; i < MAX_GEN_REFERENCE_FRAMES; i++) {
511 frame_store[i].surface_id = frame_store[i % 2].surface_id;
512 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
/*
 * Validates the H.264 picture parameters before decoding: the current
 * picture must be valid and match the render target, and each entry of
 * ReferenceFrames[] is resolved to a surface object (NULL-ed when
 * invalid/missing). Returns VA_STATUS_SUCCESS on success,
 * VA_STATUS_ERROR_INVALID_PARAMETER otherwise.
 */
518 intel_decoder_check_avc_parameter(VADriverContextP ctx,
519 struct decode_state *decode_state)
521 struct i965_driver_data *i965 = i965_driver_data(ctx);
522 VAPictureParameterBufferH264 *pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
523 struct object_surface *obj_surface;
526 assert(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID));
527 assert(pic_param->CurrPic.picture_id != VA_INVALID_SURFACE);
/* Release builds: reject rather than crash on a bad current picture */
529 if (pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID ||
530 pic_param->CurrPic.picture_id == VA_INVALID_SURFACE)
533 assert(pic_param->CurrPic.picture_id == decode_state->current_render_target);
/* Current picture must be the surface the app asked us to render into */
535 if (pic_param->CurrPic.picture_id != decode_state->current_render_target)
/* Resolve each of the (up to 16) reference frames to a surface object */
538 for (i = 0; i < 16; i++) {
539 if (pic_param->ReferenceFrames[i].flags & VA_PICTURE_H264_INVALID ||
540 pic_param->ReferenceFrames[i].picture_id == VA_INVALID_SURFACE)
543 obj_surface = SURFACE(pic_param->ReferenceFrames[i].picture_id);
549 if (!obj_surface->bo) { /* a reference frame without store buffer */
550 WARN_ONCE("Invalid reference frame!!!\n");
553 decode_state->reference_objects[i] = obj_surface;
/* Remaining slots carry no reference */
558 decode_state->reference_objects[i] = NULL;
560 return VA_STATUS_SUCCESS;
563 return VA_STATUS_ERROR_INVALID_PARAMETER;
/*
 * Validates the MPEG-2 picture parameters: I pictures need no references,
 * P pictures resolve the forward reference, B pictures resolve forward and
 * backward references. Unusable references are stored as NULL; trailing
 * reference_objects[] slots are cleared. Returns VA_STATUS_SUCCESS or
 * VA_STATUS_ERROR_INVALID_PARAMETER.
 */
567 intel_decoder_check_mpeg2_parameter(VADriverContextP ctx,
568 struct decode_state *decode_state)
570 struct i965_driver_data *i965 = i965_driver_data(ctx);
571 VAPictureParameterBufferMPEG2 *pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
572 struct object_surface *obj_surface;
/* I picture: intra-only, no reference resolution needed */
575 if (pic_param->picture_coding_type == MPEG_I_PICTURE) {
576 } else if (pic_param->picture_coding_type == MPEG_P_PICTURE) {
577 obj_surface = SURFACE(pic_param->forward_reference_picture);
/* Missing/bo-less reference degrades to NULL rather than failing */
579 if (!obj_surface || !obj_surface->bo)
580 decode_state->reference_objects[i++] = NULL;
582 decode_state->reference_objects[i++] = obj_surface;
583 } else if (pic_param->picture_coding_type == MPEG_B_PICTURE) {
584 obj_surface = SURFACE(pic_param->forward_reference_picture);
586 if (!obj_surface || !obj_surface->bo)
587 decode_state->reference_objects[i++] = NULL;
589 decode_state->reference_objects[i++] = obj_surface;
591 obj_surface = SURFACE(pic_param->backward_reference_picture);
593 if (!obj_surface || !obj_surface->bo)
594 decode_state->reference_objects[i++] = NULL;
596 decode_state->reference_objects[i++] = obj_surface;
/* Clear the unused tail of the reference array */
601 decode_state->reference_objects[i] = NULL;
603 return VA_STATUS_SUCCESS;
606 return VA_STATUS_ERROR_INVALID_PARAMETER;
/*
 * Validates the VC-1 picture parameters, mirroring the MPEG-2 check:
 * intra picture types need no references, single-reference types resolve
 * the forward reference, and B pictures resolve forward and backward
 * references. picture_type values presumably follow the VA-API VC-1
 * convention (0:I, 1:P, 2:B, 3:BI, 4:skipped) — confirm against va_dec.
 */
610 intel_decoder_check_vc1_parameter(VADriverContextP ctx,
611 struct decode_state *decode_state)
613 struct i965_driver_data *i965 = i965_driver_data(ctx);
614 VAPictureParameterBufferVC1 *pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
615 struct object_surface *obj_surface;
/* Intra types: no references to resolve */
618 if (pic_param->picture_fields.bits.picture_type == 0 ||
619 pic_param->picture_fields.bits.picture_type == 3) {
620 } else if (pic_param->picture_fields.bits.picture_type == 1 ||
621 pic_param->picture_fields.bits.picture_type == 4) {
622 obj_surface = SURFACE(pic_param->forward_reference_picture);
/* Missing/bo-less reference degrades to NULL rather than failing */
624 if (!obj_surface || !obj_surface->bo)
625 decode_state->reference_objects[i++] = NULL;
627 decode_state->reference_objects[i++] = obj_surface;
628 } else if (pic_param->picture_fields.bits.picture_type == 2) {
629 obj_surface = SURFACE(pic_param->forward_reference_picture);
631 if (!obj_surface || !obj_surface->bo)
632 decode_state->reference_objects[i++] = NULL;
634 decode_state->reference_objects[i++] = obj_surface;
636 obj_surface = SURFACE(pic_param->backward_reference_picture);
638 if (!obj_surface || !obj_surface->bo)
639 decode_state->reference_objects[i++] = NULL;
641 decode_state->reference_objects[i++] = obj_surface;
/* Clear the unused tail of the reference array */
646 decode_state->reference_objects[i] = NULL;
648 return VA_STATUS_SUCCESS;
651 return VA_STATUS_ERROR_INVALID_PARAMETER;
655 intel_decoder_sanity_check_input(VADriverContextP ctx,
657 struct decode_state *decode_state)
659 struct i965_driver_data *i965 = i965_driver_data(ctx);
660 struct object_surface *obj_surface;
661 VAStatus vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
663 if (decode_state->current_render_target == VA_INVALID_SURFACE)
666 obj_surface = SURFACE(decode_state->current_render_target);
671 decode_state->render_object = obj_surface;
674 case VAProfileMPEG2Simple:
675 case VAProfileMPEG2Main:
676 vaStatus = intel_decoder_check_mpeg2_parameter(ctx, decode_state);
679 case VAProfileH264Baseline:
680 case VAProfileH264Main:
681 case VAProfileH264High:
682 vaStatus = intel_decoder_check_avc_parameter(ctx, decode_state);
685 case VAProfileVC1Simple:
686 case VAProfileVC1Main:
687 case VAProfileVC1Advanced:
688 vaStatus = intel_decoder_check_vc1_parameter(ctx, decode_state);
691 case VAProfileJPEGBaseline:
692 vaStatus = VA_STATUS_SUCCESS;
696 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;