/*
 * Copyright (C) 2006-2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
28 #include "intel_batchbuffer.h"
29 #include "i965_drv_video.h"
30 #include "i965_decoder_utils.h"
31 #include "i965_defines.h"
33 /* Set reference surface if backing store exists */
36 struct i965_driver_data *i965,
37 GenFrameStore *ref_frame,
38 VASurfaceID va_surface,
39 struct object_surface *obj_surface
42 if (va_surface == VA_INVALID_ID)
45 if (!obj_surface || !obj_surface->bo)
48 ref_frame->surface_id = va_surface;
49 ref_frame->obj_surface = obj_surface;
53 /* Check wether codec layer incorrectly fills in slice_vertical_position */
55 mpeg2_wa_slice_vertical_position(
56 struct decode_state *decode_state,
57 VAPictureParameterBufferMPEG2 *pic_param
60 unsigned int i, j, mb_height, vpos, last_vpos = 0;
62 /* Assume progressive sequence if we got a progressive frame */
63 if (pic_param->picture_coding_extension.bits.progressive_frame)
66 /* Wait for a field coded picture */
67 if (pic_param->picture_coding_extension.bits.picture_structure == MPEG_FRAME)
70 assert(decode_state && decode_state->slice_params);
72 mb_height = (pic_param->vertical_size + 31) / 32;
74 for (j = 0; j < decode_state->num_slice_params; j++) {
75 struct buffer_store * const buffer_store =
76 decode_state->slice_params[j];
78 for (i = 0; i < buffer_store->num_elements; i++) {
79 VASliceParameterBufferMPEG2 * const slice_param =
80 ((VASliceParameterBufferMPEG2 *)buffer_store->buffer) + i;
82 vpos = slice_param->slice_vertical_position;
83 if (vpos >= mb_height || vpos == last_vpos + 2) {
84 WARN_ONCE("codec layer incorrectly fills in MPEG-2 slice_vertical_position. Workaround applied\n");
93 /* Build MPEG-2 reference frames array */
95 mpeg2_set_reference_surfaces(
97 GenFrameStore ref_frames[MAX_GEN_REFERENCE_FRAMES],
98 struct decode_state *decode_state,
99 VAPictureParameterBufferMPEG2 *pic_param
102 struct i965_driver_data * const i965 = i965_driver_data(ctx);
103 VASurfaceID va_surface;
104 unsigned pic_structure, is_second_field, n = 0;
105 struct object_surface *obj_surface;
107 pic_structure = pic_param->picture_coding_extension.bits.picture_structure;
108 is_second_field = pic_structure != MPEG_FRAME &&
109 !pic_param->picture_coding_extension.bits.is_first_field;
111 ref_frames[0].surface_id = VA_INVALID_ID;
112 ref_frames[0].obj_surface = NULL;
114 /* Reference frames are indexed by frame store ID (0:top, 1:bottom) */
115 switch (pic_param->picture_coding_type) {
117 if (is_second_field && pic_structure == MPEG_BOTTOM_FIELD) {
118 va_surface = decode_state->current_render_target;
119 obj_surface = decode_state->render_object;
120 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
122 va_surface = pic_param->forward_reference_picture;
123 obj_surface = decode_state->reference_objects[0];
124 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
128 va_surface = pic_param->forward_reference_picture;
129 obj_surface = decode_state->reference_objects[0];
130 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
131 va_surface = pic_param->backward_reference_picture;
132 obj_surface = decode_state->reference_objects[1];
133 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
138 ref_frames[n].obj_surface = ref_frames[0].obj_surface;
139 ref_frames[n++].surface_id = ref_frames[0].surface_id;
142 if (pic_param->picture_coding_extension.bits.frame_pred_frame_dct)
145 ref_frames[2].surface_id = VA_INVALID_ID;
146 ref_frames[2].obj_surface = NULL;
148 /* Bottom field pictures used as reference */
149 switch (pic_param->picture_coding_type) {
151 if (is_second_field && pic_structure == MPEG_TOP_FIELD) {
152 va_surface = decode_state->current_render_target;
153 obj_surface = decode_state->render_object;
154 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
156 va_surface = pic_param->forward_reference_picture;
157 obj_surface = decode_state->reference_objects[0];
158 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
162 va_surface = pic_param->forward_reference_picture;
163 obj_surface = decode_state->reference_objects[0];
164 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
165 va_surface = pic_param->backward_reference_picture;
166 obj_surface = decode_state->reference_objects[1];
167 n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
172 ref_frames[n].obj_surface = ref_frames[2].obj_surface;
173 ref_frames[n++].surface_id = ref_frames[2].surface_id;
177 /* Ensure the supplied VA surface has valid storage for decoding the
180 avc_ensure_surface_bo(
181 VADriverContextP ctx,
182 struct decode_state *decode_state,
183 struct object_surface *obj_surface,
184 const VAPictureParameterBufferH264 *pic_param
188 uint32_t hw_fourcc, fourcc, subsample, chroma_format;
190 /* Validate chroma format */
191 switch (pic_param->seq_fields.bits.chroma_format_idc) {
193 fourcc = VA_FOURCC_Y800;
194 subsample = SUBSAMPLE_YUV400;
195 chroma_format = VA_RT_FORMAT_YUV400;
198 fourcc = VA_FOURCC_NV12;
199 subsample = SUBSAMPLE_YUV420;
200 chroma_format = VA_RT_FORMAT_YUV420;
203 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
206 /* Determine the HW surface format, bound to VA config needs */
207 if ((decode_state->base.chroma_formats & chroma_format) == chroma_format)
212 case VA_FOURCC_Y800: // Implement with an NV12 surface
213 if (decode_state->base.chroma_formats & VA_RT_FORMAT_YUV420) {
214 hw_fourcc = VA_FOURCC_NV12;
215 subsample = SUBSAMPLE_YUV420;
221 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
223 /* (Re-)allocate the underlying surface buffer store, if necessary */
224 if (!obj_surface->bo || obj_surface->fourcc != hw_fourcc) {
225 struct i965_driver_data * const i965 = i965_driver_data(ctx);
227 i965_destroy_surface_storage(obj_surface);
228 va_status = i965_check_alloc_surface_bo(ctx, obj_surface,
229 i965->codec_info->has_tiled_surface, hw_fourcc, subsample);
230 if (va_status != VA_STATUS_SUCCESS)
234 /* Fake chroma components if grayscale is implemented on top of NV12 */
235 if (fourcc == VA_FOURCC_Y800 && hw_fourcc == VA_FOURCC_NV12) {
236 const uint32_t uv_offset = obj_surface->width * obj_surface->height;
237 const uint32_t uv_size = obj_surface->width * obj_surface->height / 2;
239 drm_intel_gem_bo_map_gtt(obj_surface->bo);
240 memset(obj_surface->bo->virtual + uv_offset, 0x80, uv_size);
241 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
243 return VA_STATUS_SUCCESS;
246 /* Generate flat scaling matrices for H.264 decoding */
248 avc_gen_default_iq_matrix(VAIQMatrixBufferH264 *iq_matrix)
251 memset(&iq_matrix->ScalingList4x4, 16, sizeof(iq_matrix->ScalingList4x4));
254 memset(&iq_matrix->ScalingList8x8, 16, sizeof(iq_matrix->ScalingList8x8));
257 /* Get first macroblock bit offset for BSD, minus EPB count (AVC) */
258 /* XXX: slice_data_bit_offset does not account for EPB */
260 avc_get_first_mb_bit_offset(
261 dri_bo *slice_data_bo,
262 VASliceParameterBufferH264 *slice_param,
263 unsigned int mode_flag
266 unsigned int slice_data_bit_offset = slice_param->slice_data_bit_offset;
268 if (mode_flag == ENTROPY_CABAC)
269 slice_data_bit_offset = ALIGN(slice_data_bit_offset, 0x8);
270 return slice_data_bit_offset;
273 /* Get first macroblock bit offset for BSD, with EPB count (AVC) */
274 /* XXX: slice_data_bit_offset does not account for EPB */
276 avc_get_first_mb_bit_offset_with_epb(
277 dri_bo *slice_data_bo,
278 VASliceParameterBufferH264 *slice_param,
279 unsigned int mode_flag
282 unsigned int in_slice_data_bit_offset = slice_param->slice_data_bit_offset;
283 unsigned int out_slice_data_bit_offset;
284 unsigned int i, j, n, buf_size, data_size, header_size;
288 header_size = slice_param->slice_data_bit_offset / 8;
289 data_size = slice_param->slice_data_size - slice_param->slice_data_offset;
290 buf_size = (header_size * 3 + 1) / 2; // Max possible header size (x1.5)
292 if (buf_size > data_size)
293 buf_size = data_size;
295 buf = alloca(buf_size);
296 ret = dri_bo_get_subdata(
297 slice_data_bo, slice_param->slice_data_offset,
302 for (i = 2, j = 2, n = 0; i < buf_size && j < header_size; i++, j++) {
303 if (buf[i] == 0x03 && buf[i - 1] == 0x00 && buf[i - 2] == 0x00)
307 out_slice_data_bit_offset = in_slice_data_bit_offset + n * 8;
309 if (mode_flag == ENTROPY_CABAC)
310 out_slice_data_bit_offset = ALIGN(out_slice_data_bit_offset, 0x8);
311 return out_slice_data_bit_offset;
314 static inline uint8_t
315 get_ref_idx_state_1(const VAPictureH264 *va_pic, unsigned int frame_store_id)
317 const unsigned int is_long_term =
318 !!(va_pic->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE);
319 const unsigned int is_top_field =
320 !!(va_pic->flags & VA_PICTURE_H264_TOP_FIELD);
321 const unsigned int is_bottom_field =
322 !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
324 return ((is_long_term << 6) |
325 ((is_top_field ^ is_bottom_field ^ 1) << 5) |
326 (frame_store_id << 1) |
327 ((is_top_field ^ 1) & is_bottom_field));
330 /* Fill in Reference List Entries (Gen5+: ILK, SNB, IVB) */
332 gen5_fill_avc_ref_idx_state(
334 const VAPictureH264 ref_list[32],
335 unsigned int ref_list_count,
336 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
341 for (i = 0; i < ref_list_count; i++) {
342 const VAPictureH264 * const va_pic = &ref_list[i];
344 if ((va_pic->flags & VA_PICTURE_H264_INVALID) ||
345 va_pic->picture_id == VA_INVALID_ID) {
350 for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
351 if (frame_store[j].surface_id == va_pic->picture_id)
355 if (j != MAX_GEN_REFERENCE_FRAMES) { // Found picture in the Frame Store
356 const GenFrameStore * const fs = &frame_store[j];
357 assert(fs->frame_store_id == j); // Current architecture/assumption
358 state[i] = get_ref_idx_state_1(va_pic, fs->frame_store_id);
361 WARN_ONCE("Invalid RefPicListX[] entry!!! It is not included in DPB\n");
362 state[i] = get_ref_idx_state_1(va_pic, 0) | 0x80;
370 /* Emit Reference List Entries (Gen6+: SNB, IVB) */
372 gen6_send_avc_ref_idx_state_1(
373 struct intel_batchbuffer *batch,
375 const VAPictureH264 *ref_list,
376 unsigned int ref_list_count,
377 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
380 uint8_t ref_idx_state[32];
382 BEGIN_BCS_BATCH(batch, 10);
383 OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | (10 - 2));
384 OUT_BCS_BATCH(batch, list);
385 gen5_fill_avc_ref_idx_state(
387 ref_list, ref_list_count,
390 intel_batchbuffer_data(batch, ref_idx_state, sizeof(ref_idx_state));
391 ADVANCE_BCS_BATCH(batch);
395 gen6_send_avc_ref_idx_state(
396 struct intel_batchbuffer *batch,
397 const VASliceParameterBufferH264 *slice_param,
398 const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
401 if (slice_param->slice_type == SLICE_TYPE_I ||
402 slice_param->slice_type == SLICE_TYPE_SI)
406 gen6_send_avc_ref_idx_state_1(
408 slice_param->RefPicList0, slice_param->num_ref_idx_l0_active_minus1 + 1,
412 if (slice_param->slice_type != SLICE_TYPE_B)
416 gen6_send_avc_ref_idx_state_1(
418 slice_param->RefPicList1, slice_param->num_ref_idx_l1_active_minus1 + 1,
424 intel_update_avc_frame_store_index(
425 VADriverContextP ctx,
426 struct decode_state *decode_state,
427 VAPictureParameterBufferH264 *pic_param,
428 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
431 GenFrameStore *free_refs[MAX_GEN_REFERENCE_FRAMES];
432 int i, j, n, num_free_refs;
434 /* Remove obsolete entries from the internal DPB */
435 for (i = 0, n = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
436 GenFrameStore * const fs = &frame_store[i];
437 if (fs->surface_id == VA_INVALID_ID || !fs->obj_surface) {
442 // Find whether the current entry is still a valid reference frame
443 for (j = 0; j < ARRAY_ELEMS(decode_state->reference_objects); j++) {
444 struct object_surface * const obj_surface =
445 decode_state->reference_objects[j];
446 if (obj_surface && obj_surface == fs->obj_surface)
451 if (j == ARRAY_ELEMS(decode_state->reference_objects)) {
452 fs->surface_id = VA_INVALID_ID;
453 fs->obj_surface = NULL;
454 fs->frame_store_id = -1;
460 /* Append the new reference frames */
461 for (i = 0, n = 0; i < ARRAY_ELEMS(decode_state->reference_objects); i++) {
462 struct object_surface * const obj_surface =
463 decode_state->reference_objects[i];
467 // Find whether the current frame is not already in our frame store
468 for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
469 GenFrameStore * const fs = &frame_store[j];
470 if (fs->obj_surface == obj_surface)
475 if (j == MAX_GEN_REFERENCE_FRAMES) {
476 if (n < num_free_refs) {
477 GenFrameStore * const fs = free_refs[n++];
478 fs->surface_id = obj_surface->base.id;
479 fs->obj_surface = obj_surface;
480 fs->frame_store_id = fs - frame_store;
483 WARN_ONCE("No free slot found for DPB reference list!!!\n");
489 intel_update_vc1_frame_store_index(VADriverContextP ctx,
490 struct decode_state *decode_state,
491 VAPictureParameterBufferVC1 *pic_param,
492 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
494 struct object_surface *obj_surface;
497 obj_surface = decode_state->reference_objects[0];
499 if (pic_param->forward_reference_picture == VA_INVALID_ID ||
502 frame_store[0].surface_id = VA_INVALID_ID;
503 frame_store[0].obj_surface = NULL;
505 frame_store[0].surface_id = pic_param->forward_reference_picture;
506 frame_store[0].obj_surface = obj_surface;
509 obj_surface = decode_state->reference_objects[1];
511 if (pic_param->backward_reference_picture == VA_INVALID_ID ||
514 frame_store[1].surface_id = frame_store[0].surface_id;
515 frame_store[1].obj_surface = frame_store[0].obj_surface;
517 frame_store[1].surface_id = pic_param->backward_reference_picture;
518 frame_store[1].obj_surface = obj_surface;
520 for (i = 2; i < MAX_GEN_REFERENCE_FRAMES; i++) {
521 frame_store[i].surface_id = frame_store[i % 2].surface_id;
522 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
528 intel_update_vp8_frame_store_index(VADriverContextP ctx,
529 struct decode_state *decode_state,
530 VAPictureParameterBufferVP8 *pic_param,
531 GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
533 struct object_surface *obj_surface;
536 obj_surface = decode_state->reference_objects[0];
538 if (pic_param->last_ref_frame == VA_INVALID_ID ||
541 frame_store[0].surface_id = VA_INVALID_ID;
542 frame_store[0].obj_surface = NULL;
544 frame_store[0].surface_id = pic_param->last_ref_frame;
545 frame_store[0].obj_surface = obj_surface;
548 obj_surface = decode_state->reference_objects[1];
550 if (pic_param->golden_ref_frame == VA_INVALID_ID ||
553 frame_store[1].surface_id = frame_store[0].surface_id;
554 frame_store[1].obj_surface = frame_store[0].obj_surface;
556 frame_store[1].surface_id = pic_param->golden_ref_frame;
557 frame_store[1].obj_surface = obj_surface;
560 obj_surface = decode_state->reference_objects[2];
562 if (pic_param->alt_ref_frame == VA_INVALID_ID ||
565 frame_store[2].surface_id = frame_store[0].surface_id;
566 frame_store[2].obj_surface = frame_store[0].obj_surface;
568 frame_store[2].surface_id = pic_param->alt_ref_frame;
569 frame_store[2].obj_surface = obj_surface;
572 for (i = 3; i < MAX_GEN_REFERENCE_FRAMES; i++) {
573 frame_store[i].surface_id = frame_store[i % 2].surface_id;
574 frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
580 intel_decoder_check_avc_parameter(VADriverContextP ctx,
581 VAProfile h264_profile,
582 struct decode_state *decode_state)
584 struct i965_driver_data *i965 = i965_driver_data(ctx);
585 VAPictureParameterBufferH264 *pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
587 struct object_surface *obj_surface;
590 assert(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID));
591 assert(pic_param->CurrPic.picture_id != VA_INVALID_SURFACE);
593 if (pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID ||
594 pic_param->CurrPic.picture_id == VA_INVALID_SURFACE)
597 assert(pic_param->CurrPic.picture_id == decode_state->current_render_target);
599 if (pic_param->CurrPic.picture_id != decode_state->current_render_target)
602 if ((h264_profile != VAProfileH264Baseline)) {
603 if (pic_param->num_slice_groups_minus1 ||
604 pic_param->pic_fields.bits.redundant_pic_cnt_present_flag) {
605 WARN_ONCE("Unsupported the FMO/ASO constraints!!!\n");
610 /* Fill in the reference objects array with the actual VA surface
611 objects with 1:1 correspondance with any entry in ReferenceFrames[],
612 i.e. including "holes" for invalid entries, that are expanded
613 to NULL in the reference_objects[] array */
614 for (i = 0; i < ARRAY_ELEMS(pic_param->ReferenceFrames); i++) {
615 const VAPictureH264 * const va_pic = &pic_param->ReferenceFrames[i];
618 if (!(va_pic->flags & VA_PICTURE_H264_INVALID) &&
619 va_pic->picture_id != VA_INVALID_ID) {
620 obj_surface = SURFACE(pic_param->ReferenceFrames[i].picture_id);
622 return VA_STATUS_ERROR_INVALID_SURFACE;
625 * Sometimes a dummy frame comes from the upper layer
626 * library, call i965_check_alloc_surface_bo() to make
627 * sure the store buffer is allocated for this reference
630 va_status = avc_ensure_surface_bo(ctx, decode_state, obj_surface,
632 if (va_status != VA_STATUS_SUCCESS)
635 decode_state->reference_objects[i] = obj_surface;
637 return VA_STATUS_SUCCESS;
640 return VA_STATUS_ERROR_INVALID_PARAMETER;
644 intel_decoder_check_mpeg2_parameter(VADriverContextP ctx,
645 struct decode_state *decode_state)
647 struct i965_driver_data *i965 = i965_driver_data(ctx);
648 VAPictureParameterBufferMPEG2 *pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
649 struct object_surface *obj_surface;
652 if (pic_param->picture_coding_type == MPEG_I_PICTURE) {
653 } else if (pic_param->picture_coding_type == MPEG_P_PICTURE) {
654 obj_surface = SURFACE(pic_param->forward_reference_picture);
656 if (!obj_surface || !obj_surface->bo)
657 decode_state->reference_objects[i++] = NULL;
659 decode_state->reference_objects[i++] = obj_surface;
660 } else if (pic_param->picture_coding_type == MPEG_B_PICTURE) {
661 obj_surface = SURFACE(pic_param->forward_reference_picture);
663 if (!obj_surface || !obj_surface->bo)
664 decode_state->reference_objects[i++] = NULL;
666 decode_state->reference_objects[i++] = obj_surface;
668 obj_surface = SURFACE(pic_param->backward_reference_picture);
670 if (!obj_surface || !obj_surface->bo)
671 decode_state->reference_objects[i++] = NULL;
673 decode_state->reference_objects[i++] = obj_surface;
678 decode_state->reference_objects[i] = NULL;
680 return VA_STATUS_SUCCESS;
683 return VA_STATUS_ERROR_INVALID_PARAMETER;
687 intel_decoder_check_vc1_parameter(VADriverContextP ctx,
688 struct decode_state *decode_state)
690 struct i965_driver_data *i965 = i965_driver_data(ctx);
691 VAPictureParameterBufferVC1 *pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
692 struct object_surface *obj_surface;
695 if (pic_param->sequence_fields.bits.interlace == 1 &&
696 pic_param->picture_fields.bits.frame_coding_mode != 0) { /* frame-interlace or field-interlace */
697 return VA_STATUS_ERROR_DECODING_ERROR;
700 if (pic_param->picture_fields.bits.picture_type == 0 ||
701 pic_param->picture_fields.bits.picture_type == 3) {
702 } else if (pic_param->picture_fields.bits.picture_type == 1 ||
703 pic_param->picture_fields.bits.picture_type == 4) {
704 obj_surface = SURFACE(pic_param->forward_reference_picture);
706 if (!obj_surface || !obj_surface->bo)
707 decode_state->reference_objects[i++] = NULL;
709 decode_state->reference_objects[i++] = obj_surface;
710 } else if (pic_param->picture_fields.bits.picture_type == 2) {
711 obj_surface = SURFACE(pic_param->forward_reference_picture);
713 if (!obj_surface || !obj_surface->bo)
714 decode_state->reference_objects[i++] = NULL;
716 decode_state->reference_objects[i++] = obj_surface;
718 obj_surface = SURFACE(pic_param->backward_reference_picture);
720 if (!obj_surface || !obj_surface->bo)
721 decode_state->reference_objects[i++] = NULL;
723 decode_state->reference_objects[i++] = obj_surface;
728 decode_state->reference_objects[i] = NULL;
730 return VA_STATUS_SUCCESS;
733 return VA_STATUS_ERROR_INVALID_PARAMETER;
737 intel_decoder_check_vp8_parameter(VADriverContextP ctx,
738 struct decode_state *decode_state)
740 struct i965_driver_data *i965 = i965_driver_data(ctx);
741 VAPictureParameterBufferVP8 *pic_param = (VAPictureParameterBufferVP8 *)decode_state->pic_param->buffer;
742 struct object_surface *obj_surface;
745 if (pic_param->last_ref_frame != VA_INVALID_SURFACE) {
746 obj_surface = SURFACE(pic_param->last_ref_frame);
748 if (obj_surface && obj_surface->bo)
749 decode_state->reference_objects[i++] = obj_surface;
751 decode_state->reference_objects[i++] = NULL;
754 if (pic_param->golden_ref_frame != VA_INVALID_SURFACE) {
755 obj_surface = SURFACE(pic_param->golden_ref_frame);
757 if (obj_surface && obj_surface->bo)
758 decode_state->reference_objects[i++] = obj_surface;
760 decode_state->reference_objects[i++] = NULL;
763 if (pic_param->alt_ref_frame != VA_INVALID_SURFACE) {
764 obj_surface = SURFACE(pic_param->alt_ref_frame);
766 if (obj_surface && obj_surface->bo)
767 decode_state->reference_objects[i++] = obj_surface;
769 decode_state->reference_objects[i++] = NULL;
773 decode_state->reference_objects[i] = NULL;
775 return VA_STATUS_SUCCESS;
779 intel_decoder_sanity_check_input(VADriverContextP ctx,
781 struct decode_state *decode_state)
783 struct i965_driver_data *i965 = i965_driver_data(ctx);
784 struct object_surface *obj_surface;
785 VAStatus vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
787 if (decode_state->current_render_target == VA_INVALID_SURFACE)
790 obj_surface = SURFACE(decode_state->current_render_target);
795 decode_state->render_object = obj_surface;
798 case VAProfileMPEG2Simple:
799 case VAProfileMPEG2Main:
800 vaStatus = intel_decoder_check_mpeg2_parameter(ctx, decode_state);
803 case VAProfileH264ConstrainedBaseline:
804 case VAProfileH264Main:
805 case VAProfileH264High:
806 vaStatus = intel_decoder_check_avc_parameter(ctx, profile, decode_state);
809 case VAProfileVC1Simple:
810 case VAProfileVC1Main:
811 case VAProfileVC1Advanced:
812 vaStatus = intel_decoder_check_vc1_parameter(ctx, decode_state);
815 case VAProfileJPEGBaseline:
816 vaStatus = VA_STATUS_SUCCESS;
819 case VAProfileVP8Version0_3:
820 vaStatus = intel_decoder_check_vp8_parameter(ctx, decode_state);
824 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
833 * Return the next slice paramter
836 * slice_param: the current slice
837 * *group_idx & *element_idx the current slice position in slice groups
839 * Return the next slice parameter
840 * *group_idx & *element_idx the next slice position in slice groups,
841 * if the next slice is NULL, *group_idx & *element_idx will be ignored
843 VASliceParameterBufferMPEG2 *
844 intel_mpeg2_find_next_slice(struct decode_state *decode_state,
845 VAPictureParameterBufferMPEG2 *pic_param,
846 VASliceParameterBufferMPEG2 *slice_param,
850 VASliceParameterBufferMPEG2 *next_slice_param;
851 unsigned int width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
852 int j = *group_idx, i = *element_idx + 1;
854 for (; j < decode_state->num_slice_params; j++) {
855 for (; i < decode_state->slice_params[j]->num_elements; i++) {
856 next_slice_param = ((VASliceParameterBufferMPEG2 *)decode_state->slice_params[j]->buffer) + i;
858 if ((next_slice_param->slice_vertical_position * width_in_mbs + next_slice_param->slice_horizontal_position) >=
859 (slice_param->slice_vertical_position * width_in_mbs + slice_param->slice_horizontal_position)) {
863 return next_slice_param;
873 /* Ensure the segmentation buffer is large enough for the supplied
874 number of MBs, or re-allocate it */
876 intel_ensure_vp8_segmentation_buffer(VADriverContextP ctx, GenBuffer *buf,
877 unsigned int mb_width, unsigned int mb_height)
879 struct i965_driver_data * const i965 = i965_driver_data(ctx);
880 /* The segmentation map is a 64-byte aligned linear buffer, with
881 each cache line holding only 8 bits for 4 continuous MBs */
882 const unsigned int buf_size = ((mb_width + 3) / 4) * 64 * mb_height;
885 if (buf->bo && buf->bo->size >= buf_size)
887 drm_intel_bo_unreference(buf->bo);
891 buf->bo = drm_intel_bo_alloc(i965->intel.bufmgr, "segmentation map",
893 buf->valid = buf->bo != NULL;