Check the pointer against NULL
[platform/upstream/libva-intel-driver.git] / src / i965_decoder_utils.c
1 /*
2  * Copyright (C) 2006-2012 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  */
23
24 #include "sysdeps.h"
25
26 #include <alloca.h>
27
28 #include "intel_batchbuffer.h"
29 #include "i965_drv_video.h"
30 #include "i965_decoder_utils.h"
31 #include "i965_defines.h"
32
33 /* Set reference surface if backing store exists */
34 static inline int
35 set_ref_frame(
36     struct i965_driver_data *i965,
37     GenFrameStore           *ref_frame,
38     VASurfaceID              va_surface,
39     struct object_surface   *obj_surface
40 )
41 {
42     if (va_surface == VA_INVALID_ID)
43         return 0;
44
45     if (!obj_surface || !obj_surface->bo)
46         return 0;
47
48     ref_frame->surface_id = va_surface;
49     ref_frame->obj_surface = obj_surface;
50     return 1;
51 }
52
/* Check whether codec layer incorrectly fills in slice_vertical_position */
/* Returns 1 if the workaround is needed, 0 if not, -1 if it cannot be
   determined yet (frame-coded picture in a possibly interlaced sequence) */
int
mpeg2_wa_slice_vertical_position(
    struct decode_state           *decode_state,
    VAPictureParameterBufferMPEG2 *pic_param
)
{
    unsigned int i, j, mb_height, vpos, last_vpos = 0;

    /* Assume progressive sequence if we got a progressive frame */
    if (pic_param->picture_coding_extension.bits.progressive_frame)
        return 0;

    /* Wait for a field coded picture */
    if (pic_param->picture_coding_extension.bits.picture_structure == MPEG_FRAME)
        return -1;

    assert(decode_state && decode_state->slice_params);

    /* Field picture: one MB row covers 32 luma lines of the frame */
    mb_height = (pic_param->vertical_size + 31) / 32;

    for (j = 0; j < decode_state->num_slice_params; j++) {
        struct buffer_store * const buffer_store =
            decode_state->slice_params[j];

        for (i = 0; i < buffer_store->num_elements; i++) {
            VASliceParameterBufferMPEG2 * const slice_param =
                ((VASliceParameterBufferMPEG2 *)buffer_store->buffer) + i;

            vpos = slice_param->slice_vertical_position;
            /* A position beyond the field height, or a jump of exactly
               two rows, indicates the codec layer supplied frame-based
               (not field-based) vertical positions */
            if (vpos >= mb_height || vpos == last_vpos + 2) {
                WARN_ONCE("codec layer incorrectly fills in MPEG-2 slice_vertical_position. Workaround applied\n");
                return 1;
            }
            last_vpos = vpos;
        }
    }
    return 0;
}
92
93 /* Build MPEG-2 reference frames array */
void
mpeg2_set_reference_surfaces(
    VADriverContextP               ctx,
    GenFrameStore                  ref_frames[MAX_GEN_REFERENCE_FRAMES],
    struct decode_state           *decode_state,
    VAPictureParameterBufferMPEG2 *pic_param
)
{
    struct i965_driver_data * const i965 = i965_driver_data(ctx);
    VASurfaceID va_surface;
    unsigned pic_structure, is_second_field, n = 0;
    struct object_surface *obj_surface;

    pic_structure = pic_param->picture_coding_extension.bits.picture_structure;
    /* Second field of a field pair: the already-decoded first field of
       the current frame can itself serve as a reference */
    is_second_field = pic_structure != MPEG_FRAME &&
        !pic_param->picture_coding_extension.bits.is_first_field;

    ref_frames[0].surface_id = VA_INVALID_ID;
    ref_frames[0].obj_surface = NULL;

    /* Reference frames are indexed by frame store ID  (0:top, 1:bottom) */
    switch (pic_param->picture_coding_type) {
    case MPEG_P_PICTURE:
        /* When decoding the bottom field second, the current render
           target (holding the top field) is a reference too */
        if (is_second_field && pic_structure == MPEG_BOTTOM_FIELD) {
            va_surface = decode_state->current_render_target;
            obj_surface = decode_state->render_object;
            n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
        }
        va_surface = pic_param->forward_reference_picture;
        obj_surface = decode_state->reference_objects[0];
        n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
        break;

    case MPEG_B_PICTURE:
        va_surface = pic_param->forward_reference_picture;
        obj_surface = decode_state->reference_objects[0];
        n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
        va_surface = pic_param->backward_reference_picture;
        obj_surface = decode_state->reference_objects[1];
        n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
        break;
    }

    /* Replicate entry 0 so both top-field stores are populated */
    while (n != 2) {
        ref_frames[n].obj_surface = ref_frames[0].obj_surface;
        ref_frames[n++].surface_id = ref_frames[0].surface_id;
    }

    /* Frame prediction only: the bottom-field stores are not used */
    if (pic_param->picture_coding_extension.bits.frame_pred_frame_dct)
        return;

    ref_frames[2].surface_id = VA_INVALID_ID;
    ref_frames[2].obj_surface = NULL;

    /* Bottom field pictures used as reference */
    switch (pic_param->picture_coding_type) {
    case MPEG_P_PICTURE:
        if (is_second_field && pic_structure == MPEG_TOP_FIELD) {
            va_surface = decode_state->current_render_target;
            obj_surface = decode_state->render_object;
            n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
        }
        va_surface = pic_param->forward_reference_picture;
        obj_surface = decode_state->reference_objects[0];
        n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
        break;

    case MPEG_B_PICTURE:
        va_surface = pic_param->forward_reference_picture;
        obj_surface = decode_state->reference_objects[0];
        n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
        va_surface = pic_param->backward_reference_picture;
        obj_surface = decode_state->reference_objects[1];
        n += set_ref_frame(i965, &ref_frames[n], va_surface, obj_surface);
        break;
    }

    /* Replicate entry 2 so frame stores 2 and 3 are both populated */
    while (n != 4) {
        ref_frames[n].obj_surface = ref_frames[2].obj_surface;
        ref_frames[n++].surface_id = ref_frames[2].surface_id;
    }
}
176
177 /* Ensure the supplied VA surface has valid storage for decoding the
178    current picture */
179 VAStatus
180 avc_ensure_surface_bo(
181     VADriverContextP                    ctx,
182     struct decode_state                *decode_state,
183     struct object_surface              *obj_surface,
184     const VAPictureParameterBufferH264 *pic_param
185 )
186 {
187     VAStatus va_status;
188     uint32_t hw_fourcc, fourcc, subsample, chroma_format;
189
190     /* Validate chroma format */
191     switch (pic_param->seq_fields.bits.chroma_format_idc) {
192     case 0: // Grayscale
193         fourcc = VA_FOURCC_Y800;
194         subsample = SUBSAMPLE_YUV400;
195         chroma_format = VA_RT_FORMAT_YUV400;
196         break;
197     case 1: // YUV 4:2:0
198         fourcc = VA_FOURCC_NV12;
199         subsample = SUBSAMPLE_YUV420;
200         chroma_format = VA_RT_FORMAT_YUV420;
201         break;
202     default:
203         return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
204     }
205
206     /* Determine the HW surface format, bound to VA config needs */
207     if ((decode_state->base.chroma_formats & chroma_format) == chroma_format)
208         hw_fourcc = fourcc;
209     else {
210         hw_fourcc = 0;
211         switch (fourcc) {
212         case VA_FOURCC_Y800: // Implement with an NV12 surface
213             if (decode_state->base.chroma_formats & VA_RT_FORMAT_YUV420) {
214                 hw_fourcc = VA_FOURCC_NV12;
215                 subsample = SUBSAMPLE_YUV420;
216             }
217             break;
218         }
219     }
220     if (!hw_fourcc)
221         return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
222
223     /* (Re-)allocate the underlying surface buffer store, if necessary */
224     if (!obj_surface->bo || obj_surface->fourcc != hw_fourcc) {
225         struct i965_driver_data * const i965 = i965_driver_data(ctx);
226
227         i965_destroy_surface_storage(obj_surface);
228         va_status = i965_check_alloc_surface_bo(ctx, obj_surface,
229             i965->codec_info->has_tiled_surface, hw_fourcc, subsample);
230         if (va_status != VA_STATUS_SUCCESS)
231             return va_status;
232     }
233
234     /* Fake chroma components if grayscale is implemented on top of NV12 */
235     if (fourcc == VA_FOURCC_Y800 && hw_fourcc == VA_FOURCC_NV12) {
236         const uint32_t uv_offset = obj_surface->width * obj_surface->height;
237         const uint32_t uv_size   = obj_surface->width * obj_surface->height / 2;
238
239         drm_intel_gem_bo_map_gtt(obj_surface->bo);
240         memset(obj_surface->bo->virtual + uv_offset, 0x80, uv_size);
241         drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
242     }
243     return VA_STATUS_SUCCESS;
244 }
245
246 /* Generate flat scaling matrices for H.264 decoding */
247 void
248 avc_gen_default_iq_matrix(VAIQMatrixBufferH264 *iq_matrix)
249 {
250     /* Flat_4x4_16 */
251     memset(&iq_matrix->ScalingList4x4, 16, sizeof(iq_matrix->ScalingList4x4));
252
253     /* Flat_8x8_16 */
254     memset(&iq_matrix->ScalingList8x8, 16, sizeof(iq_matrix->ScalingList8x8));
255 }
256
257 /* Get first macroblock bit offset for BSD, minus EPB count (AVC) */
258 /* XXX: slice_data_bit_offset does not account for EPB */
259 unsigned int
260 avc_get_first_mb_bit_offset(
261     dri_bo                     *slice_data_bo,
262     VASliceParameterBufferH264 *slice_param,
263     unsigned int                mode_flag
264 )
265 {
266     unsigned int slice_data_bit_offset = slice_param->slice_data_bit_offset;
267
268     if (mode_flag == ENTROPY_CABAC)
269         slice_data_bit_offset = ALIGN(slice_data_bit_offset, 0x8);
270     return slice_data_bit_offset;
271 }
272
273 /* Get first macroblock bit offset for BSD, with EPB count (AVC) */
274 /* XXX: slice_data_bit_offset does not account for EPB */
unsigned int
avc_get_first_mb_bit_offset_with_epb(
    dri_bo                     *slice_data_bo,
    VASliceParameterBufferH264 *slice_param,
    unsigned int                mode_flag
)
{
    unsigned int in_slice_data_bit_offset = slice_param->slice_data_bit_offset;
    unsigned int out_slice_data_bit_offset;
    unsigned int i, j, n, buf_size, data_size, header_size;
    uint8_t *buf;
    int ret;

    /* Slice header size in bytes, as reported by the codec layer
       (which did not count emulation prevention bytes) */
    header_size = slice_param->slice_data_bit_offset / 8;
    data_size   = slice_param->slice_data_size - slice_param->slice_data_offset;
    buf_size    = (header_size * 3 + 1) / 2; // Max possible header size (x1.5)

    if (buf_size > data_size)
        buf_size = data_size;

    /* Copy the header bytes out of the bo so they can be scanned */
    buf = alloca(buf_size);
    ret = dri_bo_get_subdata(
        slice_data_bo, slice_param->slice_data_offset,
        buf_size, buf
    );
    assert(ret == 0);

    /* Count emulation prevention bytes (0x03 following 0x00 0x00) in the
       header: i walks the raw bytes, j counts EPB-free header bytes,
       n counts the EPBs found */
    for (i = 2, j = 2, n = 0; i < buf_size && j < header_size; i++, j++) {
        if (buf[i] == 0x03 && buf[i - 1] == 0x00 && buf[i - 2] == 0x00)
            i += 2, j++, n++;
    }

    /* Each skipped EPB shifts the real bit offset by one byte */
    out_slice_data_bit_offset = in_slice_data_bit_offset + n * 8;

    if (mode_flag == ENTROPY_CABAC)
        out_slice_data_bit_offset = ALIGN(out_slice_data_bit_offset, 0x8);
    return out_slice_data_bit_offset;
}
313
314 static inline uint8_t
315 get_ref_idx_state_1(const VAPictureH264 *va_pic, unsigned int frame_store_id)
316 {
317     const unsigned int is_long_term =
318         !!(va_pic->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE);
319     const unsigned int is_top_field =
320         !!(va_pic->flags & VA_PICTURE_H264_TOP_FIELD);
321     const unsigned int is_bottom_field =
322         !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
323
324     return ((is_long_term                         << 6) |
325             ((is_top_field ^ is_bottom_field ^ 1) << 5) |
326             (frame_store_id                       << 1) |
327             ((is_top_field ^ 1) & is_bottom_field));
328 }
329
330 /* Fill in Reference List Entries (Gen5+: ILK, SNB, IVB) */
331 void
332 gen5_fill_avc_ref_idx_state(
333     uint8_t             state[32],
334     const VAPictureH264 ref_list[32],
335     unsigned int        ref_list_count,
336     const GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES]
337 )
338 {
339     unsigned int i, n, frame_idx;
340     int found;
341
342     for (i = 0, n = 0; i < ref_list_count; i++) {
343         const VAPictureH264 * const va_pic = &ref_list[i];
344
345         if (va_pic->flags & VA_PICTURE_H264_INVALID)
346             continue;
347
348         found = 0;
349         for (frame_idx = 0; frame_idx < MAX_GEN_REFERENCE_FRAMES; frame_idx++) {
350             const GenFrameStore * const fs = &frame_store[frame_idx];
351             if (fs->surface_id != VA_INVALID_ID &&
352                 fs->surface_id == va_pic->picture_id) {
353                 found = 1;
354                 break;
355             }
356         }
357
358         if (found) {
359             state[n++] = get_ref_idx_state_1(va_pic, frame_idx);
360         } else {
361             WARN_ONCE("Invalid Slice reference frame list !!!. It is not included in DPB \n");
362         }
363     }
364
365     for (; n < 32; n++)
366         state[n] = 0xff;
367 }
368
369 /* Emit Reference List Entries (Gen6+: SNB, IVB) */
/* Emit one MFX_AVC_REF_IDX_STATE command for the given reference list
   (list: 0 for RefPicList0, 1 for RefPicList1) */
static void
gen6_send_avc_ref_idx_state_1(
    struct intel_batchbuffer         *batch,
    unsigned int                      list,
    const VAPictureH264              *ref_list,
    unsigned int                      ref_list_count,
    const GenFrameStore               frame_store[MAX_GEN_REFERENCE_FRAMES]
)
{
    uint8_t ref_idx_state[32];

    /* 10 DWs total: 2 header DWs plus 32 bytes (8 DWs) of entries */
    BEGIN_BCS_BATCH(batch, 10);
    OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | (10 - 2));
    OUT_BCS_BATCH(batch, list);
    gen5_fill_avc_ref_idx_state(
        ref_idx_state,
        ref_list, ref_list_count,
        frame_store
    );
    intel_batchbuffer_data(batch, ref_idx_state, sizeof(ref_idx_state));
    ADVANCE_BCS_BATCH(batch);
}
392
393 void
394 gen6_send_avc_ref_idx_state(
395     struct intel_batchbuffer         *batch,
396     const VASliceParameterBufferH264 *slice_param,
397     const GenFrameStore               frame_store[MAX_GEN_REFERENCE_FRAMES]
398 )
399 {
400     if (slice_param->slice_type == SLICE_TYPE_I ||
401         slice_param->slice_type == SLICE_TYPE_SI)
402         return;
403
404     /* RefPicList0 */
405     gen6_send_avc_ref_idx_state_1(
406         batch, 0,
407         slice_param->RefPicList0, slice_param->num_ref_idx_l0_active_minus1 + 1,
408         frame_store
409     );
410
411     if (slice_param->slice_type != SLICE_TYPE_B)
412         return;
413
414     /* RefPicList1 */
415     gen6_send_avc_ref_idx_state_1(
416         batch, 1,
417         slice_param->RefPicList1, slice_param->num_ref_idx_l1_active_minus1 + 1,
418         frame_store
419     );
420 }
421
422 void
423 intel_update_avc_frame_store_index(VADriverContextP ctx,
424                                    struct decode_state *decode_state,
425                                    VAPictureParameterBufferH264 *pic_param,
426                                    GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
427 {
428     int i, j;
429
430     assert(MAX_GEN_REFERENCE_FRAMES == ARRAY_ELEMS(pic_param->ReferenceFrames));
431
432     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
433         int found = 0;
434
435         if (frame_store[i].surface_id == VA_INVALID_ID ||
436             frame_store[i].obj_surface == NULL)
437             continue;
438
439         assert(frame_store[i].frame_store_id != -1);
440
441         for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
442             VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[j];
443             if (ref_pic->flags & VA_PICTURE_H264_INVALID)
444                 continue;
445
446             if (frame_store[i].surface_id == ref_pic->picture_id) {
447                 found = 1;
448                 break;
449             }
450         }
451
452         /* remove it from the internal DPB */
453         if (!found) {
454             struct object_surface *obj_surface = frame_store[i].obj_surface;
455             
456             obj_surface->flags &= ~SURFACE_REFERENCED;
457
458             if ((obj_surface->flags & SURFACE_ALL_MASK) == SURFACE_DISPLAYED) {
459                 obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
460                 i965_destroy_surface_storage(obj_surface);
461             }
462
463             frame_store[i].surface_id = VA_INVALID_ID;
464             frame_store[i].frame_store_id = -1;
465             frame_store[i].obj_surface = NULL;
466         }
467     }
468
469     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
470         VAPictureH264 *ref_pic = &pic_param->ReferenceFrames[i];
471         int found = 0;
472
473         if (ref_pic->flags & VA_PICTURE_H264_INVALID ||
474             ref_pic->picture_id == VA_INVALID_SURFACE ||
475             decode_state->reference_objects[i] == NULL)
476             continue;
477
478         for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
479             if (frame_store[j].surface_id == ref_pic->picture_id) {
480                 found = 1;
481                 break;
482             }
483         }
484
485         /* add the new reference frame into the internal DPB */
486         if (!found) {
487             int frame_idx;
488             int slot_found;
489             struct object_surface *obj_surface = decode_state->reference_objects[i];
490
491             /* 
492              * Sometimes a dummy frame comes from the upper layer library, call i965_check_alloc_surface_bo()
493              * to ake sure the store buffer is allocated for this reference frame
494              */
495             avc_ensure_surface_bo(ctx, decode_state, obj_surface, pic_param);
496
497             slot_found = 0;
498             frame_idx = -1;
499             /* Find a free frame store index */
500             for (j = 0; j < MAX_GEN_REFERENCE_FRAMES; j++) {
501                 if (frame_store[j].surface_id == VA_INVALID_ID ||
502                     frame_store[j].obj_surface == NULL) {
503                     frame_idx = j;
504                     slot_found = 1;
505                     break;
506                 }
507             }
508
509
510             if (slot_found) {
511                 frame_store[j].surface_id = ref_pic->picture_id;
512                 frame_store[j].frame_store_id = frame_idx;
513                 frame_store[j].obj_surface = obj_surface;
514             } else {
515                 WARN_ONCE("Not free slot for DPB reference list!!!\n");
516             }
517         }
518     }
519
520 }
521
522 void
523 intel_update_vc1_frame_store_index(VADriverContextP ctx,
524                                    struct decode_state *decode_state,
525                                    VAPictureParameterBufferVC1 *pic_param,
526                                    GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
527 {
528     struct object_surface *obj_surface;
529     int i;
530
531     obj_surface = decode_state->reference_objects[0];
532
533     if (pic_param->forward_reference_picture == VA_INVALID_ID ||
534         !obj_surface || 
535         !obj_surface->bo) {
536         frame_store[0].surface_id = VA_INVALID_ID;
537         frame_store[0].obj_surface = NULL;
538     } else {
539         frame_store[0].surface_id = pic_param->forward_reference_picture;
540         frame_store[0].obj_surface = obj_surface;
541     }
542
543     obj_surface = decode_state->reference_objects[1];
544
545     if (pic_param->backward_reference_picture == VA_INVALID_ID ||
546         !obj_surface || 
547         !obj_surface->bo) {
548         frame_store[1].surface_id = frame_store[0].surface_id;
549         frame_store[1].obj_surface = frame_store[0].obj_surface;
550     } else {
551         frame_store[1].surface_id = pic_param->backward_reference_picture;
552         frame_store[1].obj_surface = obj_surface;
553     }
554     for (i = 2; i < MAX_GEN_REFERENCE_FRAMES; i++) {
555         frame_store[i].surface_id = frame_store[i % 2].surface_id;
556         frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
557     }
558
559 }
560
561 void
562 intel_update_vp8_frame_store_index(VADriverContextP ctx,
563                                    struct decode_state *decode_state,
564                                    VAPictureParameterBufferVP8 *pic_param,
565                                    GenFrameStore frame_store[MAX_GEN_REFERENCE_FRAMES])
566 {
567     struct object_surface *obj_surface;
568     int i;
569
570     obj_surface = decode_state->reference_objects[0];
571
572     if (pic_param->last_ref_frame == VA_INVALID_ID ||
573         !obj_surface ||
574         !obj_surface->bo) {
575         frame_store[0].surface_id = VA_INVALID_ID;
576         frame_store[0].obj_surface = NULL;
577     } else {
578         frame_store[0].surface_id = pic_param->last_ref_frame;
579         frame_store[0].obj_surface = obj_surface;
580     }
581
582     obj_surface = decode_state->reference_objects[1];
583
584     if (pic_param->golden_ref_frame == VA_INVALID_ID ||
585         !obj_surface ||
586         !obj_surface->bo) {
587         frame_store[1].surface_id = frame_store[0].surface_id;
588         frame_store[1].obj_surface = frame_store[0].obj_surface;
589     } else {
590         frame_store[1].surface_id = pic_param->golden_ref_frame;
591         frame_store[1].obj_surface = obj_surface;
592     }
593
594     obj_surface = decode_state->reference_objects[2];
595
596     if (pic_param->alt_ref_frame == VA_INVALID_ID ||
597         !obj_surface ||
598         !obj_surface->bo) {
599         frame_store[2].surface_id = frame_store[0].surface_id;
600         frame_store[2].obj_surface = frame_store[0].obj_surface;
601     } else {
602         frame_store[2].surface_id = pic_param->alt_ref_frame;
603         frame_store[2].obj_surface = obj_surface;
604     }
605
606     for (i = 3; i < MAX_GEN_REFERENCE_FRAMES; i++) {
607         frame_store[i].surface_id = frame_store[i % 2].surface_id;
608         frame_store[i].obj_surface = frame_store[i % 2].obj_surface;
609     }
610
611 }
612
/* Validate the H.264 picture parameters and resolve the reference
   frames into decode_state->reference_objects.
   Returns VA_STATUS_SUCCESS or VA_STATUS_ERROR_INVALID_PARAMETER. */
static VAStatus
intel_decoder_check_avc_parameter(VADriverContextP ctx,
                                  VAProfile h264_profile,
                                  struct decode_state *decode_state)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    VAPictureParameterBufferH264 *pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
    struct object_surface *obj_surface; 
    int i;

    /* The current picture must be valid and must match the render target */
    assert(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID));
    assert(pic_param->CurrPic.picture_id != VA_INVALID_SURFACE);

    if (pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID ||
        pic_param->CurrPic.picture_id == VA_INVALID_SURFACE)
        goto error;

    assert(pic_param->CurrPic.picture_id == decode_state->current_render_target);

    if (pic_param->CurrPic.picture_id != decode_state->current_render_target)
        goto error;

    /* FMO/ASO features are rejected for non-Baseline profiles */
    if ((h264_profile != VAProfileH264Baseline)) {
       if (pic_param->num_slice_groups_minus1 ||
           pic_param->pic_fields.bits.redundant_pic_cnt_present_flag) {
           WARN_ONCE("Unsupported the FMO/ASO constraints!!!\n");
           goto error;
       }
    }

    /* Resolve reference frames; the list ends at the first invalid entry */
    for (i = 0; i < 16; i++) {
        if (pic_param->ReferenceFrames[i].flags & VA_PICTURE_H264_INVALID ||
            pic_param->ReferenceFrames[i].picture_id == VA_INVALID_SURFACE)
            break;
        else {
            obj_surface = SURFACE(pic_param->ReferenceFrames[i].picture_id);
            assert(obj_surface);

            if (!obj_surface)
                goto error;

            if (!obj_surface->bo) { /* a reference frame  without store buffer */
                WARN_ONCE("Invalid reference frame!!!\n");
            }

            decode_state->reference_objects[i] = obj_surface;
        }
    }

    /* Clear the unused reference slots */
    for ( ; i < 16; i++)
        decode_state->reference_objects[i] = NULL;

    return VA_STATUS_SUCCESS;

error:
    return VA_STATUS_ERROR_INVALID_PARAMETER;
}
670
671 static VAStatus
672 intel_decoder_check_mpeg2_parameter(VADriverContextP ctx,
673                                     struct decode_state *decode_state)
674 {
675     struct i965_driver_data *i965 = i965_driver_data(ctx);
676     VAPictureParameterBufferMPEG2 *pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
677     struct object_surface *obj_surface; 
678     int i = 0;
679     
680     if (pic_param->picture_coding_type == MPEG_I_PICTURE) {
681     } else if (pic_param->picture_coding_type == MPEG_P_PICTURE) {
682         obj_surface = SURFACE(pic_param->forward_reference_picture);
683
684         if (!obj_surface || !obj_surface->bo)
685             decode_state->reference_objects[i++] = NULL;
686         else
687             decode_state->reference_objects[i++] = obj_surface;
688     } else if (pic_param->picture_coding_type == MPEG_B_PICTURE) {
689         obj_surface = SURFACE(pic_param->forward_reference_picture);
690
691         if (!obj_surface || !obj_surface->bo)
692             decode_state->reference_objects[i++] = NULL;
693         else
694             decode_state->reference_objects[i++] = obj_surface;
695
696         obj_surface = SURFACE(pic_param->backward_reference_picture);
697
698         if (!obj_surface || !obj_surface->bo)
699             decode_state->reference_objects[i++] = NULL;
700         else
701             decode_state->reference_objects[i++] = obj_surface;
702     } else
703         goto error;
704
705     for ( ; i < 16; i++)
706         decode_state->reference_objects[i] = NULL;
707
708     return VA_STATUS_SUCCESS;
709
710 error:
711     return VA_STATUS_ERROR_INVALID_PARAMETER;
712 }
713
714 static VAStatus
715 intel_decoder_check_vc1_parameter(VADriverContextP ctx,
716                                   struct decode_state *decode_state)
717 {
718     struct i965_driver_data *i965 = i965_driver_data(ctx);
719     VAPictureParameterBufferVC1 *pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
720     struct object_surface *obj_surface; 
721     int i = 0;
722
723     if (pic_param->sequence_fields.bits.interlace == 1 &&
724         pic_param->picture_fields.bits.frame_coding_mode != 0) { /* frame-interlace or field-interlace */
725         return VA_STATUS_ERROR_DECODING_ERROR;
726     }
727
728     if (pic_param->picture_fields.bits.picture_type == 0 ||
729         pic_param->picture_fields.bits.picture_type == 3) {
730     } else if (pic_param->picture_fields.bits.picture_type == 1 ||
731                pic_param->picture_fields.bits.picture_type == 4) {
732         obj_surface = SURFACE(pic_param->forward_reference_picture);
733
734         if (!obj_surface || !obj_surface->bo)
735             decode_state->reference_objects[i++] = NULL;
736         else
737             decode_state->reference_objects[i++] = obj_surface;
738     } else if (pic_param->picture_fields.bits.picture_type == 2) {
739         obj_surface = SURFACE(pic_param->forward_reference_picture);
740
741         if (!obj_surface || !obj_surface->bo)
742             decode_state->reference_objects[i++] = NULL;
743         else
744             decode_state->reference_objects[i++] = obj_surface;
745
746         obj_surface = SURFACE(pic_param->backward_reference_picture);
747
748         if (!obj_surface || !obj_surface->bo)
749             decode_state->reference_objects[i++] = NULL;
750         else
751             decode_state->reference_objects[i++] = obj_surface;
752     } else 
753         goto error;
754
755     for ( ; i < 16; i++)
756         decode_state->reference_objects[i] = NULL;
757
758     return VA_STATUS_SUCCESS;
759
760 error:
761     return VA_STATUS_ERROR_INVALID_PARAMETER;
762 }
763
764 static VAStatus
765 intel_decoder_check_vp8_parameter(VADriverContextP ctx,
766                                   struct decode_state *decode_state)
767 {
768     struct i965_driver_data *i965 = i965_driver_data(ctx);
769     VAPictureParameterBufferVP8 *pic_param = (VAPictureParameterBufferVP8 *)decode_state->pic_param->buffer;
770     struct object_surface *obj_surface; 
771     int i = 0;
772
773     if (pic_param->last_ref_frame != VA_INVALID_SURFACE) {
774         obj_surface = SURFACE(pic_param->last_ref_frame);
775
776         if (obj_surface && obj_surface->bo)
777             decode_state->reference_objects[i++] = obj_surface;
778         else
779             decode_state->reference_objects[i++] = NULL;
780     }
781
782     if (pic_param->golden_ref_frame != VA_INVALID_SURFACE) {
783         obj_surface = SURFACE(pic_param->golden_ref_frame);
784
785         if (obj_surface && obj_surface->bo)
786             decode_state->reference_objects[i++] = obj_surface;
787         else
788             decode_state->reference_objects[i++] = NULL;
789     }
790
791     if (pic_param->alt_ref_frame != VA_INVALID_SURFACE) {
792         obj_surface = SURFACE(pic_param->alt_ref_frame);
793
794         if (obj_surface && obj_surface->bo)
795             decode_state->reference_objects[i++] = obj_surface;
796         else
797             decode_state->reference_objects[i++] = NULL;
798     }
799
800     for ( ; i < 16; i++)
801         decode_state->reference_objects[i] = NULL;
802
803     return VA_STATUS_SUCCESS;
804 }
805
806 VAStatus
807 intel_decoder_sanity_check_input(VADriverContextP ctx,
808                                  VAProfile profile,
809                                  struct decode_state *decode_state)
810 {
811     struct i965_driver_data *i965 = i965_driver_data(ctx);
812     struct object_surface *obj_surface;
813     VAStatus vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
814
815     if (decode_state->current_render_target == VA_INVALID_SURFACE)
816         goto out;
817         
818     obj_surface = SURFACE(decode_state->current_render_target);
819
820     if (!obj_surface)
821         goto out;
822
823     decode_state->render_object = obj_surface;
824
825     switch (profile) {
826     case VAProfileMPEG2Simple:
827     case VAProfileMPEG2Main:
828         vaStatus = intel_decoder_check_mpeg2_parameter(ctx, decode_state);
829         break;
830         
831     case VAProfileH264ConstrainedBaseline:
832     case VAProfileH264Main:
833     case VAProfileH264High:
834         vaStatus = intel_decoder_check_avc_parameter(ctx, profile, decode_state);
835         break;
836
837     case VAProfileVC1Simple:
838     case VAProfileVC1Main:
839     case VAProfileVC1Advanced:
840         vaStatus = intel_decoder_check_vc1_parameter(ctx, decode_state);
841         break;
842
843     case VAProfileJPEGBaseline:
844         vaStatus = VA_STATUS_SUCCESS;
845         break;
846
847     case VAProfileVP8Version0_3:
848         vaStatus = intel_decoder_check_vp8_parameter(ctx, decode_state);
849         break;
850
851     default:
852         vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
853         break;
854     }
855
856 out:
857     return vaStatus;
858 }
859
860 /*
 * Return the next slice parameter
862  *
863  * Input:
864  *      slice_param: the current slice
865  *      *group_idx & *element_idx the current slice position in slice groups
866  * Output:
867  *      Return the next slice parameter
868  *      *group_idx & *element_idx the next slice position in slice groups,
869  *      if the next slice is NULL, *group_idx & *element_idx will be ignored
870  */
VASliceParameterBufferMPEG2 *
intel_mpeg2_find_next_slice(struct decode_state *decode_state,
                            VAPictureParameterBufferMPEG2 *pic_param,
                            VASliceParameterBufferMPEG2 *slice_param,
                            int *group_idx,
                            int *element_idx)
{
    VASliceParameterBufferMPEG2 *next_slice_param;
    unsigned int width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
    /* Start scanning from the element right after the current slice */
    int j = *group_idx, i = *element_idx + 1;

    for (; j < decode_state->num_slice_params; j++) {
        for (; i < decode_state->slice_params[j]->num_elements; i++) {
            next_slice_param = ((VASliceParameterBufferMPEG2 *)decode_state->slice_params[j]->buffer) + i;

            /* Return the first slice whose raster-order MB position is
               at or beyond the current slice's position */
            if ((next_slice_param->slice_vertical_position * width_in_mbs + next_slice_param->slice_horizontal_position) >=
                (slice_param->slice_vertical_position * width_in_mbs + slice_param->slice_horizontal_position)) {
                *group_idx = j;
                *element_idx = i;

                return next_slice_param;
            }
        }

        /* Later groups are scanned from their first element */
        i = 0;
    }

    return NULL;
}
900
901 /* Ensure the segmentation buffer is large enough for the supplied
902    number of MBs, or re-allocate it */
903 bool
904 intel_ensure_vp8_segmentation_buffer(VADriverContextP ctx, GenBuffer *buf,
905     unsigned int mb_width, unsigned int mb_height)
906 {
907     struct i965_driver_data * const i965 = i965_driver_data(ctx);
908     /* The segmentation map is a 64-byte aligned linear buffer, with
909        each cache line holding only 8 bits for 4 continuous MBs */
910     const unsigned int buf_size = ((mb_width + 3) / 4) * 64 * mb_height;
911
912     if (buf->valid) {
913         if (buf->bo && buf->bo->size >= buf_size)
914             return true;
915         drm_intel_bo_unreference(buf->bo);
916         buf->valid = false;
917     }
918
919     buf->bo = drm_intel_bo_alloc(i965->intel.bufmgr, "segmentation map",
920         buf_size, 0x1000);
921     buf->valid = buf->bo != NULL;
922     return buf->valid;
923 }