1 /*
2  * Copyright © 2011 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Xiang Haihao <haihao.xiang@intel.com>
26  *
27  */
28
29 #include <stdio.h>
30 #include <stdlib.h>
31 #include <string.h>
32 #include <assert.h>
33 #include <va/va_dec_jpeg.h>
34
35 #include "intel_batchbuffer.h"
36 #include "intel_driver.h"
37
38 #include "i965_defines.h"
39 #include "i965_drv_video.h"
40 #include "i965_decoder_utils.h"
41
42 #include "gen7_mfd.h"
43 #include "intel_media.h"
44
45 static const uint32_t zigzag_direct[64] = {
46     0,   1,  8, 16,  9,  2,  3, 10,
47     17, 24, 32, 25, 18, 11,  4,  5,
48     12, 19, 26, 33, 40, 48, 41, 34,
49     27, 20, 13,  6,  7, 14, 21, 28,
50     35, 42, 49, 56, 57, 50, 43, 36,
51     29, 22, 15, 23, 30, 37, 44, 51,
52     58, 59, 52, 45, 38, 31, 39, 46,
53     53, 60, 61, 54, 47, 55, 62, 63
54 };
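/*
 * zigzag_direct maps a coefficient's position in zigzag scan order to its
 * raster-order index within an 8x8 block; gen7_mfd_mpeg2_qm_state() below
 * uses it to de-zigzag the MPEG-2 quantiser matrices before loading them.
 * For example, zigzag_direct[2] == 8: the third coefficient in scan order
 * sits at raster position (row 1, column 0).
 */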
55
56 static void
57 gen7_mfd_init_avc_surface(VADriverContextP ctx, 
58                           VAPictureParameterBufferH264 *pic_param,
59                           struct object_surface *obj_surface)
60 {
61     struct i965_driver_data *i965 = i965_driver_data(ctx);
62     GenAvcSurface *gen7_avc_surface = obj_surface->private_data;
63     int width_in_mbs, height_in_mbs;
64
65     obj_surface->free_private_data = gen_free_avc_surface;
66     width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
67     height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
68
69     if (!gen7_avc_surface) {
70         gen7_avc_surface = calloc(sizeof(GenAvcSurface), 1);
71         assert((obj_surface->size & 0x3f) == 0);
72         obj_surface->private_data = gen7_avc_surface;
73     }
74
75     gen7_avc_surface->dmv_bottom_flag = (pic_param->pic_fields.bits.field_pic_flag &&
76                                          !pic_param->seq_fields.bits.direct_8x8_inference_flag);
77
78     if (gen7_avc_surface->dmv_top == NULL) {
79         gen7_avc_surface->dmv_top = dri_bo_alloc(i965->intel.bufmgr,
80                                                  "direct mv w/r buffer",
81                                                  width_in_mbs * (height_in_mbs + 1) * 64,
82                                                  0x1000);
83         assert(gen7_avc_surface->dmv_top);
84     }
85
86     if (gen7_avc_surface->dmv_bottom_flag &&
87         gen7_avc_surface->dmv_bottom == NULL) {
88         gen7_avc_surface->dmv_bottom = dri_bo_alloc(i965->intel.bufmgr,
89                                                     "direct mv w/r buffer",
90                                                     width_in_mbs * (height_in_mbs + 1) * 64,
91                                                     0x1000);
92         assert(gen7_avc_surface->dmv_bottom);
93     }
94 }
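/*
 * The per-surface direct-MV (DMV) buffers above are sized
 * width_in_mbs * (height_in_mbs + 1) * 64 bytes, i.e. 64 bytes per
 * macroblock plus one extra MB row of slack.  As a rough illustration (not
 * a requirement of this code), a 1920x1088 frame is 120x68 MBs, giving
 * 120 * 69 * 64 = 529,920 bytes per DMV buffer.  The separate bottom-field
 * buffer is only allocated for field pictures that do not use
 * direct_8x8_inference, per dmv_bottom_flag above.
 */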
95
96 static void
97 gen7_mfd_pipe_mode_select(VADriverContextP ctx,
98                           struct decode_state *decode_state,
99                           int standard_select,
100                           struct gen7_mfd_context *gen7_mfd_context)
101 {
102     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
103
104     assert(standard_select == MFX_FORMAT_MPEG2 ||
105            standard_select == MFX_FORMAT_AVC ||
106            standard_select == MFX_FORMAT_VC1 ||
107            standard_select == MFX_FORMAT_JPEG);
108
109     BEGIN_BCS_BATCH(batch, 5);
110     OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (5 - 2));
111     OUT_BCS_BATCH(batch,
112                   (MFX_LONG_MODE << 17) | /* only the long format is currently supported */
113                   (MFD_MODE_VLD << 15) | /* VLD mode */
114                   (0 << 10) | /* disable Stream-Out */
115                   (gen7_mfd_context->post_deblocking_output.valid << 9)  | /* Post Deblocking Output */
116                   (gen7_mfd_context->pre_deblocking_output.valid << 8)  | /* Pre Deblocking Output */
117                   (0 << 5)  | /* not in stitch mode */
118                   (MFX_CODEC_DECODE << 4)  | /* decoding mode */
119                   (standard_select << 0));
120     OUT_BCS_BATCH(batch,
121                   (0 << 4)  | /* terminate if AVC motion and POC table error occurs */
122                   (0 << 3)  | /* terminate if AVC mbdata error occurs */
123                   (0 << 2)  | /* terminate if AVC CABAC/CAVLC decode error occurs */
124                   (0 << 1)  |
125                   (0 << 0));
126     OUT_BCS_BATCH(batch, 0); /* pic status/error report id */ 
127     OUT_BCS_BATCH(batch, 0); /* reserved */
128     ADVANCE_BCS_BATCH(batch);
129 }
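/*
 * The pre- and post-deblocking output enables in DW1 come from the
 * per-codec decode_init functions below, which mark exactly one of the two
 * as valid: the post-deblocking output when in-loop deblocking is enabled,
 * the pre-deblocking output otherwise.
 */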
130
131 static void
132 gen7_mfd_surface_state(VADriverContextP ctx,
133                        struct decode_state *decode_state,
134                        int standard_select,
135                        struct gen7_mfd_context *gen7_mfd_context)
136 {
137     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
138     struct object_surface *obj_surface = decode_state->render_object;
139     unsigned int y_cb_offset;
140     unsigned int y_cr_offset;
141
142     assert(obj_surface);
143
144     y_cb_offset = obj_surface->y_cb_offset;
145     y_cr_offset = obj_surface->y_cr_offset;
146
147     BEGIN_BCS_BATCH(batch, 6);
148     OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
149     OUT_BCS_BATCH(batch, 0);
150     OUT_BCS_BATCH(batch,
151                   ((obj_surface->orig_height - 1) << 18) |
152                   ((obj_surface->orig_width - 1) << 4));
153     OUT_BCS_BATCH(batch,
154                   (MFX_SURFACE_PLANAR_420_8 << 28) | /* 420 planar YUV surface */
155                   ((standard_select != MFX_FORMAT_JPEG) << 27) | /* interleave chroma, set to 0 for JPEG */
156                   (0 << 22) | /* surface object control state, ignored */
157                   ((obj_surface->width - 1) << 3) | /* pitch */
158                   (0 << 2)  | /* must be 0 */
159                   (1 << 1)  | /* must be tiled */
160                   (I965_TILEWALK_YMAJOR << 0));  /* tile walk, must be 1 */
161     OUT_BCS_BATCH(batch,
162                   (0 << 16) | /* X offset for U(Cb), must be 0 */
163                   (y_cb_offset << 0)); /* Y offset for U(Cb) */
164     OUT_BCS_BATCH(batch,
165                   (0 << 16) | /* X offset for V(Cr), must be 0 */
166                   (y_cr_offset << 0)); /* Y offset for V(Cr), must be 0 for video codecs, non-zero for JPEG */
167     ADVANCE_BCS_BATCH(batch);
168 }
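/*
 * The surface is programmed as 8-bit planar 4:2:0 (NV12 as allocated by the
 * decode_init paths below), with interleaved chroma for the video codecs
 * and separate Cb/Cr planes only for JPEG.  y_cb_offset/y_cr_offset are
 * vertical offsets of the chroma data from the top of the surface (in rows,
 * per the field names); for the interleaved-chroma case the Cr offset stays
 * 0, as the comment on the last DWORD notes.
 */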
169
170 static void
171 gen7_mfd_pipe_buf_addr_state(VADriverContextP ctx,
172                              struct decode_state *decode_state,
173                              int standard_select,
174                              struct gen7_mfd_context *gen7_mfd_context)
175 {
176     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
177     int i;
178
179     BEGIN_BCS_BATCH(batch, 24);
180     OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (24 - 2));
181     if (gen7_mfd_context->pre_deblocking_output.valid)
182         OUT_BCS_RELOC(batch, gen7_mfd_context->pre_deblocking_output.bo,
183                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
184                       0);
185     else
186         OUT_BCS_BATCH(batch, 0);
187
188     if (gen7_mfd_context->post_deblocking_output.valid)
189         OUT_BCS_RELOC(batch, gen7_mfd_context->post_deblocking_output.bo,
190                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
191                       0);
192     else
193         OUT_BCS_BATCH(batch, 0);
194
195     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
196     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
197
198     if (gen7_mfd_context->intra_row_store_scratch_buffer.valid)
199         OUT_BCS_RELOC(batch, gen7_mfd_context->intra_row_store_scratch_buffer.bo,
200                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
201                       0);
202     else
203         OUT_BCS_BATCH(batch, 0);
204
205     if (gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid)
206         OUT_BCS_RELOC(batch, gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo,
207                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
208                       0);
209     else
210         OUT_BCS_BATCH(batch, 0);
211
212     /* DW 7..22 */
213     for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
214         struct object_surface *obj_surface;
215
216         if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID &&
217             gen7_mfd_context->reference_surface[i].obj_surface &&
218             gen7_mfd_context->reference_surface[i].obj_surface->bo) {
219             obj_surface = gen7_mfd_context->reference_surface[i].obj_surface;
220
221             OUT_BCS_RELOC(batch, obj_surface->bo,
222                           I915_GEM_DOMAIN_INSTRUCTION, 0,
223                           0);
224         } else {
225             OUT_BCS_BATCH(batch, 0);
226         }
227     }
228
229     OUT_BCS_BATCH(batch, 0);   /* ignore DW23 for decoding */
230     ADVANCE_BCS_BATCH(batch);
231 }
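/*
 * Layout of this 24-DWORD command as emitted here: DW1/DW2 are the pre- and
 * post-deblocking destination pictures, DW3/DW4 are ignored for decoding,
 * DW5/DW6 are the intra-row and deblocking-filter row-store scratch
 * buffers, DW7..22 hold the 16 reference picture base addresses (unused
 * slots written as 0), and DW23 is ignored for decoding.
 */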
232
233 static void
234 gen7_mfd_ind_obj_base_addr_state(VADriverContextP ctx,
235                                  dri_bo *slice_data_bo,
236                                  int standard_select,
237                                  struct gen7_mfd_context *gen7_mfd_context)
238 {
239     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
240
241     BEGIN_BCS_BATCH(batch, 11);
242     OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
243     OUT_BCS_RELOC(batch, slice_data_bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0); /* MFX Indirect Bitstream Object Base Address */
244     OUT_BCS_BATCH(batch, 0x80000000); /* must be set; indirect object upper bound, up to 2 GB */
245     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
246     OUT_BCS_BATCH(batch, 0);
247     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
248     OUT_BCS_BATCH(batch, 0);
249     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
250     OUT_BCS_BATCH(batch, 0);
251     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
252     OUT_BCS_BATCH(batch, 0);
253     ADVANCE_BCS_BATCH(batch);
254 }
255
256 static void
257 gen7_mfd_bsp_buf_base_addr_state(VADriverContextP ctx,
258                                  struct decode_state *decode_state,
259                                  int standard_select,
260                                  struct gen7_mfd_context *gen7_mfd_context)
261 {
262     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
263
264     BEGIN_BCS_BATCH(batch, 4);
265     OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
266
267     if (gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid)
268         OUT_BCS_RELOC(batch, gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo,
269                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
270                       0);
271     else
272         OUT_BCS_BATCH(batch, 0);
273
274     if (gen7_mfd_context->mpr_row_store_scratch_buffer.valid)
275         OUT_BCS_RELOC(batch, gen7_mfd_context->mpr_row_store_scratch_buffer.bo,
276                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
277                       0);
278     else
279         OUT_BCS_BATCH(batch, 0);
280
281     if (gen7_mfd_context->bitplane_read_buffer.valid)
282         OUT_BCS_RELOC(batch, gen7_mfd_context->bitplane_read_buffer.bo,
283                       I915_GEM_DOMAIN_INSTRUCTION, 0,
284                       0);
285     else
286         OUT_BCS_BATCH(batch, 0);
287
288     ADVANCE_BCS_BATCH(batch);
289 }
290
291 #if 0
292 static void
293 gen7_mfd_aes_state(VADriverContextP ctx,
294                    struct decode_state *decode_state,
295                    int standard_select)
296 {
297     /* FIXME */
298 }
299 #endif
300
301 static void
302 gen7_mfd_qm_state(VADriverContextP ctx,
303                   int qm_type,
304                   unsigned char *qm,
305                   int qm_length,
306                   struct gen7_mfd_context *gen7_mfd_context)
307 {
308     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
309     unsigned int qm_buffer[16];
310
311     assert(qm_length <= 16 * 4);
312     memcpy(qm_buffer, qm, qm_length);
313
314     BEGIN_BCS_BATCH(batch, 18);
315     OUT_BCS_BATCH(batch, MFX_QM_STATE | (18 - 2));
316     OUT_BCS_BATCH(batch, qm_type << 0);
317     intel_batchbuffer_data(batch, qm_buffer, 16 * 4);
318     ADVANCE_BCS_BATCH(batch);
319 }
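/*
 * MFX_QM_STATE always carries a 64-byte matrix payload, while qm_length may
 * be smaller (48 bytes for the packed AVC 4x4 lists), so the tail of
 * qm_buffer is emitted as-is from the uninitialized local array.  The
 * hardware presumably ignores the excess for those matrix types; zeroing
 * qm_buffer before the memcpy would make the padding deterministic.
 */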
320
321 #if 0
322 static void
323 gen7_mfd_wait(VADriverContextP ctx,
324               struct decode_state *decode_state,
325               int standard_select,
326               struct gen7_mfd_context *gen7_mfd_context)
327 {
328     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
329
330     BEGIN_BCS_BATCH(batch, 1);
331     OUT_BCS_BATCH(batch, MFX_WAIT | (1 << 8));
332     ADVANCE_BCS_BATCH(batch);
333 }
334 #endif
335
336 static void
337 gen7_mfd_avc_img_state(VADriverContextP ctx,
338                        struct decode_state *decode_state,
339                        struct gen7_mfd_context *gen7_mfd_context)
340 {
341     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
342     int img_struct;
343     int mbaff_frame_flag;
344     unsigned int width_in_mbs, height_in_mbs;
345     VAPictureParameterBufferH264 *pic_param;
346
347     assert(decode_state->pic_param && decode_state->pic_param->buffer);
348     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
349
350     if (pic_param->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
351         img_struct = 1;
352     else if (pic_param->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD)
353         img_struct = 3;
354     else
355         img_struct = 0;
356
357     if ((img_struct & 0x1) == 0x1) {
358         assert(pic_param->pic_fields.bits.field_pic_flag == 0x1);
359     } else {
360         assert(pic_param->pic_fields.bits.field_pic_flag == 0x0);
361     }
362
363     if (pic_param->seq_fields.bits.frame_mbs_only_flag) { /* a frame containing only frame macroblocks */
364         assert(pic_param->seq_fields.bits.mb_adaptive_frame_field_flag == 0);
365         assert(pic_param->pic_fields.bits.field_pic_flag == 0);
366     } else {
367         assert(pic_param->seq_fields.bits.direct_8x8_inference_flag == 1); /* see H.264 spec */
368     }
369
370     mbaff_frame_flag = (pic_param->seq_fields.bits.mb_adaptive_frame_field_flag &&
371                         !pic_param->pic_fields.bits.field_pic_flag);
372
373     width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
374     height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
375
376     /* The MFX unit doesn't support 4:2:2 and 4:4:4 pictures */
377     assert(pic_param->seq_fields.bits.chroma_format_idc == 0 || /* monochrome picture */
378            pic_param->seq_fields.bits.chroma_format_idc == 1);  /* 4:2:0 */
379     assert(pic_param->seq_fields.bits.residual_colour_transform_flag == 0); /* only available for 4:4:4 */
380
381     BEGIN_BCS_BATCH(batch, 16);
382     OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (16 - 2));
383     OUT_BCS_BATCH(batch, 
384                   width_in_mbs * height_in_mbs);
385     OUT_BCS_BATCH(batch, 
386                   ((height_in_mbs - 1) << 16) | 
387                   ((width_in_mbs - 1) << 0));
388     OUT_BCS_BATCH(batch, 
389                   ((pic_param->second_chroma_qp_index_offset & 0x1f) << 24) |
390                   ((pic_param->chroma_qp_index_offset & 0x1f) << 16) |
391                   (0 << 14) | /* Max-bit conformance Intra flag ??? FIXME */
392                   (0 << 13) | /* Max Macroblock size conformance Inter flag ??? FIXME */
393                   (pic_param->pic_fields.bits.weighted_pred_flag << 12) | /* differ from GEN6 */
394                   (pic_param->pic_fields.bits.weighted_bipred_idc << 10) |
395                   (img_struct << 8));
396     OUT_BCS_BATCH(batch,
397                   (pic_param->seq_fields.bits.chroma_format_idc << 10) |
398                   (pic_param->pic_fields.bits.entropy_coding_mode_flag << 7) |
399                   ((!pic_param->pic_fields.bits.reference_pic_flag) << 6) |
400                   (pic_param->pic_fields.bits.constrained_intra_pred_flag << 5) |
401                   (pic_param->seq_fields.bits.direct_8x8_inference_flag << 4) |
402                   (pic_param->pic_fields.bits.transform_8x8_mode_flag << 3) |
403                   (pic_param->seq_fields.bits.frame_mbs_only_flag << 2) |
404                   (mbaff_frame_flag << 1) |
405                   (pic_param->pic_fields.bits.field_pic_flag << 0));
406     OUT_BCS_BATCH(batch, 0);
407     OUT_BCS_BATCH(batch, 0);
408     OUT_BCS_BATCH(batch, 0);
409     OUT_BCS_BATCH(batch, 0);
410     OUT_BCS_BATCH(batch, 0);
411     OUT_BCS_BATCH(batch, 0);
412     OUT_BCS_BATCH(batch, 0);
413     OUT_BCS_BATCH(batch, 0);
414     OUT_BCS_BATCH(batch, 0);
415     OUT_BCS_BATCH(batch, 0);
416     OUT_BCS_BATCH(batch, 0);
417     ADVANCE_BCS_BATCH(batch);
418 }
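/*
 * img_struct encodes the current picture structure as this command expects
 * it: 0 = frame, 1 = top field, 3 = bottom field.  The asserts above only
 * verify that a field value is accompanied by field_pic_flag (and vice
 * versa).  DW5..15 are left at zero here.
 */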
419
420 static void
421 gen7_mfd_avc_qm_state(VADriverContextP ctx,
422                       struct decode_state *decode_state,
423                       struct gen7_mfd_context *gen7_mfd_context)
424 {
425     VAIQMatrixBufferH264 *iq_matrix;
426     VAPictureParameterBufferH264 *pic_param;
427
428     if (decode_state->iq_matrix && decode_state->iq_matrix->buffer)
429         iq_matrix = (VAIQMatrixBufferH264 *)decode_state->iq_matrix->buffer;
430     else
431         iq_matrix = &gen7_mfd_context->iq_matrix.h264;
432
433     assert(decode_state->pic_param && decode_state->pic_param->buffer);
434     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
435
436     gen7_mfd_qm_state(ctx, MFX_QM_AVC_4X4_INTRA_MATRIX, &iq_matrix->ScalingList4x4[0][0], 3 * 16, gen7_mfd_context);
437     gen7_mfd_qm_state(ctx, MFX_QM_AVC_4X4_INTER_MATRIX, &iq_matrix->ScalingList4x4[3][0], 3 * 16, gen7_mfd_context);
438
439     if (pic_param->pic_fields.bits.transform_8x8_mode_flag) {
440         gen7_mfd_qm_state(ctx, MFX_QM_AVC_8x8_INTRA_MATRIX, &iq_matrix->ScalingList8x8[0][0], 64, gen7_mfd_context);
441         gen7_mfd_qm_state(ctx, MFX_QM_AVC_8x8_INTER_MATRIX, &iq_matrix->ScalingList8x8[1][0], 64, gen7_mfd_context);
442     }
443 }
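/*
 * The splits above assume the H.264 ordering of the VA-API scaling lists:
 * ScalingList4x4[0..2] are the intra Y/Cb/Cr lists, [3..5] the inter lists,
 * and ScalingList8x8[0]/[1] are the intra/inter luma 8x8 lists, which are
 * only loaded when transform_8x8_mode_flag is set.
 */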
444
445 static void
446 gen7_mfd_avc_directmode_state(VADriverContextP ctx,
447                               struct decode_state *decode_state,
448                               VAPictureParameterBufferH264 *pic_param,
449                               VASliceParameterBufferH264 *slice_param,
450                               struct gen7_mfd_context *gen7_mfd_context)
451 {
452     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
453     struct object_surface *obj_surface;
454     GenAvcSurface *gen7_avc_surface;
455     VAPictureH264 *va_pic;
456     int i, j;
457
458     BEGIN_BCS_BATCH(batch, 69);
459     OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));
460
461     /* reference surfaces 0..15 */
462     for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
463         if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID &&
464             gen7_mfd_context->reference_surface[i].obj_surface &&
465             gen7_mfd_context->reference_surface[i].obj_surface->private_data) {
466
467             obj_surface = gen7_mfd_context->reference_surface[i].obj_surface;
468             gen7_avc_surface = obj_surface->private_data;
469             OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
470                           I915_GEM_DOMAIN_INSTRUCTION, 0,
471                           0);
472
473             if (gen7_avc_surface->dmv_bottom_flag == 1)
474                 OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_bottom,
475                               I915_GEM_DOMAIN_INSTRUCTION, 0,
476                               0);
477             else
478                 OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
479                               I915_GEM_DOMAIN_INSTRUCTION, 0,
480                               0);
481         } else {
482             OUT_BCS_BATCH(batch, 0);
483             OUT_BCS_BATCH(batch, 0);
484         }
485     }
486
487     /* the current decoding frame/field */
488     va_pic = &pic_param->CurrPic;
489     obj_surface = decode_state->render_object;
490     assert(obj_surface->bo && obj_surface->private_data);
491     gen7_avc_surface = obj_surface->private_data;
492
493     OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
494                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
495                   0);
496
497     if (gen7_avc_surface->dmv_bottom_flag == 1)
498         OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_bottom,
499                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
500                       0);
501     else
502         OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
503                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
504                       0);
505
506     /* POC List */
507     for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
508         if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID) {
509             int found = 0;
510
511             assert(gen7_mfd_context->reference_surface[i].obj_surface != NULL);
512
513             for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
514                 va_pic = &pic_param->ReferenceFrames[j];
515                 
516                 if (va_pic->flags & VA_PICTURE_H264_INVALID)
517                     continue;
518
519                 if (va_pic->picture_id == gen7_mfd_context->reference_surface[i].surface_id) {
520                     found = 1;
521                     break;
522                 }
523             }
524
525             assert(found == 1);
526             assert(!(va_pic->flags & VA_PICTURE_H264_INVALID));
527             
528             OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
529             OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);
530         } else {
531             OUT_BCS_BATCH(batch, 0);
532             OUT_BCS_BATCH(batch, 0);
533         }
534     }
535
536     va_pic = &pic_param->CurrPic;
537     OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
538     OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);
539
540     ADVANCE_BCS_BATCH(batch);
541 }
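/*
 * The 69 DWORDs emitted above break down as: 1 command header, 2 DWORDs of
 * DMV buffer addresses for each of the 16 reference frame store entries
 * (top, then bottom or top again), 2 for the current picture's DMV buffers,
 * 16 top/bottom POC pairs for the references, and finally the current
 * picture's top/bottom POC: 1 + 32 + 2 + 32 + 2 = 69.
 */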
542
543 static void
544 gen7_mfd_avc_slice_state(VADriverContextP ctx,
545                          VAPictureParameterBufferH264 *pic_param,
546                          VASliceParameterBufferH264 *slice_param,
547                          VASliceParameterBufferH264 *next_slice_param,
548                          struct gen7_mfd_context *gen7_mfd_context)
549 {
550     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
551     int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
552     int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1;
553     int slice_hor_pos, slice_ver_pos, next_slice_hor_pos, next_slice_ver_pos;
554     int num_ref_idx_l0, num_ref_idx_l1;
555     int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
556                          pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
557     int first_mb_in_slice = 0, first_mb_in_next_slice = 0;
558     int slice_type;
559
560     if (slice_param->slice_type == SLICE_TYPE_I ||
561         slice_param->slice_type == SLICE_TYPE_SI) {
562         slice_type = SLICE_TYPE_I;
563     } else if (slice_param->slice_type == SLICE_TYPE_P ||
564                slice_param->slice_type == SLICE_TYPE_SP) {
565         slice_type = SLICE_TYPE_P;
566     } else { 
567         assert(slice_param->slice_type == SLICE_TYPE_B);
568         slice_type = SLICE_TYPE_B;
569     }
570
571     if (slice_type == SLICE_TYPE_I) {
572         assert(slice_param->num_ref_idx_l0_active_minus1 == 0);
573         assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
574         num_ref_idx_l0 = 0;
575         num_ref_idx_l1 = 0;
576     } else if (slice_type == SLICE_TYPE_P) {
577         assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
578         num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
579         num_ref_idx_l1 = 0;
580     } else {
581         num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
582         num_ref_idx_l1 = slice_param->num_ref_idx_l1_active_minus1 + 1;
583     }
584
585     first_mb_in_slice = slice_param->first_mb_in_slice << mbaff_picture;
586     slice_hor_pos = first_mb_in_slice % width_in_mbs; 
587     slice_ver_pos = first_mb_in_slice / width_in_mbs;
588
589     if (next_slice_param) {
590         first_mb_in_next_slice = next_slice_param->first_mb_in_slice << mbaff_picture;
591         next_slice_hor_pos = first_mb_in_next_slice % width_in_mbs; 
592         next_slice_ver_pos = first_mb_in_next_slice / width_in_mbs;
593     } else {
594         next_slice_hor_pos = 0;
595         next_slice_ver_pos = height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag);
596     }
597
598     BEGIN_BCS_BATCH(batch, 11); /* FIXME: is it 10??? */
599     OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
600     OUT_BCS_BATCH(batch, slice_type);
601     OUT_BCS_BATCH(batch, 
602                   (num_ref_idx_l1 << 24) |
603                   (num_ref_idx_l0 << 16) |
604                   (slice_param->chroma_log2_weight_denom << 8) |
605                   (slice_param->luma_log2_weight_denom << 0));
606     OUT_BCS_BATCH(batch, 
607                   (slice_param->direct_spatial_mv_pred_flag << 29) |
608                   (slice_param->disable_deblocking_filter_idc << 27) |
609                   (slice_param->cabac_init_idc << 24) |
610                   ((pic_param->pic_init_qp_minus26 + 26 + slice_param->slice_qp_delta) << 16) |
611                   ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
612                   ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
613     OUT_BCS_BATCH(batch, 
614                   (slice_ver_pos << 24) |
615                   (slice_hor_pos << 16) | 
616                   (first_mb_in_slice << 0));
617     OUT_BCS_BATCH(batch,
618                   (next_slice_ver_pos << 16) |
619                   (next_slice_hor_pos << 0));
620     OUT_BCS_BATCH(batch, 
621                   (next_slice_param == NULL) << 19); /* last slice flag */
622     OUT_BCS_BATCH(batch, 0);
623     OUT_BCS_BATCH(batch, 0);
624     OUT_BCS_BATCH(batch, 0);
625     OUT_BCS_BATCH(batch, 0);
626     ADVANCE_BCS_BATCH(batch);
627 }
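/*
 * first_mb_in_slice is shifted left by one for MBAFF frames because the
 * slice header counts macroblock pairs in that case while the positions
 * here are in macroblock units.  When there is no next slice, the "next
 * slice" position falls back to the bottom of the frame (or field, for
 * field pictures), i.e. height_in_mbs rows or half of that.
 */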
628
629 static inline void
630 gen7_mfd_avc_ref_idx_state(VADriverContextP ctx,
631                            VAPictureParameterBufferH264 *pic_param,
632                            VASliceParameterBufferH264 *slice_param,
633                            struct gen7_mfd_context *gen7_mfd_context)
634 {
635     gen6_send_avc_ref_idx_state(
636         gen7_mfd_context->base.batch,
637         slice_param,
638         gen7_mfd_context->reference_surface
639     );
640 }
641
642 static void
643 gen7_mfd_avc_weightoffset_state(VADriverContextP ctx,
644                                 VAPictureParameterBufferH264 *pic_param,
645                                 VASliceParameterBufferH264 *slice_param,
646                                 struct gen7_mfd_context *gen7_mfd_context)
647 {
648     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
649     int i, j, num_weight_offset_table = 0;
650     short weightoffsets[32 * 6];
651
652     if ((slice_param->slice_type == SLICE_TYPE_P ||
653          slice_param->slice_type == SLICE_TYPE_SP) &&
654         (pic_param->pic_fields.bits.weighted_pred_flag == 1)) {
655         num_weight_offset_table = 1;
656     }
657     
658     if ((slice_param->slice_type == SLICE_TYPE_B) &&
659         (pic_param->pic_fields.bits.weighted_bipred_idc == 1)) {
660         num_weight_offset_table = 2;
661     }
662
663     for (i = 0; i < num_weight_offset_table; i++) {
664         BEGIN_BCS_BATCH(batch, 98);
665         OUT_BCS_BATCH(batch, MFX_AVC_WEIGHTOFFSET_STATE | (98 - 2));
666         OUT_BCS_BATCH(batch, i);
667
668         if (i == 0) {
669             for (j = 0; j < 32; j++) {
670                 weightoffsets[j * 6 + 0] = slice_param->luma_weight_l0[j];
671                 weightoffsets[j * 6 + 1] = slice_param->luma_offset_l0[j];
672                 weightoffsets[j * 6 + 2] = slice_param->chroma_weight_l0[j][0];
673                 weightoffsets[j * 6 + 3] = slice_param->chroma_offset_l0[j][0];
674                 weightoffsets[j * 6 + 4] = slice_param->chroma_weight_l0[j][1];
675                 weightoffsets[j * 6 + 5] = slice_param->chroma_offset_l0[j][1];
676             }
677         } else {
678             for (j = 0; j < 32; j++) {
679                 weightoffsets[j * 6 + 0] = slice_param->luma_weight_l1[j];
680                 weightoffsets[j * 6 + 1] = slice_param->luma_offset_l1[j];
681                 weightoffsets[j * 6 + 2] = slice_param->chroma_weight_l1[j][0];
682                 weightoffsets[j * 6 + 3] = slice_param->chroma_offset_l1[j][0];
683                 weightoffsets[j * 6 + 4] = slice_param->chroma_weight_l1[j][1];
684                 weightoffsets[j * 6 + 5] = slice_param->chroma_offset_l1[j][1];
685             }
686         }
687
688         intel_batchbuffer_data(batch, weightoffsets, sizeof(weightoffsets));
689         ADVANCE_BCS_BATCH(batch);
690     }
691 }
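/*
 * Each weight/offset table is 32 entries of 6 int16 values (luma weight,
 * luma offset, Cb weight, Cb offset, Cr weight, Cr offset), i.e. 384 bytes
 * or 96 DWORDs, which with the two header DWORDs gives the 98-DWORD batch
 * above.  One table (L0) is sent for explicitly weighted P/SP slices, two
 * (L0 then L1) for B slices with weighted_bipred_idc == 1.
 */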
692
693 static void
694 gen7_mfd_avc_bsd_object(VADriverContextP ctx,
695                         VAPictureParameterBufferH264 *pic_param,
696                         VASliceParameterBufferH264 *slice_param,
697                         dri_bo *slice_data_bo,
698                         VASliceParameterBufferH264 *next_slice_param,
699                         struct gen7_mfd_context *gen7_mfd_context)
700 {
701     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
702     unsigned int slice_data_bit_offset;
703
704     slice_data_bit_offset = avc_get_first_mb_bit_offset(
705         slice_data_bo,
706         slice_param,
707         pic_param->pic_fields.bits.entropy_coding_mode_flag
708     );
709
710     /* the input bitstream format on GEN7 differs from GEN6 */
711     BEGIN_BCS_BATCH(batch, 6);
712     OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
713     OUT_BCS_BATCH(batch, 
714                   (slice_param->slice_data_size - slice_param->slice_data_offset));
715     OUT_BCS_BATCH(batch, slice_param->slice_data_offset);
716     OUT_BCS_BATCH(batch,
717                   (0 << 31) |
718                   (0 << 14) |
719                   (0 << 12) |
720                   (0 << 10) |
721                   (0 << 8));
722     OUT_BCS_BATCH(batch,
723                   ((slice_data_bit_offset >> 3) << 16) |
724                   (1 << 7)  |
725                   (0 << 5)  |
726                   (0 << 4)  |
727                   ((next_slice_param == NULL) << 3) | /* LastSlice Flag */
728                   (slice_data_bit_offset & 0x7));
729     OUT_BCS_BATCH(batch, 0);
730     ADVANCE_BCS_BATCH(batch);
731 }
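/*
 * slice_data_bit_offset points at the first macroblock's data past the
 * slice header; the command takes it split into a byte offset
 * (slice_data_bit_offset >> 3) and the remaining bit position
 * (slice_data_bit_offset & 0x7).  The LastSlice flag is derived simply from
 * next_slice_param being NULL.
 */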
732
733 static inline void
734 gen7_mfd_avc_context_init(
735     VADriverContextP         ctx,
736     struct gen7_mfd_context *gen7_mfd_context
737 )
738 {
739     /* Initialize flat scaling lists */
740     avc_gen_default_iq_matrix(&gen7_mfd_context->iq_matrix.h264);
741 }
742
743 static void
744 gen7_mfd_avc_decode_init(VADriverContextP ctx,
745                          struct decode_state *decode_state,
746                          struct gen7_mfd_context *gen7_mfd_context)
747 {
748     VAPictureParameterBufferH264 *pic_param;
749     VASliceParameterBufferH264 *slice_param;
750     struct i965_driver_data *i965 = i965_driver_data(ctx);
751     struct object_surface *obj_surface;
752     dri_bo *bo;
753     int i, j, enable_avc_ildb = 0;
754     unsigned int width_in_mbs, height_in_mbs;
755
756     for (j = 0; j < decode_state->num_slice_params && enable_avc_ildb == 0; j++) {
757         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
758         slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
759
760         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
761             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
762             assert((slice_param->slice_type == SLICE_TYPE_I) ||
763                    (slice_param->slice_type == SLICE_TYPE_SI) ||
764                    (slice_param->slice_type == SLICE_TYPE_P) ||
765                    (slice_param->slice_type == SLICE_TYPE_SP) ||
766                    (slice_param->slice_type == SLICE_TYPE_B));
767
768             if (slice_param->disable_deblocking_filter_idc != 1) {
769                 enable_avc_ildb = 1;
770                 break;
771             }
772
773             slice_param++;
774         }
775     }
776
777     assert(decode_state->pic_param && decode_state->pic_param->buffer);
778     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
779     intel_update_avc_frame_store_index(ctx, decode_state, pic_param, gen7_mfd_context->reference_surface);
780     width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
781     height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1;
782     assert(width_in_mbs > 0 && width_in_mbs <= 256); /* 4K */
783     assert(height_in_mbs > 0 && height_in_mbs <= 256);
784
785     /* Current decoded picture */
786     obj_surface = decode_state->render_object;
787     obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
788     obj_surface->flags |= (pic_param->pic_fields.bits.reference_pic_flag ? SURFACE_REFERENCED : 0);
789     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);
790
791     /* initialize the UV components for the YUV400 (monochrome) case */
792     if (pic_param->seq_fields.bits.chroma_format_idc == 0) {
793          unsigned int uv_offset = obj_surface->width * obj_surface->height;
794          unsigned int uv_size   = obj_surface->width * obj_surface->height / 2;
795
796          drm_intel_gem_bo_map_gtt(obj_surface->bo);
797          memset(obj_surface->bo->virtual + uv_offset, 0x80, uv_size);
798          drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
799     }
800
801     gen7_mfd_init_avc_surface(ctx, pic_param, obj_surface);
802
803     dri_bo_unreference(gen7_mfd_context->post_deblocking_output.bo);
804     gen7_mfd_context->post_deblocking_output.bo = obj_surface->bo;
805     dri_bo_reference(gen7_mfd_context->post_deblocking_output.bo);
806     gen7_mfd_context->post_deblocking_output.valid = enable_avc_ildb;
807
808     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
809     gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
810     dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
811     gen7_mfd_context->pre_deblocking_output.valid = !enable_avc_ildb;
812
813     dri_bo_unreference(gen7_mfd_context->intra_row_store_scratch_buffer.bo);
814     bo = dri_bo_alloc(i965->intel.bufmgr,
815                       "intra row store",
816                       width_in_mbs * 64,
817                       0x1000);
818     assert(bo);
819     gen7_mfd_context->intra_row_store_scratch_buffer.bo = bo;
820     gen7_mfd_context->intra_row_store_scratch_buffer.valid = 1;
821
822     dri_bo_unreference(gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo);
823     bo = dri_bo_alloc(i965->intel.bufmgr,
824                       "deblocking filter row store",
825                       width_in_mbs * 64 * 4,
826                       0x1000);
827     assert(bo);
828     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = bo;
829     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 1;
830
831     dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
832     bo = dri_bo_alloc(i965->intel.bufmgr,
833                       "bsd mpc row store",
834                       width_in_mbs * 64 * 2,
835                       0x1000);
836     assert(bo);
837     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
838     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 1;
839
840     dri_bo_unreference(gen7_mfd_context->mpr_row_store_scratch_buffer.bo);
841     bo = dri_bo_alloc(i965->intel.bufmgr,
842                       "mpr row store",
843                       width_in_mbs * 64 * 2,
844                       0x1000);
845     assert(bo);
846     gen7_mfd_context->mpr_row_store_scratch_buffer.bo = bo;
847     gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 1;
848
849     gen7_mfd_context->bitplane_read_buffer.valid = 0;
850 }
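/*
 * The enable_avc_ildb scan above treats in-loop deblocking as enabled
 * unless every slice sets disable_deblocking_filter_idc == 1; that choice
 * then selects which of the post-/pre-deblocking outputs points at the
 * render surface.  The row-store scratch buffers are sized per macroblock
 * column (width_in_mbs * 64 bytes, times 4 and 2 for the deblocking and
 * BSD-MPC/MPR row stores respectively), which is what the MFX pipeline
 * appears to require for AVC on this generation.
 */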
851
852 static void
853 gen7_mfd_avc_decode_picture(VADriverContextP ctx,
854                             struct decode_state *decode_state,
855                             struct gen7_mfd_context *gen7_mfd_context)
856 {
857     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
858     VAPictureParameterBufferH264 *pic_param;
859     VASliceParameterBufferH264 *slice_param, *next_slice_param, *next_slice_group_param;
860     dri_bo *slice_data_bo;
861     int i, j;
862
863     assert(decode_state->pic_param && decode_state->pic_param->buffer);
864     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
865     gen7_mfd_avc_decode_init(ctx, decode_state, gen7_mfd_context);
866
867     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
868     intel_batchbuffer_emit_mi_flush(batch);
869     gen7_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
870     gen7_mfd_surface_state(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
871     gen7_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
872     gen7_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
873     gen7_mfd_avc_qm_state(ctx, decode_state, gen7_mfd_context);
874     gen7_mfd_avc_img_state(ctx, decode_state, gen7_mfd_context);
875
876     for (j = 0; j < decode_state->num_slice_params; j++) {
877         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
878         slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
879         slice_data_bo = decode_state->slice_datas[j]->bo;
880         gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_AVC, gen7_mfd_context);
881
882         if (j == decode_state->num_slice_params - 1)
883             next_slice_group_param = NULL;
884         else
885             next_slice_group_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j + 1]->buffer;
886
887         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
888             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
889             assert((slice_param->slice_type == SLICE_TYPE_I) ||
890                    (slice_param->slice_type == SLICE_TYPE_SI) ||
891                    (slice_param->slice_type == SLICE_TYPE_P) ||
892                    (slice_param->slice_type == SLICE_TYPE_SP) ||
893                    (slice_param->slice_type == SLICE_TYPE_B));
894
895             if (i < decode_state->slice_params[j]->num_elements - 1)
896                 next_slice_param = slice_param + 1;
897             else
898                 next_slice_param = next_slice_group_param;
899
900             gen7_mfd_avc_directmode_state(ctx, decode_state, pic_param, slice_param, gen7_mfd_context);
901             gen7_mfd_avc_ref_idx_state(ctx, pic_param, slice_param, gen7_mfd_context);
902             gen7_mfd_avc_weightoffset_state(ctx, pic_param, slice_param, gen7_mfd_context);
903             gen7_mfd_avc_slice_state(ctx, pic_param, slice_param, next_slice_param, gen7_mfd_context);
904             gen7_mfd_avc_bsd_object(ctx, pic_param, slice_param, slice_data_bo, next_slice_param, gen7_mfd_context);
905             slice_param++;
906         }
907     }
908
909     intel_batchbuffer_end_atomic(batch);
910     intel_batchbuffer_flush(batch);
911 }
912
913 static void
914 gen7_mfd_mpeg2_decode_init(VADriverContextP ctx,
915                            struct decode_state *decode_state,
916                            struct gen7_mfd_context *gen7_mfd_context)
917 {
918     VAPictureParameterBufferMPEG2 *pic_param;
919     struct i965_driver_data *i965 = i965_driver_data(ctx);
920     struct object_surface *obj_surface;
921     dri_bo *bo;
922     unsigned int width_in_mbs;
923
924     assert(decode_state->pic_param && decode_state->pic_param->buffer);
925     pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
926     width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
927
928     mpeg2_set_reference_surfaces(
929         ctx,
930         gen7_mfd_context->reference_surface,
931         decode_state,
932         pic_param
933     );
934
935     /* Current decoded picture */
936     obj_surface = decode_state->render_object;
937     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);
938
939     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
940     gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
941     dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
942     gen7_mfd_context->pre_deblocking_output.valid = 1;
943
944     dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
945     bo = dri_bo_alloc(i965->intel.bufmgr,
946                       "bsd mpc row store",
947                       width_in_mbs * 96,
948                       0x1000);
949     assert(bo);
950     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
951     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 1;
952
953     gen7_mfd_context->post_deblocking_output.valid = 0;
954     gen7_mfd_context->intra_row_store_scratch_buffer.valid = 0;
955     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 0;
956     gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 0;
957     gen7_mfd_context->bitplane_read_buffer.valid = 0;
958 }
959
960 static void
961 gen7_mfd_mpeg2_pic_state(VADriverContextP ctx,
962                          struct decode_state *decode_state,
963                          struct gen7_mfd_context *gen7_mfd_context)
964 {
965     struct i965_driver_data * const i965 = i965_driver_data(ctx);
966     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
967     VAPictureParameterBufferMPEG2 *pic_param;
968     unsigned int slice_concealment_disable_bit = 0;
969
970     assert(decode_state->pic_param && decode_state->pic_param->buffer);
971     pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
972
973     if (IS_HASWELL(i965->intel.device_id)) {
974         /* XXX: disable concealment for now */
975         slice_concealment_disable_bit = 1;
976     }
977
978     BEGIN_BCS_BATCH(batch, 13);
979     OUT_BCS_BATCH(batch, MFX_MPEG2_PIC_STATE | (13 - 2));
980     OUT_BCS_BATCH(batch,
981                   (pic_param->f_code & 0xf) << 28 | /* f_code[1][1] */
982                   ((pic_param->f_code >> 4) & 0xf) << 24 | /* f_code[1][0] */
983                   ((pic_param->f_code >> 8) & 0xf) << 20 | /* f_code[0][1] */
984                   ((pic_param->f_code >> 12) & 0xf) << 16 | /* f_code[0][0] */
985                   pic_param->picture_coding_extension.bits.intra_dc_precision << 14 |
986                   pic_param->picture_coding_extension.bits.picture_structure << 12 |
987                   pic_param->picture_coding_extension.bits.top_field_first << 11 |
988                   pic_param->picture_coding_extension.bits.frame_pred_frame_dct << 10 |
989                   pic_param->picture_coding_extension.bits.concealment_motion_vectors << 9 |
990                   pic_param->picture_coding_extension.bits.q_scale_type << 8 |
991                   pic_param->picture_coding_extension.bits.intra_vlc_format << 7 | 
992                   pic_param->picture_coding_extension.bits.alternate_scan << 6);
993     OUT_BCS_BATCH(batch,
994                   pic_param->picture_coding_type << 9);
995     OUT_BCS_BATCH(batch,
996                   (slice_concealment_disable_bit << 31) |
997                   ((ALIGN(pic_param->vertical_size, 16) / 16) - 1) << 16 |
998                   ((ALIGN(pic_param->horizontal_size, 16) / 16) - 1));
999     OUT_BCS_BATCH(batch, 0);
1000     OUT_BCS_BATCH(batch, 0);
1001     OUT_BCS_BATCH(batch, 0);
1002     OUT_BCS_BATCH(batch, 0);
1003     OUT_BCS_BATCH(batch, 0);
1004     OUT_BCS_BATCH(batch, 0);
1005     OUT_BCS_BATCH(batch, 0);
1006     OUT_BCS_BATCH(batch, 0);
1007     OUT_BCS_BATCH(batch, 0);
1008     ADVANCE_BCS_BATCH(batch);
1009 }
1010
1011 static void
1012 gen7_mfd_mpeg2_qm_state(VADriverContextP ctx,
1013                         struct decode_state *decode_state,
1014                         struct gen7_mfd_context *gen7_mfd_context)
1015 {
1016     VAIQMatrixBufferMPEG2 * const gen_iq_matrix = &gen7_mfd_context->iq_matrix.mpeg2;
1017     int i, j;
1018
1019     /* Update internal QM state */
1020     if (decode_state->iq_matrix && decode_state->iq_matrix->buffer) {
1021         VAIQMatrixBufferMPEG2 * const iq_matrix =
1022             (VAIQMatrixBufferMPEG2 *)decode_state->iq_matrix->buffer;
1023
1024         if (gen_iq_matrix->load_intra_quantiser_matrix == -1 ||
1025             iq_matrix->load_intra_quantiser_matrix) {
1026             gen_iq_matrix->load_intra_quantiser_matrix =
1027                 iq_matrix->load_intra_quantiser_matrix;
1028             if (iq_matrix->load_intra_quantiser_matrix) {
1029                 for (j = 0; j < 64; j++)
1030                     gen_iq_matrix->intra_quantiser_matrix[zigzag_direct[j]] =
1031                         iq_matrix->intra_quantiser_matrix[j];
1032             }
1033         }
1034
1035         if (gen_iq_matrix->load_non_intra_quantiser_matrix == -1 ||
1036             iq_matrix->load_non_intra_quantiser_matrix) {
1037             gen_iq_matrix->load_non_intra_quantiser_matrix =
1038                 iq_matrix->load_non_intra_quantiser_matrix;
1039             if (iq_matrix->load_non_intra_quantiser_matrix) {
1040                 for (j = 0; j < 64; j++)
1041                     gen_iq_matrix->non_intra_quantiser_matrix[zigzag_direct[j]] =
1042                         iq_matrix->non_intra_quantiser_matrix[j];
1043             }
1044         }
1045     }
1046
1047     /* Commit QM state to HW */
1048     for (i = 0; i < 2; i++) {
1049         unsigned char *qm = NULL;
1050         int qm_type;
1051
1052         if (i == 0) {
1053             if (gen_iq_matrix->load_intra_quantiser_matrix) {
1054                 qm = gen_iq_matrix->intra_quantiser_matrix;
1055                 qm_type = MFX_QM_MPEG_INTRA_QUANTIZER_MATRIX;
1056             }
1057         } else {
1058             if (gen_iq_matrix->load_non_intra_quantiser_matrix) {
1059                 qm = gen_iq_matrix->non_intra_quantiser_matrix;
1060                 qm_type = MFX_QM_MPEG_NON_INTRA_QUANTIZER_MATRIX;
1061             }
1062         }
1063
1064         if (!qm)
1065             continue;
1066
1067         gen7_mfd_qm_state(ctx, qm_type, qm, 64, gen7_mfd_context);
1068     }
1069 }
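/*
 * The driver keeps a cached copy of the MPEG-2 quantiser matrices in
 * gen7_mfd_context->iq_matrix.mpeg2, so a picture without a new
 * VAIQMatrixBufferMPEG2 reuses the matrices loaded earlier; incoming
 * matrices are de-zigzagged through zigzag_direct[] before being cached,
 * and only the matrices flagged as loaded are sent to the hardware.
 */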
1070
1071 static void
1072 gen7_mfd_mpeg2_bsd_object(VADriverContextP ctx,
1073                           VAPictureParameterBufferMPEG2 *pic_param,
1074                           VASliceParameterBufferMPEG2 *slice_param,
1075                           VASliceParameterBufferMPEG2 *next_slice_param,
1076                           struct gen7_mfd_context *gen7_mfd_context)
1077 {
1078     struct i965_driver_data * const i965 = i965_driver_data(ctx);
1079     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1080     unsigned int width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
1081     int mb_count, vpos0, hpos0, vpos1, hpos1, is_field_pic_wa, is_field_pic = 0;
1082
1083     if (pic_param->picture_coding_extension.bits.picture_structure == MPEG_TOP_FIELD ||
1084         pic_param->picture_coding_extension.bits.picture_structure == MPEG_BOTTOM_FIELD)
1085         is_field_pic = 1;
1086     is_field_pic_wa = is_field_pic &&
1087         gen7_mfd_context->wa_mpeg2_slice_vertical_position > 0;
1088
1089     vpos0 = slice_param->slice_vertical_position / (1 + is_field_pic_wa);
1090     hpos0 = slice_param->slice_horizontal_position;
1091
1092     if (next_slice_param == NULL) {
1093         vpos1 = ALIGN(pic_param->vertical_size, 16) / 16 / (1 + is_field_pic);
1094         hpos1 = 0;
1095     } else {
1096         vpos1 = next_slice_param->slice_vertical_position / (1 + is_field_pic_wa);
1097         hpos1 = next_slice_param->slice_horizontal_position;
1098     }
1099
1100     mb_count = (vpos1 * width_in_mbs + hpos1) - (vpos0 * width_in_mbs + hpos0);
1101
1102     BEGIN_BCS_BATCH(batch, 5);
1103     OUT_BCS_BATCH(batch, MFD_MPEG2_BSD_OBJECT | (5 - 2));
1104     OUT_BCS_BATCH(batch, 
1105                   slice_param->slice_data_size - (slice_param->macroblock_offset >> 3));
1106     OUT_BCS_BATCH(batch, 
1107                   slice_param->slice_data_offset + (slice_param->macroblock_offset >> 3));
1108     OUT_BCS_BATCH(batch,
1109                   hpos0 << 24 |
1110                   vpos0 << 16 |
1111                   mb_count << 8 |
1112                   (next_slice_param == NULL) << 5 |
1113                   (next_slice_param == NULL) << 3 |
1114                   (slice_param->macroblock_offset & 0x7));
1115     OUT_BCS_BATCH(batch,
1116                   (slice_param->quantiser_scale_code << 24) |
1117                   (IS_HASWELL(i965->intel.device_id) ? (vpos1 << 8 | hpos1) : 0));
1118     ADVANCE_BCS_BATCH(batch);
1119 }
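/*
 * mb_count is the number of macroblocks covered by this slice, computed as
 * the distance in raster order between this slice's start position and the
 * next slice's (or the bottom of the picture for the last slice).  When the
 * wa_mpeg2_slice_vertical_position workaround is active for field pictures,
 * incoming slice_vertical_position values are halved before being used.
 */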
1120
1121 static void
1122 gen7_mfd_mpeg2_decode_picture(VADriverContextP ctx,
1123                               struct decode_state *decode_state,
1124                               struct gen7_mfd_context *gen7_mfd_context)
1125 {
1126     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1127     VAPictureParameterBufferMPEG2 *pic_param;
1128     VASliceParameterBufferMPEG2 *slice_param, *next_slice_param, *next_slice_group_param;
1129     dri_bo *slice_data_bo;
1130     int i, j;
1131
1132     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1133     pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
1134
1135     gen7_mfd_mpeg2_decode_init(ctx, decode_state, gen7_mfd_context);
1136     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
1137     intel_batchbuffer_emit_mi_flush(batch);
1138     gen7_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
1139     gen7_mfd_surface_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
1140     gen7_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
1141     gen7_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
1142     gen7_mfd_mpeg2_pic_state(ctx, decode_state, gen7_mfd_context);
1143     gen7_mfd_mpeg2_qm_state(ctx, decode_state, gen7_mfd_context);
1144
1145     if (gen7_mfd_context->wa_mpeg2_slice_vertical_position < 0)
1146         gen7_mfd_context->wa_mpeg2_slice_vertical_position =
1147             mpeg2_wa_slice_vertical_position(decode_state, pic_param);
1148
1149     for (j = 0; j < decode_state->num_slice_params; j++) {
1150         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
1151         slice_param = (VASliceParameterBufferMPEG2 *)decode_state->slice_params[j]->buffer;
1152         slice_data_bo = decode_state->slice_datas[j]->bo;
1153         gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_MPEG2, gen7_mfd_context);
1154
1155         if (j == decode_state->num_slice_params - 1)
1156             next_slice_group_param = NULL;
1157         else
1158             next_slice_group_param = (VASliceParameterBufferMPEG2 *)decode_state->slice_params[j + 1]->buffer;
1159
1160         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1161             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
1162
1163             if (i < decode_state->slice_params[j]->num_elements - 1)
1164                 next_slice_param = slice_param + 1;
1165             else
1166                 next_slice_param = next_slice_group_param;
1167
1168             gen7_mfd_mpeg2_bsd_object(ctx, pic_param, slice_param, next_slice_param, gen7_mfd_context);
1169             slice_param++;
1170         }
1171     }
1172
1173     intel_batchbuffer_end_atomic(batch);
1174     intel_batchbuffer_flush(batch);
1175 }
1176
1177 static const int va_to_gen7_vc1_pic_type[5] = {
1178     GEN7_VC1_I_PICTURE,
1179     GEN7_VC1_P_PICTURE,
1180     GEN7_VC1_B_PICTURE,
1181     GEN7_VC1_BI_PICTURE,
1182     GEN7_VC1_P_PICTURE,
1183 };
1184
1185 static const int va_to_gen7_vc1_mv[4] = {
1186     1, /* 1-MV */
1187     2, /* 1-MV half-pel */
1188     3, /* 1-MV half-pel bilinear */
1189     0, /* Mixed MV */
1190 };
1191
1192 static const int b_picture_scale_factor[21] = {
1193     128, 85,  170, 64,  192,
1194     51,  102, 153, 204, 43,
1195     215, 37,  74,  111, 148,
1196     185, 222, 32,  96,  160, 
1197     224,
1198 };
1199
1200 static const int va_to_gen7_vc1_condover[3] = {
1201     0,
1202     2,
1203     3
1204 };
1205
1206 static const int va_to_gen7_vc1_profile[4] = {
1207     GEN7_VC1_SIMPLE_PROFILE,
1208     GEN7_VC1_MAIN_PROFILE,
1209     GEN7_VC1_RESERVED_PROFILE,
1210     GEN7_VC1_ADVANCED_PROFILE
1211 };
1212
1213 static void 
1214 gen7_mfd_free_vc1_surface(void **data)
1215 {
1216     struct gen7_vc1_surface *gen7_vc1_surface = *data;
1217
1218     if (!gen7_vc1_surface)
1219         return;
1220
1221     dri_bo_unreference(gen7_vc1_surface->dmv);
1222     free(gen7_vc1_surface);
1223     *data = NULL;
1224 }
1225
1226 static void
1227 gen7_mfd_init_vc1_surface(VADriverContextP ctx, 
1228                           VAPictureParameterBufferVC1 *pic_param,
1229                           struct object_surface *obj_surface)
1230 {
1231     struct i965_driver_data *i965 = i965_driver_data(ctx);
1232     struct gen7_vc1_surface *gen7_vc1_surface = obj_surface->private_data;
1233     int width_in_mbs = ALIGN(pic_param->coded_width, 16) / 16;
1234     int height_in_mbs = ALIGN(pic_param->coded_height, 16) / 16;
1235
1236     obj_surface->free_private_data = gen7_mfd_free_vc1_surface;
1237
1238     if (!gen7_vc1_surface) {
1239         gen7_vc1_surface = calloc(sizeof(struct gen7_vc1_surface), 1);
1240         assert((obj_surface->size & 0x3f) == 0);
1241         obj_surface->private_data = gen7_vc1_surface;
1242     }
1243
1244     gen7_vc1_surface->picture_type = pic_param->picture_fields.bits.picture_type;
1245
1246     if (gen7_vc1_surface->dmv == NULL) {
1247         gen7_vc1_surface->dmv = dri_bo_alloc(i965->intel.bufmgr,
1248                                              "direct mv w/r buffer",
1249                                              width_in_mbs * height_in_mbs * 64,
1250                                              0x1000);
1251     }
1252 }
1253
1254 static void
1255 gen7_mfd_vc1_decode_init(VADriverContextP ctx,
1256                          struct decode_state *decode_state,
1257                          struct gen7_mfd_context *gen7_mfd_context)
1258 {
1259     VAPictureParameterBufferVC1 *pic_param;
1260     struct i965_driver_data *i965 = i965_driver_data(ctx);
1261     struct object_surface *obj_surface;
1262     dri_bo *bo;
1263     int width_in_mbs;
1264     int picture_type;
1265  
1266     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1267     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1268     width_in_mbs = ALIGN(pic_param->coded_width, 16) / 16;
1269     picture_type = pic_param->picture_fields.bits.picture_type;
1270  
1271     intel_update_vc1_frame_store_index(ctx,
1272                                        decode_state,
1273                                        pic_param,
1274                                        gen7_mfd_context->reference_surface);
1275
1276     /* Current decoded picture */
1277     obj_surface = decode_state->render_object;
1278     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);
1279     gen7_mfd_init_vc1_surface(ctx, pic_param, obj_surface);
1280
1281     dri_bo_unreference(gen7_mfd_context->post_deblocking_output.bo);
1282     gen7_mfd_context->post_deblocking_output.bo = obj_surface->bo;
1283     dri_bo_reference(gen7_mfd_context->post_deblocking_output.bo);
1284     gen7_mfd_context->post_deblocking_output.valid = pic_param->entrypoint_fields.bits.loopfilter;
1285
1286     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
1287     gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
1288     dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
1289     gen7_mfd_context->pre_deblocking_output.valid = !pic_param->entrypoint_fields.bits.loopfilter;
1290
1291     dri_bo_unreference(gen7_mfd_context->intra_row_store_scratch_buffer.bo);
1292     bo = dri_bo_alloc(i965->intel.bufmgr,
1293                       "intra row store",
1294                       width_in_mbs * 64,
1295                       0x1000);
1296     assert(bo);
1297     gen7_mfd_context->intra_row_store_scratch_buffer.bo = bo;
1298     gen7_mfd_context->intra_row_store_scratch_buffer.valid = 1;
1299
1300     dri_bo_unreference(gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo);
1301     bo = dri_bo_alloc(i965->intel.bufmgr,
1302                       "deblocking filter row store",
1303                       width_in_mbs * 7 * 64,
1304                       0x1000);
1305     assert(bo);
1306     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = bo;
1307     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 1;
1308
1309     dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
1310     bo = dri_bo_alloc(i965->intel.bufmgr,
1311                       "bsd mpc row store",
1312                       width_in_mbs * 96,
1313                       0x1000);
1314     assert(bo);
1315     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
1316     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 1;
1317
1318     gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 0;
1319
1320     gen7_mfd_context->bitplane_read_buffer.valid = !!pic_param->bitplane_present.value;
1321     dri_bo_unreference(gen7_mfd_context->bitplane_read_buffer.bo);
1322     
1323     if (gen7_mfd_context->bitplane_read_buffer.valid) {
1324         int width_in_mbs = ALIGN(pic_param->coded_width, 16) / 16;
1325         int height_in_mbs = ALIGN(pic_param->coded_height, 16) / 16;
1326         int bitplane_width = ALIGN(width_in_mbs, 2) / 2;
1327         int src_w, src_h;
1328         uint8_t *src = NULL, *dst = NULL;
1329
1330         assert(decode_state->bit_plane->buffer);
1331         src = decode_state->bit_plane->buffer;
1332
1333         bo = dri_bo_alloc(i965->intel.bufmgr,
1334                           "VC-1 Bitplane",
1335                           bitplane_width * height_in_mbs,
1336                           0x1000);
1337         assert(bo);
1338         gen7_mfd_context->bitplane_read_buffer.bo = bo;
1339
1340         dri_bo_map(bo, True);
1341         assert(bo->virtual);
1342         dst = bo->virtual;
1343
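        /*
         * Repack the VA bitplane buffer (two macroblocks per byte, in raster
         * order over the whole picture) into the per-row layout expected by
         * the hardware, with a pitch of ALIGN(width_in_mbs, 2) / 2 bytes.
         * For skipped pictures a per-macroblock flag (0x2) is forced on.
         */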
1344         for (src_h = 0; src_h < height_in_mbs; src_h++) {
1345             for(src_w = 0; src_w < width_in_mbs; src_w++) {
1346                 int src_index, dst_index;
1347                 int src_shift;
1348                 uint8_t src_value;
1349
1350                 src_index = (src_h * width_in_mbs + src_w) / 2;
1351                 src_shift = !((src_h * width_in_mbs + src_w) & 1) * 4;
1352                 src_value = ((src[src_index] >> src_shift) & 0xf);
1353
1354                 if (picture_type == GEN7_VC1_SKIPPED_PICTURE){
1355                     src_value |= 0x2;
1356                 }
1357
1358                 dst_index = src_w / 2;
1359                 dst[dst_index] = ((dst[dst_index] >> 4) | (src_value << 4));
1360             }
1361
1362             if (src_w & 1)
1363                 dst[src_w / 2] >>= 4;
1364
1365             dst += bitplane_width;
1366         }
1367
1368         dri_bo_unmap(bo);
1369     } else
1370         gen7_mfd_context->bitplane_read_buffer.bo = NULL;
1371 }
1372
1373 static void
1374 gen7_mfd_vc1_pic_state(VADriverContextP ctx,
1375                        struct decode_state *decode_state,
1376                        struct gen7_mfd_context *gen7_mfd_context)
1377 {
1378     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1379     VAPictureParameterBufferVC1 *pic_param;
1380     struct object_surface *obj_surface;
1381     int alt_pquant_config = 0, alt_pquant_edge_mask = 0, alt_pq;
1382     int dquant, dquantfrm, dqprofile, dqdbedge, dqsbedge, dqbilevel;
1383     int unified_mv_mode;
1384     int ref_field_pic_polarity = 0;
1385     int scale_factor = 0;
1386     int trans_ac_y = 0;
1387     int dmv_surface_valid = 0;
1388     int brfd = 0;
1389     int fcm = 0;
1390     int picture_type;
1391     int profile;
1392     int overlap;
1393     int interpolation_mode = 0;
1394
1395     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1396     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1397
1398     profile = va_to_gen7_vc1_profile[pic_param->sequence_fields.bits.profile];
1399     dquant = pic_param->pic_quantizer_fields.bits.dquant;
1400     dquantfrm = pic_param->pic_quantizer_fields.bits.dq_frame;
1401     dqprofile = pic_param->pic_quantizer_fields.bits.dq_profile;
1402     dqdbedge = pic_param->pic_quantizer_fields.bits.dq_db_edge;
1403     dqsbedge = pic_param->pic_quantizer_fields.bits.dq_sb_edge;
1404     dqbilevel = pic_param->pic_quantizer_fields.bits.dq_binary_level;
1405     alt_pq = pic_param->pic_quantizer_fields.bits.alt_pic_quantizer;
1406
1407     if (dquant == 0) {
1408         alt_pquant_config = 0;
1409         alt_pquant_edge_mask = 0;
1410     } else if (dquant == 2) {
1411         alt_pquant_config = 1;
1412         alt_pquant_edge_mask = 0xf;
1413     } else {
1414         assert(dquant == 1);
1415         if (dquantfrm == 0) {
1416             alt_pquant_config = 0;
1417             alt_pquant_edge_mask = 0;
1418             alt_pq = 0;
1419         } else {
1420             assert(dquantfrm == 1);
1421             alt_pquant_config = 1;
1422
1423             switch (dqprofile) {
1424             case 3:
1425                 if (dqbilevel == 0) {
1426                     alt_pquant_config = 2;
1427                     alt_pquant_edge_mask = 0;
1428                 } else {
1429                     assert(dqbilevel == 1);
1430                     alt_pquant_config = 3;
1431                     alt_pquant_edge_mask = 0;
1432                 }
1433                 break;
1434                 
1435             case 0:
1436                 alt_pquant_edge_mask = 0xf;
1437                 break;
1438
1439             case 1:
1440                 if (dqdbedge == 3)
1441                     alt_pquant_edge_mask = 0x9;
1442                 else
1443                     alt_pquant_edge_mask = (0x3 << dqdbedge);
1444
1445                 break;
1446
1447             case 2:
1448                 alt_pquant_edge_mask = (0x1 << dqsbedge);
1449                 break;
1450
1451             default:
1452                 assert(0);
1453             }
1454         }
1455     }
1456
1457     if (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation) {
1458         assert(pic_param->mv_fields.bits.mv_mode2 < 4);
1459         unified_mv_mode = va_to_gen7_vc1_mv[pic_param->mv_fields.bits.mv_mode2];
1460     } else {
1461         assert(pic_param->mv_fields.bits.mv_mode < 4);
1462         unified_mv_mode = va_to_gen7_vc1_mv[pic_param->mv_fields.bits.mv_mode];
1463     }
1464
1465     if (pic_param->sequence_fields.bits.interlace == 1 &&
1466         pic_param->picture_fields.bits.frame_coding_mode != 0) { /* frame-interlace or field-interlace */
1467         /* FIXME: calculate reference field picture polarity */
1468         assert(0);
1469         ref_field_pic_polarity = 0;
1470     }
1471
1472     if (pic_param->b_picture_fraction < 21)
1473         scale_factor = b_picture_scale_factor[pic_param->b_picture_fraction];
1474
1475     picture_type = va_to_gen7_vc1_pic_type[pic_param->picture_fields.bits.picture_type];
1476     
1477     if (profile == GEN7_VC1_ADVANCED_PROFILE && 
1478         picture_type == GEN7_VC1_I_PICTURE)
1479         picture_type = GEN7_VC1_BI_PICTURE;
1480
1481     if (picture_type == GEN7_VC1_I_PICTURE || picture_type == GEN7_VC1_BI_PICTURE) /* I picture */
1482         trans_ac_y = pic_param->transform_fields.bits.transform_ac_codingset_idx2;
1483     else {
1484         trans_ac_y = pic_param->transform_fields.bits.transform_ac_codingset_idx1;
1485         /*
1486          * 8.3.6.2.1 Transform Type Selection
1487          * If variable-sized transform coding is not enabled,
1488          * then the 8x8 transform shall be used for all blocks.
1489          * It is also an MFX_VC1_PIC_STATE requirement.
1490          */
1491         if (pic_param->transform_fields.bits.variable_sized_transform_flag == 0) {
1492             pic_param->transform_fields.bits.mb_level_transform_type_flag   = 1;
1493             pic_param->transform_fields.bits.frame_level_transform_type     = 0;
1494         }
1495     }
1496
1497
1498     if (picture_type == GEN7_VC1_B_PICTURE) {
1499         struct gen7_vc1_surface *gen7_vc1_surface = NULL;
1500
1501         obj_surface = decode_state->reference_objects[1];
1502
1503         if (obj_surface)
1504             gen7_vc1_surface = obj_surface->private_data;
1505
1506         if (!gen7_vc1_surface || 
1507             (va_to_gen7_vc1_pic_type[gen7_vc1_surface->picture_type] == GEN7_VC1_I_PICTURE ||
1508              va_to_gen7_vc1_pic_type[gen7_vc1_surface->picture_type] == GEN7_VC1_BI_PICTURE))
1509             dmv_surface_valid = 0;
1510         else
1511             dmv_surface_valid = 1;
1512     }
1513
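    /*
     * Map frame_coding_mode onto the hardware FCM field: progressive (0) and
     * frame-interlace (1) pass through; field-interlace pictures additionally
     * encode the field order (2 = top field first, 3 = bottom field first).
     */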
1514     assert(pic_param->picture_fields.bits.frame_coding_mode < 3);
1515
1516     if (pic_param->picture_fields.bits.frame_coding_mode < 2)
1517         fcm = pic_param->picture_fields.bits.frame_coding_mode;
1518     else {
1519         if (pic_param->picture_fields.bits.top_field_first)
1520             fcm = 2;
1521         else
1522             fcm = 3;
1523     }
1524
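    /*
     * Backward reference frame distance for B pictures: scale the reference
     * distance by the BFRACTION scale factor and subtract it from the total.
     */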
1525     if (pic_param->picture_fields.bits.picture_type == GEN7_VC1_B_PICTURE) { /* B picture */
1526         brfd = pic_param->reference_fields.bits.reference_distance;
1527         brfd = (scale_factor * brfd) >> 8;
1528         brfd = pic_param->reference_fields.bits.reference_distance - brfd - 1;
1529
1530         if (brfd < 0)
1531             brfd = 0;
1532     }
1533
1534     overlap = 0;
1535     if (profile != GEN7_VC1_ADVANCED_PROFILE) {
1536         if (pic_param->pic_quantizer_fields.bits.pic_quantizer_scale >= 9 &&
1537             pic_param->picture_fields.bits.picture_type != GEN7_VC1_B_PICTURE) {
1538             overlap = 1;
1539         }
1540     } else {
1541         if (pic_param->picture_fields.bits.picture_type == GEN7_VC1_P_PICTURE &&
1542             pic_param->pic_quantizer_fields.bits.pic_quantizer_scale >= 9) {
1543             overlap = 1;
1544         }
1545         if (pic_param->picture_fields.bits.picture_type == GEN7_VC1_I_PICTURE ||
1546             pic_param->picture_fields.bits.picture_type == GEN7_VC1_BI_PICTURE) {
1547             if (pic_param->pic_quantizer_fields.bits.pic_quantizer_scale >= 9) {
1548                 overlap = 1;
1549             } else if (va_to_gen7_vc1_condover[pic_param->conditional_overlap_flag] == 2 ||
1550                        va_to_gen7_vc1_condover[pic_param->conditional_overlap_flag] == 3) {
1551                 overlap = 1;
1552             }
1553         }
1554     }
1555
1556     assert(pic_param->conditional_overlap_flag < 3);
1557     assert(pic_param->mv_fields.bits.mv_table < 4); /* FIXME: interlace mode */
1558
1559     if (pic_param->mv_fields.bits.mv_mode == VAMvMode1MvHalfPelBilinear ||
1560         (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation &&
1561          pic_param->mv_fields.bits.mv_mode2 == VAMvMode1MvHalfPelBilinear))
1562         interpolation_mode = 9; /* Half-pel bilinear */
1563     else if (pic_param->mv_fields.bits.mv_mode == VAMvMode1MvHalfPel ||
1564              (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation &&
1565               pic_param->mv_fields.bits.mv_mode2 == VAMvMode1MvHalfPel))
1566         interpolation_mode = 1; /* Half-pel bicubic */
1567     else
1568         interpolation_mode = 0; /* Quarter-pel bicubic */
1569
1570     BEGIN_BCS_BATCH(batch, 6);
1571     OUT_BCS_BATCH(batch, MFD_VC1_LONG_PIC_STATE | (6 - 2));
1572     OUT_BCS_BATCH(batch,
1573                   (((ALIGN(pic_param->coded_height, 16) / 16) - 1) << 16) |
1574                   ((ALIGN(pic_param->coded_width, 16) / 16) - 1));
1575     OUT_BCS_BATCH(batch,
1576                   ((ALIGN(pic_param->coded_width, 16) / 16 + 1) / 2 - 1) << 24 |
1577                   dmv_surface_valid << 15 |
1578                   (pic_param->pic_quantizer_fields.bits.quantizer == 0) << 14 | /* implicit quantizer */
1579                   pic_param->rounding_control << 13 |
1580                   pic_param->sequence_fields.bits.syncmarker << 12 |
1581                   interpolation_mode << 8 |
1582                   0 << 7 | /* FIXME: scale up or down ??? */
1583                   pic_param->range_reduction_frame << 6 |
1584                   pic_param->entrypoint_fields.bits.loopfilter << 5 |
1585                   overlap << 4 |
1586                   !pic_param->picture_fields.bits.is_first_field << 3 |
1587                   (pic_param->sequence_fields.bits.profile == 3) << 0);
1588     OUT_BCS_BATCH(batch,
1589                   va_to_gen7_vc1_condover[pic_param->conditional_overlap_flag] << 29 |
1590                   picture_type << 26 |
1591                   fcm << 24 |
1592                   alt_pq << 16 |
1593                   pic_param->pic_quantizer_fields.bits.pic_quantizer_scale << 8 |
1594                   scale_factor << 0);
1595     OUT_BCS_BATCH(batch,
1596                   unified_mv_mode << 28 |
1597                   pic_param->mv_fields.bits.four_mv_switch << 27 |
1598                   pic_param->fast_uvmc_flag << 26 |
1599                   ref_field_pic_polarity << 25 |
1600                   pic_param->reference_fields.bits.num_reference_pictures << 24 |
1601                   pic_param->reference_fields.bits.reference_distance << 20 |
1602                   pic_param->reference_fields.bits.reference_distance << 16 | /* FIXME: ??? */
1603                   pic_param->mv_fields.bits.extended_dmv_range << 10 |
1604                   pic_param->mv_fields.bits.extended_mv_range << 8 |
1605                   alt_pquant_edge_mask << 4 |
1606                   alt_pquant_config << 2 |
1607                   pic_param->pic_quantizer_fields.bits.half_qp << 1 |                  
1608                   pic_param->pic_quantizer_fields.bits.pic_quantizer_type << 0);
1609     OUT_BCS_BATCH(batch,
1610                   !!pic_param->bitplane_present.value << 31 |
1611                   !pic_param->bitplane_present.flags.bp_forward_mb << 30 |
1612                   !pic_param->bitplane_present.flags.bp_mv_type_mb << 29 |
1613                   !pic_param->bitplane_present.flags.bp_skip_mb << 28 |
1614                   !pic_param->bitplane_present.flags.bp_direct_mb << 27 |
1615                   !pic_param->bitplane_present.flags.bp_overflags << 26 |
1616                   !pic_param->bitplane_present.flags.bp_ac_pred << 25 |
1617                   !pic_param->bitplane_present.flags.bp_field_tx << 24 |
1618                   pic_param->mv_fields.bits.mv_table << 20 |
1619                   pic_param->mv_fields.bits.four_mv_block_pattern_table << 18 |
1620                   pic_param->mv_fields.bits.two_mv_block_pattern_table << 16 |
1621                   pic_param->transform_fields.bits.frame_level_transform_type << 12 |                  
1622                   pic_param->transform_fields.bits.mb_level_transform_type_flag << 11 |
1623                   pic_param->mb_mode_table << 8 |
1624                   trans_ac_y << 6 |
1625                   pic_param->transform_fields.bits.transform_ac_codingset_idx1 << 4 |
1626                   pic_param->transform_fields.bits.intra_transform_dc_table << 3 |
1627                   pic_param->cbp_table << 0);
1628     ADVANCE_BCS_BATCH(batch);
1629 }
1630
1631 static void
1632 gen7_mfd_vc1_pred_pipe_state(VADriverContextP ctx,
1633                              struct decode_state *decode_state,
1634                              struct gen7_mfd_context *gen7_mfd_context)
1635 {
1636     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1637     VAPictureParameterBufferVC1 *pic_param;
1638     int intensitycomp_single;
1639
1640     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1641     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1642
1645     intensitycomp_single = (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation);
1646
1647     BEGIN_BCS_BATCH(batch, 6);
1648     OUT_BCS_BATCH(batch, MFX_VC1_PRED_PIPE_STATE | (6 - 2));
1649     OUT_BCS_BATCH(batch,
1650                   0 << 14 | /* FIXME: double ??? */
1651                   0 << 12 |
1652                   intensitycomp_single << 10 |
1653                   intensitycomp_single << 8 |
1654                   0 << 4 | /* FIXME: interlace mode */
1655                   0);
1656     OUT_BCS_BATCH(batch,
1657                   pic_param->luma_shift << 16 |
1658                   pic_param->luma_scale << 0); /* FIXME: Luma Scaling */
1659     OUT_BCS_BATCH(batch, 0);
1660     OUT_BCS_BATCH(batch, 0);
1661     OUT_BCS_BATCH(batch, 0);
1662     ADVANCE_BCS_BATCH(batch);
1663 }
1664
1665
1666 static void
1667 gen7_mfd_vc1_directmode_state(VADriverContextP ctx,
1668                               struct decode_state *decode_state,
1669                               struct gen7_mfd_context *gen7_mfd_context)
1670 {
1671     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1672     struct object_surface *obj_surface;
1673     dri_bo *dmv_read_buffer = NULL, *dmv_write_buffer = NULL;
1674
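    /* The current picture writes its direct MVs; the second reference picture
     * (the backward reference for B pictures) supplies the direct MVs to read. */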
1675     obj_surface = decode_state->render_object;
1676
1677     if (obj_surface && obj_surface->private_data) {
1678         dmv_write_buffer = ((struct gen7_vc1_surface *)(obj_surface->private_data))->dmv;
1679     }
1680
1681     obj_surface = decode_state->reference_objects[1];
1682
1683     if (obj_surface && obj_surface->private_data) {
1684         dmv_read_buffer = ((struct gen7_vc1_surface *)(obj_surface->private_data))->dmv;
1685     }
1686
1687     BEGIN_BCS_BATCH(batch, 3);
1688     OUT_BCS_BATCH(batch, MFX_VC1_DIRECTMODE_STATE | (3 - 2));
1689
1690     if (dmv_write_buffer)
1691         OUT_BCS_RELOC(batch, dmv_write_buffer,
1692                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
1693                       0);
1694     else
1695         OUT_BCS_BATCH(batch, 0);
1696
1697     if (dmv_read_buffer)
1698         OUT_BCS_RELOC(batch, dmv_read_buffer,
1699                       I915_GEM_DOMAIN_INSTRUCTION, 0,
1700                       0);
1701     else
1702         OUT_BCS_BATCH(batch, 0);
1703                   
1704     ADVANCE_BCS_BATCH(batch);
1705 }
1706
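/*
 * For the advanced profile (profile == 3) the slice data may contain
 * start-code emulation prevention bytes (00 00 03).  Adjust the bit offset of
 * the first macroblock so that it refers to the escaped data as stored in the
 * slice data buffer.
 */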
1707 static int
1708 gen7_mfd_vc1_get_macroblock_bit_offset(uint8_t *buf, int in_slice_data_bit_offset, int profile)
1709 {
1710     int out_slice_data_bit_offset;
1711     int slice_header_size = in_slice_data_bit_offset / 8;
1712     int i, j;
1713
1714     if (profile != 3)
1715         out_slice_data_bit_offset = in_slice_data_bit_offset;
1716     else {
1717         for (i = 0, j = 0; i < slice_header_size; i++, j++) {
1718             if (!buf[j] && !buf[j + 1] && buf[j + 2] == 3 && buf[j + 3] < 4) {
1719                 i++, j += 2;
1720             }
1721         }
1722
1723         out_slice_data_bit_offset = 8 * j + in_slice_data_bit_offset % 8;
1724     }
1725
1726     return out_slice_data_bit_offset;
1727 }
1728
1729 static void
1730 gen7_mfd_vc1_bsd_object(VADriverContextP ctx,
1731                         VAPictureParameterBufferVC1 *pic_param,
1732                         VASliceParameterBufferVC1 *slice_param,
1733                         VASliceParameterBufferVC1 *next_slice_param,
1734                         dri_bo *slice_data_bo,
1735                         struct gen7_mfd_context *gen7_mfd_context)
1736 {
1737     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1738     int next_slice_start_vert_pos;
1739     int macroblock_offset;
1740     uint8_t *slice_data = NULL;
1741
1742     dri_bo_map(slice_data_bo, 0);
1743     slice_data = (uint8_t *)(slice_data_bo->virtual + slice_param->slice_data_offset);
1744     macroblock_offset = gen7_mfd_vc1_get_macroblock_bit_offset(slice_data, 
1745                                                                slice_param->macroblock_offset,
1746                                                                pic_param->sequence_fields.bits.profile);
1747     dri_bo_unmap(slice_data_bo);
1748
1749     if (next_slice_param)
1750         next_slice_start_vert_pos = next_slice_param->slice_vertical_position;
1751     else
1752         next_slice_start_vert_pos = ALIGN(pic_param->coded_height, 16) / 16;
1753
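    /*
     * The BSD object takes the slice data offset in whole bytes; the
     * remaining sub-byte part of the macroblock bit offset is programmed in
     * the last dword.
     */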
1754     BEGIN_BCS_BATCH(batch, 5);
1755     OUT_BCS_BATCH(batch, MFD_VC1_BSD_OBJECT | (5 - 2));
1756     OUT_BCS_BATCH(batch, 
1757                   slice_param->slice_data_size - (macroblock_offset >> 3));
1758     OUT_BCS_BATCH(batch, 
1759                   slice_param->slice_data_offset + (macroblock_offset >> 3));
1760     OUT_BCS_BATCH(batch,
1761                   slice_param->slice_vertical_position << 16 |
1762                   next_slice_start_vert_pos << 0);
1763     OUT_BCS_BATCH(batch,
1764                   (macroblock_offset & 0x7));
1765     ADVANCE_BCS_BATCH(batch);
1766 }
1767
1768 static void
1769 gen7_mfd_vc1_decode_picture(VADriverContextP ctx,
1770                             struct decode_state *decode_state,
1771                             struct gen7_mfd_context *gen7_mfd_context)
1772 {
1773     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1774     VAPictureParameterBufferVC1 *pic_param;
1775     VASliceParameterBufferVC1 *slice_param, *next_slice_param, *next_slice_group_param;
1776     dri_bo *slice_data_bo;
1777     int i, j;
1778
1779     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1780     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1781
1782     gen7_mfd_vc1_decode_init(ctx, decode_state, gen7_mfd_context);
1783     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
1784     intel_batchbuffer_emit_mi_flush(batch);
1785     gen7_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
1786     gen7_mfd_surface_state(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
1787     gen7_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
1788     gen7_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
1789     gen7_mfd_vc1_pic_state(ctx, decode_state, gen7_mfd_context);
1790     gen7_mfd_vc1_pred_pipe_state(ctx, decode_state, gen7_mfd_context);
1791     gen7_mfd_vc1_directmode_state(ctx, decode_state, gen7_mfd_context);
1792
1793     for (j = 0; j < decode_state->num_slice_params; j++) {
1794         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
1795         slice_param = (VASliceParameterBufferVC1 *)decode_state->slice_params[j]->buffer;
1796         slice_data_bo = decode_state->slice_datas[j]->bo;
1797         gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_VC1, gen7_mfd_context);
1798
1799         if (j == decode_state->num_slice_params - 1)
1800             next_slice_group_param = NULL;
1801         else
1802             next_slice_group_param = (VASliceParameterBufferVC1 *)decode_state->slice_params[j + 1]->buffer;
1803
1804         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1805             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
1806
1807             if (i < decode_state->slice_params[j]->num_elements - 1)
1808                 next_slice_param = slice_param + 1;
1809             else
1810                 next_slice_param = next_slice_group_param;
1811
1812             gen7_mfd_vc1_bsd_object(ctx, pic_param, slice_param, next_slice_param, slice_data_bo, gen7_mfd_context);
1813             slice_param++;
1814         }
1815     }
1816
1817     intel_batchbuffer_end_atomic(batch);
1818     intel_batchbuffer_flush(batch);
1819 }
1820
1821 static void
1822 gen7_mfd_jpeg_decode_init(VADriverContextP ctx,
1823                           struct decode_state *decode_state,
1824                           struct gen7_mfd_context *gen7_mfd_context)
1825 {
1826     struct object_surface *obj_surface;
1827     VAPictureParameterBufferJPEGBaseline *pic_param;
1828     int subsampling = SUBSAMPLE_YUV420;
1829
1830     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
1831
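    /* Derive the chroma subsampling for surface allocation from the
     * per-component sampling factors. */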
1832     if (pic_param->num_components == 1)
1833         subsampling = SUBSAMPLE_YUV400;
1834     else if (pic_param->num_components == 3) {
1835         int h1 = pic_param->components[0].h_sampling_factor;
1836         int h2 = pic_param->components[1].h_sampling_factor;
1837         int h3 = pic_param->components[2].h_sampling_factor;
1838         int v1 = pic_param->components[0].v_sampling_factor;
1839         int v2 = pic_param->components[1].v_sampling_factor;
1840         int v3 = pic_param->components[2].v_sampling_factor;
1841
1842         if (h1 == 2 && h2 == 1 && h3 == 1 &&
1843             v1 == 2 && v2 == 1 && v3 == 1)
1844             subsampling = SUBSAMPLE_YUV420;
1845         else if (h1 == 2 && h2 == 1 && h3 == 1 &&
1846                  v1 == 1 && v2 == 1 && v3 == 1)
1847             subsampling = SUBSAMPLE_YUV422H;
1848         else if (h1 == 1 && h2 == 1 && h3 == 1 &&
1849                  v1 == 1 && v2 == 1 && v3 == 1)
1850             subsampling = SUBSAMPLE_YUV444;
1851         else if (h1 == 4 && h2 == 1 && h3 == 1 &&
1852                  v1 == 1 && v2 == 1 && v3 == 1)
1853             subsampling = SUBSAMPLE_YUV411;
1854         else if (h1 == 1 && h2 == 1 && h3 == 1 &&
1855                  v1 == 2 && v2 == 1 && v3 == 1)
1856             subsampling = SUBSAMPLE_YUV422V;
1857         else if (h1 == 2 && h2 == 1 && h3 == 1 &&
1858                  v1 == 2 && v2 == 2 && v3 == 2)
1859             subsampling = SUBSAMPLE_YUV422H;
1860         else if (h1 == 2 && h2 == 2 && h3 == 2 &&
1861                  v1 == 2 && v2 == 1 && v3 == 1)
1862             subsampling = SUBSAMPLE_YUV422V;
1863         else
1864             assert(0);
1865     } else {
1866         assert(0);
1867     }
1868
1869     /* Current decoded picture */
1870     obj_surface = decode_state->render_object;
1871     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('I','M','C','1'), subsampling);
1872
1873     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
1874     gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
1875     dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
1876     gen7_mfd_context->pre_deblocking_output.valid = 1;
1877
1878     gen7_mfd_context->post_deblocking_output.bo = NULL;
1879     gen7_mfd_context->post_deblocking_output.valid = 0;
1880
1881     gen7_mfd_context->intra_row_store_scratch_buffer.bo = NULL;
1882     gen7_mfd_context->intra_row_store_scratch_buffer.valid = 0;
1883
1884     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;
1885     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 0;
1886
1887     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;
1888     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 0;
1889
1890     gen7_mfd_context->mpr_row_store_scratch_buffer.bo = NULL;
1891     gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 0;
1892
1893     gen7_mfd_context->bitplane_read_buffer.bo = NULL;
1894     gen7_mfd_context->bitplane_read_buffer.valid = 0;
1895 }
1896
1897 static const int va_to_gen7_jpeg_rotation[4] = {
1898     GEN7_JPEG_ROTATION_0,
1899     GEN7_JPEG_ROTATION_90,
1900     GEN7_JPEG_ROTATION_180,
1901     GEN7_JPEG_ROTATION_270
1902 };
1903
1904 static void
1905 gen7_mfd_jpeg_pic_state(VADriverContextP ctx,
1906                         struct decode_state *decode_state,
1907                         struct gen7_mfd_context *gen7_mfd_context)
1908 {
1909     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1910     VAPictureParameterBufferJPEGBaseline *pic_param;
1911     int chroma_type = GEN7_YUV420;
1912     int frame_width_in_blks;
1913     int frame_height_in_blks;
1914
1915     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1916     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
1917
1918     if (pic_param->num_components == 1)
1919         chroma_type = GEN7_YUV400;
1920     else if (pic_param->num_components == 3) {
1921         int h1 = pic_param->components[0].h_sampling_factor;
1922         int h2 = pic_param->components[1].h_sampling_factor;
1923         int h3 = pic_param->components[2].h_sampling_factor;
1924         int v1 = pic_param->components[0].v_sampling_factor;
1925         int v2 = pic_param->components[1].v_sampling_factor;
1926         int v3 = pic_param->components[2].v_sampling_factor;
1927
1928         if (h1 == 2 && h2 == 1 && h3 == 1 &&
1929             v1 == 2 && v2 == 1 && v3 == 1)
1930             chroma_type = GEN7_YUV420;
1931         else if (h1 == 2 && h2 == 1 && h3 == 1 &&
1932                  v1 == 1 && v2 == 1 && v3 == 1)
1933             chroma_type = GEN7_YUV422H_2Y;
1934         else if (h1 == 1 && h2 == 1 && h3 == 1 &&
1935                  v1 == 1 && v2 == 1 && v3 == 1)
1936             chroma_type = GEN7_YUV444;
1937         else if (h1 == 4 && h2 == 1 && h3 == 1 &&
1938                  v1 == 1 && v2 == 1 && v3 == 1)
1939             chroma_type = GEN7_YUV411;
1940         else if (h1 == 1 && h2 == 1 && h3 == 1 &&
1941                  v1 == 2 && v2 == 1 && v3 == 1)
1942             chroma_type = GEN7_YUV422V_2Y;
1943         else if (h1 == 2 && h2 == 1 && h3 == 1 &&
1944                  v1 == 2 && v2 == 2 && v3 == 2)
1945             chroma_type = GEN7_YUV422H_4Y;
1946         else if (h1 == 2 && h2 == 2 && h3 == 2 &&
1947                  v1 == 2 && v2 == 1 && v3 == 1)
1948             chroma_type = GEN7_YUV422V_4Y;
1949         else
1950             assert(0);
1951     }
1952
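    /*
     * MFX_JPEG_PIC_STATE takes the frame size in 8x8 block units, rounded up
     * to a whole MCU; the MCU size depends on the chroma type.
     */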
1953     if (chroma_type == GEN7_YUV400 ||
1954         chroma_type == GEN7_YUV444 ||
1955         chroma_type == GEN7_YUV422V_2Y) {
1956         frame_width_in_blks = ((pic_param->picture_width + 7) / 8);
1957         frame_height_in_blks = ((pic_param->picture_height + 7) / 8);
1958     } else if (chroma_type == GEN7_YUV411) {
1959         frame_width_in_blks = ((pic_param->picture_width + 31) / 32) * 4;
1960         frame_height_in_blks = ((pic_param->picture_height + 31) / 32) * 4;
1961     } else {
1962         frame_width_in_blks = ((pic_param->picture_width + 15) / 16) * 2;
1963         frame_height_in_blks = ((pic_param->picture_height + 15) / 16) * 2;
1964     }
1965
1966     BEGIN_BCS_BATCH(batch, 3);
1967     OUT_BCS_BATCH(batch, MFX_JPEG_PIC_STATE | (3 - 2));
1968     OUT_BCS_BATCH(batch,
1969                   (va_to_gen7_jpeg_rotation[0] << 4) |    /* without rotation */
1970                   (chroma_type << 0));
1971     OUT_BCS_BATCH(batch,
1972                   ((frame_height_in_blks - 1) << 16) |   /* FrameHeightInBlks */
1973                   ((frame_width_in_blks - 1) << 0));    /* FrameWidthInBlks */
1974     ADVANCE_BCS_BATCH(batch);
1975 }
1976
1977 static const int va_to_gen7_jpeg_hufftable[2] = {
1978     MFX_HUFFTABLE_ID_Y,
1979     MFX_HUFFTABLE_ID_UV
1980 };
1981
1982 static void
1983 gen7_mfd_jpeg_huff_table_state(VADriverContextP ctx,
1984                                struct decode_state *decode_state,
1985                                struct gen7_mfd_context *gen7_mfd_context,
1986                                int num_tables)
1987 {
1988     VAHuffmanTableBufferJPEGBaseline *huffman_table;
1989     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1990     int index;
1991
1992     if (!decode_state->huffman_table || !decode_state->huffman_table->buffer)
1993         return;
1994
1995     huffman_table = (VAHuffmanTableBufferJPEGBaseline *)decode_state->huffman_table->buffer;
1996
1997     for (index = 0; index < num_tables; index++) {
1998         int id = va_to_gen7_jpeg_hufftable[index];
1999         if (!huffman_table->load_huffman_table[index])
2000             continue;
2001         BEGIN_BCS_BATCH(batch, 53);
2002         OUT_BCS_BATCH(batch, MFX_JPEG_HUFF_TABLE_STATE | (53 - 2));
2003         OUT_BCS_BATCH(batch, id);
2004         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].num_dc_codes, 12);
2005         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].dc_values, 12);
2006         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].num_ac_codes, 16);
2007         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].ac_values, 164);
2008         ADVANCE_BCS_BATCH(batch);
2009     }
2010 }
2011
2012 static const int va_to_gen7_jpeg_qm[5] = {
2013     -1,
2014     MFX_QM_JPEG_LUMA_Y_QUANTIZER_MATRIX,
2015     MFX_QM_JPEG_CHROMA_CB_QUANTIZER_MATRIX,
2016     MFX_QM_JPEG_CHROMA_CR_QUANTIZER_MATRIX,
2017     MFX_QM_JPEG_ALPHA_QUANTIZER_MATRIX
2018 };
2019
2020 static void
2021 gen7_mfd_jpeg_qm_state(VADriverContextP ctx,
2022                        struct decode_state *decode_state,
2023                        struct gen7_mfd_context *gen7_mfd_context)
2024 {
2025     VAPictureParameterBufferJPEGBaseline *pic_param;
2026     VAIQMatrixBufferJPEGBaseline *iq_matrix;
2027     int index;
2028
2029     if (!decode_state->iq_matrix || !decode_state->iq_matrix->buffer)
2030         return;
2031
2032     iq_matrix = (VAIQMatrixBufferJPEGBaseline *)decode_state->iq_matrix->buffer;
2033     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
2034
2035     assert(pic_param->num_components <= 3);
2036
2037     for (index = 0; index < pic_param->num_components; index++) {
2038         int qm_type = va_to_gen7_jpeg_qm[pic_param->components[index].component_id - pic_param->components[0].component_id + 1];
2039         unsigned char *qm = iq_matrix->quantiser_table[pic_param->components[index].quantiser_table_selector];
2040         unsigned char raster_qm[64];
2041         int j;
2042
2043         if (!iq_matrix->load_quantiser_table[pic_param->components[index].quantiser_table_selector])
2044             continue;
2045
2046         for (j = 0; j < 64; j++)
2047             raster_qm[zigzag_direct[j]] = qm[j];
2048
2049         gen7_mfd_qm_state(ctx, qm_type, raster_qm, 64, gen7_mfd_context);
2050     }
2051 }
2052
2053 static void
2054 gen7_mfd_jpeg_bsd_object(VADriverContextP ctx,
2055                          VAPictureParameterBufferJPEGBaseline *pic_param,
2056                          VASliceParameterBufferJPEGBaseline *slice_param,
2057                          VASliceParameterBufferJPEGBaseline *next_slice_param,
2058                          dri_bo *slice_data_bo,
2059                          struct gen7_mfd_context *gen7_mfd_context)
2060 {
2061     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2062     int scan_component_mask = 0;
2063     int i;
2064
2065     assert(slice_param->num_components > 0);
2066     assert(slice_param->num_components < 4);
2067     assert(slice_param->num_components <= pic_param->num_components);
2068
2069     for (i = 0; i < slice_param->num_components; i++) {
2070         switch (slice_param->components[i].component_selector - pic_param->components[0].component_id + 1) {
2071         case 1:
2072             scan_component_mask |= (1 << 0);
2073             break;
2074         case 2:
2075             scan_component_mask |= (1 << 1);
2076             break;
2077         case 3:
2078             scan_component_mask |= (1 << 2);
2079             break;
2080         default:
2081             assert(0);
2082             break;
2083         }
2084     }
2085
2086     BEGIN_BCS_BATCH(batch, 6);
2087     OUT_BCS_BATCH(batch, MFD_JPEG_BSD_OBJECT | (6 - 2));
2088     OUT_BCS_BATCH(batch, 
2089                   slice_param->slice_data_size);
2090     OUT_BCS_BATCH(batch, 
2091                   slice_param->slice_data_offset);
2092     OUT_BCS_BATCH(batch,
2093                   slice_param->slice_horizontal_position << 16 |
2094                   slice_param->slice_vertical_position << 0);
2095     OUT_BCS_BATCH(batch,
2096                   ((slice_param->num_components != 1) << 30) |  /* interleaved */
2097                   (scan_component_mask << 27) |                 /* scan components */
2098                   (0 << 26) |   /* disable interrupt allowed */
2099                   (slice_param->num_mcus << 0));                /* MCU count */
2100     OUT_BCS_BATCH(batch,
2101                   (slice_param->restart_interval << 0));    /* RestartInterval */
2102     ADVANCE_BCS_BATCH(batch);
2103 }
2104
2105 /* Workaround for JPEG decoding on Ivybridge */
2106
2107 VAStatus 
2108 i965_DestroySurfaces(VADriverContextP ctx,
2109                      VASurfaceID *surface_list,
2110                      int num_surfaces);
2111 VAStatus 
2112 i965_CreateSurfaces(VADriverContextP ctx,
2113                     int width,
2114                     int height,
2115                     int format,
2116                     int num_surfaces,
2117                     VASurfaceID *surfaces);
2118
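/*
 * A tiny pre-canned AVC bitstream (a 16x16 intra clip) that is decoded
 * through the AVC path before each real JPEG picture; see the workaround
 * note above.
 */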
2119 static struct {
2120     int width;
2121     int height;
2122     unsigned char data[32];
2123     int data_size;
2124     int data_bit_offset;
2125     int qp;
2126 } gen7_jpeg_wa_clip = {
2127     16,
2128     16,
2129     {
2130         0x65, 0xb8, 0x40, 0x32, 0x13, 0xfd, 0x06, 0x6c,
2131         0xfc, 0x0a, 0x50, 0x71, 0x5c, 0x00
2132     },
2133     14,
2134     40,
2135     28,
2136 };
2137
2138 static void
2139 gen7_jpeg_wa_init(VADriverContextP ctx,
2140                   struct gen7_mfd_context *gen7_mfd_context)
2141 {
2142     struct i965_driver_data *i965 = i965_driver_data(ctx);
2143     VAStatus status;
2144     struct object_surface *obj_surface;
2145
2146     if (gen7_mfd_context->jpeg_wa_surface_id != VA_INVALID_SURFACE)
2147         i965_DestroySurfaces(ctx,
2148                              &gen7_mfd_context->jpeg_wa_surface_id,
2149                              1);
2150
2151     status = i965_CreateSurfaces(ctx,
2152                                  gen7_jpeg_wa_clip.width,
2153                                  gen7_jpeg_wa_clip.height,
2154                                  VA_RT_FORMAT_YUV420,
2155                                  1,
2156                                  &gen7_mfd_context->jpeg_wa_surface_id);
2157     assert(status == VA_STATUS_SUCCESS);
2158
2159     obj_surface = SURFACE(gen7_mfd_context->jpeg_wa_surface_id);
2160     assert(obj_surface);
2161     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N', 'V', '1', '2'), SUBSAMPLE_YUV420);
2162
2163     if (!gen7_mfd_context->jpeg_wa_slice_data_bo) {
2164         gen7_mfd_context->jpeg_wa_slice_data_bo = dri_bo_alloc(i965->intel.bufmgr,
2165                                                                "JPEG WA data",
2166                                                                0x1000,
2167                                                                0x1000);
2168         dri_bo_subdata(gen7_mfd_context->jpeg_wa_slice_data_bo,
2169                        0,
2170                        gen7_jpeg_wa_clip.data_size,
2171                        gen7_jpeg_wa_clip.data);
2172     }
2173 }
2174
2175 static void
2176 gen7_jpeg_wa_pipe_mode_select(VADriverContextP ctx,
2177                               struct gen7_mfd_context *gen7_mfd_context)
2178 {
2179     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2180
2181     BEGIN_BCS_BATCH(batch, 5);
2182     OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (5 - 2));
2183     OUT_BCS_BATCH(batch,
2184                   (MFX_LONG_MODE << 17) | /* currently only the long format is supported */
2185                   (MFD_MODE_VLD << 15) | /* VLD mode */
2186                   (0 << 10) | /* disable Stream-Out */
2187                   (0 << 9)  | /* Post Deblocking Output */
2188                   (1 << 8)  | /* Pre Deblocking Output */
2189                   (0 << 5)  | /* not in stitch mode */
2190                   (MFX_CODEC_DECODE << 4)  | /* decoding mode */
2191                   (MFX_FORMAT_AVC << 0));
2192     OUT_BCS_BATCH(batch,
2193                   (0 << 4)  | /* terminate if AVC motion and POC table error occurs */
2194                   (0 << 3)  | /* terminate if AVC mbdata error occurs */
2195                   (0 << 2)  | /* terminate if AVC CABAC/CAVLC decode error occurs */
2196                   (0 << 1)  |
2197                   (0 << 0));
2198     OUT_BCS_BATCH(batch, 0); /* pic status/error report id */ 
2199     OUT_BCS_BATCH(batch, 0); /* reserved */
2200     ADVANCE_BCS_BATCH(batch);
2201 }
2202
2203 static void
2204 gen7_jpeg_wa_surface_state(VADriverContextP ctx,
2205                            struct gen7_mfd_context *gen7_mfd_context)
2206 {
2207     struct i965_driver_data *i965 = i965_driver_data(ctx);
2208     struct object_surface *obj_surface = SURFACE(gen7_mfd_context->jpeg_wa_surface_id);
2209     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2210
2211     BEGIN_BCS_BATCH(batch, 6);
2212     OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
2213     OUT_BCS_BATCH(batch, 0);
2214     OUT_BCS_BATCH(batch,
2215                   ((obj_surface->orig_width - 1) << 18) |
2216                   ((obj_surface->orig_height - 1) << 4));
2217     OUT_BCS_BATCH(batch,
2218                   (MFX_SURFACE_PLANAR_420_8 << 28) | /* 420 planar YUV surface */
2219                   (1 << 27) | /* interleave chroma, set to 0 for JPEG */
2220                   (0 << 22) | /* surface object control state, ignored */
2221                   ((obj_surface->width - 1) << 3) | /* pitch */
2222                   (0 << 2)  | /* must be 0 */
2223                   (1 << 1)  | /* must be tiled */
2224                   (I965_TILEWALK_YMAJOR << 0));  /* tile walk, must be 1 */
2225     OUT_BCS_BATCH(batch,
2226                   (0 << 16) | /* X offset for U(Cb), must be 0 */
2227                   (obj_surface->y_cb_offset << 0)); /* Y offset for U(Cb) */
2228     OUT_BCS_BATCH(batch,
2229                   (0 << 16) | /* X offset for V(Cr), must be 0 */
2230                   (0 << 0)); /* Y offset for V(Cr), must be 0 for video codec, non-zero for JPEG */
2231     ADVANCE_BCS_BATCH(batch);
2232 }
2233
2234 static void
2235 gen7_jpeg_wa_pipe_buf_addr_state(VADriverContextP ctx,
2236                                  struct gen7_mfd_context *gen7_mfd_context)
2237 {
2238     struct i965_driver_data *i965 = i965_driver_data(ctx);
2239     struct object_surface *obj_surface = SURFACE(gen7_mfd_context->jpeg_wa_surface_id);
2240     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2241     dri_bo *intra_bo;
2242     int i;
2243
2244     intra_bo = dri_bo_alloc(i965->intel.bufmgr,
2245                             "intra row store",
2246                             128 * 64,
2247                             0x1000);
2248
2249     BEGIN_BCS_BATCH(batch, 24);
2250     OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (24 - 2));
2251     OUT_BCS_RELOC(batch,
2252                   obj_surface->bo,
2253                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2254                   0);
2255     
2256     OUT_BCS_BATCH(batch, 0); /* post deblocking */
2257
2258     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
2259     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
2260
2261     OUT_BCS_RELOC(batch,
2262                   intra_bo,
2263                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2264                   0);
2265
2266     OUT_BCS_BATCH(batch, 0);
2267
2268     /* DW 7..22 */
2269     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2270         OUT_BCS_BATCH(batch, 0);
2271     }
2272
2273     OUT_BCS_BATCH(batch, 0);   /* ignore DW23 for decoding */
2274     ADVANCE_BCS_BATCH(batch);
2275
2276     dri_bo_unreference(intra_bo);
2277 }
2278
2279 static void
2280 gen7_jpeg_wa_bsp_buf_base_addr_state(VADriverContextP ctx,
2281                                      struct gen7_mfd_context *gen7_mfd_context)
2282 {
2283     struct i965_driver_data *i965 = i965_driver_data(ctx);
2284     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2285     dri_bo *bsd_mpc_bo, *mpr_bo;
2286
2287     bsd_mpc_bo = dri_bo_alloc(i965->intel.bufmgr,
2288                               "bsd mpc row store",
2289                               11520, /* 1.5 * 120 * 64 */
2290                               0x1000);
2291
2292     mpr_bo = dri_bo_alloc(i965->intel.bufmgr,
2293                           "mpr row store",
2294                           7680, /* 1.0 * 120 * 64 */
2295                           0x1000);
2296
2297     BEGIN_BCS_BATCH(batch, 4);
2298     OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
2299
2300     OUT_BCS_RELOC(batch,
2301                   bsd_mpc_bo,
2302                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2303                   0);
2304
2305     OUT_BCS_RELOC(batch,
2306                   mpr_bo,
2307                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2308                   0);
2309     OUT_BCS_BATCH(batch, 0);
2310
2311     ADVANCE_BCS_BATCH(batch);
2312
2313     dri_bo_unreference(bsd_mpc_bo);
2314     dri_bo_unreference(mpr_bo);
2315 }
2316
2317 static void
2318 gen7_jpeg_wa_avc_qm_state(VADriverContextP ctx,
2319                           struct gen7_mfd_context *gen7_mfd_context)
2320 {
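    /* No AVC scaling matrices are programmed for the workaround clip. */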
2321
2322 }
2323
2324 static void
2325 gen7_jpeg_wa_avc_img_state(VADriverContextP ctx,
2326                            struct gen7_mfd_context *gen7_mfd_context)
2327 {
2328     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2329     int img_struct = 0;
2330     int mbaff_frame_flag = 0;
2331     unsigned int width_in_mbs = 1, height_in_mbs = 1;
2332
2333     BEGIN_BCS_BATCH(batch, 16);
2334     OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (16 - 2));
2335     OUT_BCS_BATCH(batch, 
2336                   width_in_mbs * height_in_mbs);
2337     OUT_BCS_BATCH(batch, 
2338                   ((height_in_mbs - 1) << 16) | 
2339                   ((width_in_mbs - 1) << 0));
2340     OUT_BCS_BATCH(batch, 
2341                   (0 << 24) |
2342                   (0 << 16) |
2343                   (0 << 14) |
2344                   (0 << 13) |
2345                   (0 << 12) | /* differ from GEN6 */
2346                   (0 << 10) |
2347                   (img_struct << 8));
2348     OUT_BCS_BATCH(batch,
2349                   (1 << 10) | /* 4:2:0 */
2350                   (1 << 7) |  /* CABAC */
2351                   (0 << 6) |
2352                   (0 << 5) |
2353                   (0 << 4) |
2354                   (0 << 3) |
2355                   (1 << 2) |
2356                   (mbaff_frame_flag << 1) |
2357                   (0 << 0));
2358     OUT_BCS_BATCH(batch, 0);
2359     OUT_BCS_BATCH(batch, 0);
2360     OUT_BCS_BATCH(batch, 0);
2361     OUT_BCS_BATCH(batch, 0);
2362     OUT_BCS_BATCH(batch, 0);
2363     OUT_BCS_BATCH(batch, 0);
2364     OUT_BCS_BATCH(batch, 0);
2365     OUT_BCS_BATCH(batch, 0);
2366     OUT_BCS_BATCH(batch, 0);
2367     OUT_BCS_BATCH(batch, 0);
2368     OUT_BCS_BATCH(batch, 0);
2369     ADVANCE_BCS_BATCH(batch);
2370 }
2371
2372 static void
2373 gen7_jpeg_wa_avc_directmode_state(VADriverContextP ctx,
2374                                   struct gen7_mfd_context *gen7_mfd_context)
2375 {
2376     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2377     int i;
2378
2379     BEGIN_BCS_BATCH(batch, 69);
2380     OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));
2381
2382     /* reference surfaces 0..15 */
2383     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2384         OUT_BCS_BATCH(batch, 0); /* top */
2385         OUT_BCS_BATCH(batch, 0); /* bottom */
2386     }
2387
2388     /* the current decoding frame/field */
2389     OUT_BCS_BATCH(batch, 0); /* top */
2390     OUT_BCS_BATCH(batch, 0); /* bottom */
2391
2392     /* POC List */
2393     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2394         OUT_BCS_BATCH(batch, 0);
2395         OUT_BCS_BATCH(batch, 0);
2396     }
2397
2398     OUT_BCS_BATCH(batch, 0);
2399     OUT_BCS_BATCH(batch, 0);
2400
2401     ADVANCE_BCS_BATCH(batch);
2402 }
2403
2404 static void
2405 gen7_jpeg_wa_ind_obj_base_addr_state(VADriverContextP ctx,
2406                                      struct gen7_mfd_context *gen7_mfd_context)
2407 {
2408     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2409
2410     BEGIN_BCS_BATCH(batch, 11);
2411     OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
2412     OUT_BCS_RELOC(batch,
2413                   gen7_mfd_context->jpeg_wa_slice_data_bo,
2414                   I915_GEM_DOMAIN_INSTRUCTION, 0,
2415                   0);
2416     OUT_BCS_BATCH(batch, 0x80000000); /* must be set; up to 2G */
2417     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2418     OUT_BCS_BATCH(batch, 0);
2419     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2420     OUT_BCS_BATCH(batch, 0);
2421     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2422     OUT_BCS_BATCH(batch, 0);
2423     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2424     OUT_BCS_BATCH(batch, 0);
2425     ADVANCE_BCS_BATCH(batch);
2426 }
2427
2428 static void
2429 gen7_jpeg_wa_avc_bsd_object(VADriverContextP ctx,
2430                             struct gen7_mfd_context *gen7_mfd_context)
2431 {
2432     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2433
2434     /* the input bitstream format on GEN7 differs from GEN6 */
2435     BEGIN_BCS_BATCH(batch, 6);
2436     OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
2437     OUT_BCS_BATCH(batch, gen7_jpeg_wa_clip.data_size);
2438     OUT_BCS_BATCH(batch, 0);
2439     OUT_BCS_BATCH(batch,
2440                   (0 << 31) |
2441                   (0 << 14) |
2442                   (0 << 12) |
2443                   (0 << 10) |
2444                   (0 << 8));
2445     OUT_BCS_BATCH(batch,
2446                   ((gen7_jpeg_wa_clip.data_bit_offset >> 3) << 16) |
2447                   (0 << 5)  |
2448                   (0 << 4)  |
2449                   (1 << 3) | /* LastSlice Flag */
2450                   (gen7_jpeg_wa_clip.data_bit_offset & 0x7));
2451     OUT_BCS_BATCH(batch, 0);
2452     ADVANCE_BCS_BATCH(batch);
2453 }
2454
2455 static void
2456 gen7_jpeg_wa_avc_slice_state(VADriverContextP ctx,
2457                              struct gen7_mfd_context *gen7_mfd_context)
2458 {
2459     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2460     int slice_hor_pos = 0, slice_ver_pos = 0, next_slice_hor_pos = 0, next_slice_ver_pos = 1;
2461     int num_ref_idx_l0 = 0, num_ref_idx_l1 = 0;
2462     int first_mb_in_slice = 0;
2463     int slice_type = SLICE_TYPE_I;
2464
2465     BEGIN_BCS_BATCH(batch, 11);
2466     OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
2467     OUT_BCS_BATCH(batch, slice_type);
2468     OUT_BCS_BATCH(batch, 
2469                   (num_ref_idx_l1 << 24) |
2470                   (num_ref_idx_l0 << 16) |
2471                   (0 << 8) |
2472                   (0 << 0));
2473     OUT_BCS_BATCH(batch, 
2474                   (0 << 29) |
2475                   (1 << 27) |   /* disable Deblocking */
2476                   (0 << 24) |
2477                   (gen7_jpeg_wa_clip.qp << 16) |
2478                   (0 << 8) |
2479                   (0 << 0));
2480     OUT_BCS_BATCH(batch, 
2481                   (slice_ver_pos << 24) |
2482                   (slice_hor_pos << 16) | 
2483                   (first_mb_in_slice << 0));
2484     OUT_BCS_BATCH(batch,
2485                   (next_slice_ver_pos << 16) |
2486                   (next_slice_hor_pos << 0));
2487     OUT_BCS_BATCH(batch, (1 << 19)); /* last slice flag */
2488     OUT_BCS_BATCH(batch, 0);
2489     OUT_BCS_BATCH(batch, 0);
2490     OUT_BCS_BATCH(batch, 0);
2491     OUT_BCS_BATCH(batch, 0);
2492     ADVANCE_BCS_BATCH(batch);
2493 }
2494
2495 static void
2496 gen7_mfd_jpeg_wa(VADriverContextP ctx,
2497                  struct gen7_mfd_context *gen7_mfd_context)
2498 {
2499     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2500     gen7_jpeg_wa_init(ctx, gen7_mfd_context);
2501     intel_batchbuffer_emit_mi_flush(batch);
2502     gen7_jpeg_wa_pipe_mode_select(ctx, gen7_mfd_context);
2503     gen7_jpeg_wa_surface_state(ctx, gen7_mfd_context);
2504     gen7_jpeg_wa_pipe_buf_addr_state(ctx, gen7_mfd_context);
2505     gen7_jpeg_wa_bsp_buf_base_addr_state(ctx, gen7_mfd_context);
2506     gen7_jpeg_wa_avc_qm_state(ctx, gen7_mfd_context);
2507     gen7_jpeg_wa_avc_img_state(ctx, gen7_mfd_context);
2508     gen7_jpeg_wa_ind_obj_base_addr_state(ctx, gen7_mfd_context);
2509
2510     gen7_jpeg_wa_avc_directmode_state(ctx, gen7_mfd_context);
2511     gen7_jpeg_wa_avc_slice_state(ctx, gen7_mfd_context);
2512     gen7_jpeg_wa_avc_bsd_object(ctx, gen7_mfd_context);
2513 }
2514
2515 void
2516 gen7_mfd_jpeg_decode_picture(VADriverContextP ctx,
2517                              struct decode_state *decode_state,
2518                              struct gen7_mfd_context *gen7_mfd_context)
2519 {
2520     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2521     VAPictureParameterBufferJPEGBaseline *pic_param;
2522     VASliceParameterBufferJPEGBaseline *slice_param, *next_slice_param, *next_slice_group_param;
2523     dri_bo *slice_data_bo;
2524     int i, j, max_selector = 0;
2525
2526     assert(decode_state->pic_param && decode_state->pic_param->buffer);
2527     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
2528
2529     /* Only Baseline DCT is currently supported */
2530     gen7_mfd_jpeg_decode_init(ctx, decode_state, gen7_mfd_context);
2531     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
2532     gen7_mfd_jpeg_wa(ctx, gen7_mfd_context);
2533     intel_batchbuffer_emit_mi_flush(batch);
2534     gen7_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_JPEG, gen7_mfd_context);
2535     gen7_mfd_surface_state(ctx, decode_state, MFX_FORMAT_JPEG, gen7_mfd_context);
2536     gen7_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_JPEG, gen7_mfd_context);
2537     gen7_mfd_jpeg_pic_state(ctx, decode_state, gen7_mfd_context);
2538     gen7_mfd_jpeg_qm_state(ctx, decode_state, gen7_mfd_context);
2539
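    /*
     * First pass over the slice parameters: find the largest Huffman table
     * selector actually referenced, so only the required tables are loaded.
     */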
2540     for (j = 0; j < decode_state->num_slice_params; j++) {
2541         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
2542         slice_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j]->buffer;
2543         slice_data_bo = decode_state->slice_datas[j]->bo;
2544         gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_JPEG, gen7_mfd_context);
2545
2546         if (j == decode_state->num_slice_params - 1)
2547             next_slice_group_param = NULL;
2548         else
2549             next_slice_group_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j + 1]->buffer;
2550
2551         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
2552             int component;
2553
2554             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
2555
2556             if (i < decode_state->slice_params[j]->num_elements - 1)
2557                 next_slice_param = slice_param + 1;
2558             else
2559                 next_slice_param = next_slice_group_param;
2560
2561             for (component = 0; component < slice_param->num_components; component++) {
2562                 if (max_selector < slice_param->components[component].dc_table_selector)
2563                     max_selector = slice_param->components[component].dc_table_selector;
2564
2565                 if (max_selector < slice_param->components[component].ac_table_selector)
2566                     max_selector = slice_param->components[component].ac_table_selector;
2567             }
2568
2569             slice_param++;
2570         }
2571     }
2572
2573     assert(max_selector < 2); /* baseline JPEG only allows Huffman table selectors 0 and 1 */
2574     gen7_mfd_jpeg_huff_table_state(ctx, decode_state, gen7_mfd_context, max_selector + 1);
2575
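    /*
     * Second pass: re-walk the scans, point the indirect object base at
     * the matching slice data buffer and emit one BSD object per scan
     * via gen7_mfd_jpeg_bsd_object().
     */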
2576     for (j = 0; j < decode_state->num_slice_params; j++) {
2577         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
2578         slice_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j]->buffer;
2579         slice_data_bo = decode_state->slice_datas[j]->bo;
2580         gen7_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_JPEG, gen7_mfd_context);
2581
2582         if (j == decode_state->num_slice_params - 1)
2583             next_slice_group_param = NULL;
2584         else
2585             next_slice_group_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j + 1]->buffer;
2586
2587         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
2588             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
2589
2590             if (i < decode_state->slice_params[j]->num_elements - 1)
2591                 next_slice_param = slice_param + 1;
2592             else
2593                 next_slice_param = next_slice_group_param;
2594
2595             gen7_mfd_jpeg_bsd_object(ctx, pic_param, slice_param, next_slice_param, slice_data_bo, gen7_mfd_context);
2596             slice_param++;
2597         }
2598     }
2599
2600     intel_batchbuffer_end_atomic(batch);
2601     intel_batchbuffer_flush(batch);
2602 }
2603
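/*
 * hw_context::run() hook: sanity-check the input buffers for the
 * requested profile, then dispatch to the per-codec decode routine.
 */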
2604 static VAStatus
2605 gen7_mfd_decode_picture(VADriverContextP ctx, 
2606                         VAProfile profile, 
2607                         union codec_state *codec_state,
2608                         struct hw_context *hw_context)
2609
2610 {
2611     struct gen7_mfd_context *gen7_mfd_context = (struct gen7_mfd_context *)hw_context;
2612     struct decode_state *decode_state = &codec_state->decode;
2613     VAStatus vaStatus;
2614
2615     assert(gen7_mfd_context);
2616
2617     vaStatus = intel_decoder_sanity_check_input(ctx, profile, decode_state);
2618
2619     if (vaStatus != VA_STATUS_SUCCESS)
2620         goto out;
2621
2622     gen7_mfd_context->wa_mpeg2_slice_vertical_position = -1;
2623
2624     switch (profile) {
2625     case VAProfileMPEG2Simple:
2626     case VAProfileMPEG2Main:
2627         gen7_mfd_mpeg2_decode_picture(ctx, decode_state, gen7_mfd_context);
2628         break;
2629         
2630     case VAProfileH264Baseline:
2631     case VAProfileH264Main:
2632     case VAProfileH264High:
2633         gen7_mfd_avc_decode_picture(ctx, decode_state, gen7_mfd_context);
2634         break;
2635
2636     case VAProfileVC1Simple:
2637     case VAProfileVC1Main:
2638     case VAProfileVC1Advanced:
2639         gen7_mfd_vc1_decode_picture(ctx, decode_state, gen7_mfd_context);
2640         break;
2641
2642     case VAProfileJPEGBaseline:
2643         gen7_mfd_jpeg_decode_picture(ctx, decode_state, gen7_mfd_context);
2644         break;
2645
2646     default:
2647         assert(0); /* unsupported profile */
2648         break;
2649     }
2650
2651     vaStatus = VA_STATUS_SUCCESS;
2652
2653 out:
2654     return vaStatus;
2655 }
2656
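/*
 * hw_context::destroy() hook: drop every scratch and output BO owned by
 * the context, then free the batchbuffer and the context itself.
 */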
2657 static void
2658 gen7_mfd_context_destroy(void *hw_context)
2659 {
2660     struct gen7_mfd_context *gen7_mfd_context = (struct gen7_mfd_context *)hw_context;
2661
2662     dri_bo_unreference(gen7_mfd_context->post_deblocking_output.bo);
2663     gen7_mfd_context->post_deblocking_output.bo = NULL;
2664
2665     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
2666     gen7_mfd_context->pre_deblocking_output.bo = NULL;
2667
2668     dri_bo_unreference(gen7_mfd_context->intra_row_store_scratch_buffer.bo);
2669     gen7_mfd_context->intra_row_store_scratch_buffer.bo = NULL;
2670
2671     dri_bo_unreference(gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo);
2672     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;
2673
2674     dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
2675     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;
2676
2677     dri_bo_unreference(gen7_mfd_context->mpr_row_store_scratch_buffer.bo);
2678     gen7_mfd_context->mpr_row_store_scratch_buffer.bo = NULL;
2679
2680     dri_bo_unreference(gen7_mfd_context->bitplane_read_buffer.bo);
2681     gen7_mfd_context->bitplane_read_buffer.bo = NULL;
2682
2683     dri_bo_unreference(gen7_mfd_context->jpeg_wa_slice_data_bo);
2684
2685     intel_batchbuffer_free(gen7_mfd_context->base.batch);
2686     free(gen7_mfd_context);
2687 }
2688
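/*
 * Invalidate (-1) the cached MPEG-2 quantiser-matrix load flags so the
 * first picture of a stream does not reuse stale IQ matrices.
 */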
2689 static void gen7_mfd_mpeg2_context_init(VADriverContextP ctx,
2690                                     struct gen7_mfd_context *gen7_mfd_context)
2691 {
2692     gen7_mfd_context->iq_matrix.mpeg2.load_intra_quantiser_matrix = -1;
2693     gen7_mfd_context->iq_matrix.mpeg2.load_non_intra_quantiser_matrix = -1;
2694     gen7_mfd_context->iq_matrix.mpeg2.load_chroma_intra_quantiser_matrix = -1;
2695     gen7_mfd_context->iq_matrix.mpeg2.load_chroma_non_intra_quantiser_matrix = -1;
2696 }
2697
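/*
 * Create the gen7 decoder context: wire up the destroy/run hooks,
 * allocate a batchbuffer, invalidate the reference surface slots and do
 * any per-codec initialisation.
 */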
2698 struct hw_context *
2699 gen7_dec_hw_context_init(VADriverContextP ctx, struct object_config *obj_config)
2700 {
2701     struct intel_driver_data *intel = intel_driver_data(ctx);
2702     struct gen7_mfd_context *gen7_mfd_context = calloc(1, sizeof(struct gen7_mfd_context));
2703     int i;
2704
2705     gen7_mfd_context->base.destroy = gen7_mfd_context_destroy;
2706     gen7_mfd_context->base.run = gen7_mfd_decode_picture;
2707     gen7_mfd_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER, 0);
2708
2709     for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
2710         gen7_mfd_context->reference_surface[i].surface_id = VA_INVALID_ID;
2711         gen7_mfd_context->reference_surface[i].frame_store_id = -1;
2712         gen7_mfd_context->reference_surface[i].obj_surface = NULL;
2713     }
2714
2715     gen7_mfd_context->jpeg_wa_surface_id = VA_INVALID_SURFACE;
2716
2717     switch (obj_config->profile) {
2718     case VAProfileMPEG2Simple:
2719     case VAProfileMPEG2Main:
2720         gen7_mfd_mpeg2_context_init(ctx, gen7_mfd_context);
2721         break;
2722
2723     case VAProfileH264Baseline:
2724     case VAProfileH264Main:
2725     case VAProfileH264High:
2726         gen7_mfd_avc_context_init(ctx, gen7_mfd_context);
2727         break;
2728     default:
2729         break;
2730     }
2731     return (struct hw_context *)gen7_mfd_context;
2732 }
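
/*
 * Usage sketch (hypothetical caller, not part of this driver): the
 * context returned above is only driven through the generic hw_context
 * hooks.  Assuming a populated union codec_state, a caller would do
 * roughly:
 *
 *     struct hw_context *hwc = gen7_dec_hw_context_init(ctx, obj_config);
 *     VAStatus status = hwc->run(ctx, obj_config->profile, &codec_state, hwc);
 *     hwc->destroy(hwc);
 *
 * where run() maps to gen7_mfd_decode_picture() and destroy() to
 * gen7_mfd_context_destroy(), as set up in gen7_dec_hw_context_init().
 */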