[platform/upstream/libva-intel-driver.git] / src / gen75_mfd.c
1 /*
2  * Copyright © 2011 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Xiang Haihao <haihao.xiang@intel.com>
26  *    Zhao Yakui  <yakui.zhao@intel.com>
27  *
28  */
29 #include "sysdeps.h"
30
31 #include <va/va_dec_jpeg.h>
32
33 #include "intel_batchbuffer.h"
34 #include "intel_driver.h"
35 #include "i965_defines.h"
36 #include "i965_drv_video.h"
37 #include "i965_decoder_utils.h"
38 #include "gen7_mfd.h"
39 #include "intel_media.h"
40
41 #define B0_STEP_REV             2
42 #define IS_STEPPING_BPLUS(i965) ((i965->intel.revision) >= B0_STEP_REV)
43
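/*
 * Standard zig-zag scan order (index j -> raster position zigzag_direct[j]).
 * gen75_mfd_mpeg2_qm_state() below uses it to reorder the MPEG-2 quantiser
 * matrices received through VA-API before they are sent with MFX_QM_STATE:
 * entry j of the incoming matrix lands at position zigzag_direct[j] of the
 * cached copy handed to the hardware.
 */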
44 static const uint32_t zigzag_direct[64] = {
45     0,   1,  8, 16,  9,  2,  3, 10,
46     17, 24, 32, 25, 18, 11,  4,  5,
47     12, 19, 26, 33, 40, 48, 41, 34,
48     27, 20, 13,  6,  7, 14, 21, 28,
49     35, 42, 49, 56, 57, 50, 43, 36,
50     29, 22, 15, 23, 30, 37, 44, 51,
51     58, 59, 52, 45, 38, 31, 39, 46,
52     53, 60, 61, 54, 47, 55, 62, 63
53 };
54
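/*
 * Lazily attaches a GenAvcSurface to the decoded picture and allocates the
 * direct-MV (DMV) write/read buffers that are later programmed through
 * MFX_AVC_DIRECTMODE_STATE: 128 bytes per macroblock for the top/frame
 * buffer, plus a separate bottom-field buffer only when field pictures are
 * decoded without direct_8x8_inference.  Rough sizing example (not from the
 * source): a 1920x1088 frame is 120x68 MBs, so each DMV buffer is
 * 120 * 68 * 128 bytes, i.e. about 1 MB.
 */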
55 static void
56 gen75_mfd_init_avc_surface(VADriverContextP ctx, 
57                           VAPictureParameterBufferH264 *pic_param,
58                           struct object_surface *obj_surface)
59 {
60     struct i965_driver_data *i965 = i965_driver_data(ctx);
61     GenAvcSurface *gen7_avc_surface = obj_surface->private_data;
62     int width_in_mbs, height_in_mbs;
63
64     obj_surface->free_private_data = gen_free_avc_surface;
65     width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
66     height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
67
68     if (!gen7_avc_surface) {
69         gen7_avc_surface = calloc(1, sizeof(GenAvcSurface));
70         assert((obj_surface->size & 0x3f) == 0);
71         obj_surface->private_data = gen7_avc_surface;
72     }
73
74     gen7_avc_surface->dmv_bottom_flag = (pic_param->pic_fields.bits.field_pic_flag &&
75                                          !pic_param->seq_fields.bits.direct_8x8_inference_flag);
76
77     if (gen7_avc_surface->dmv_top == NULL) {
78         gen7_avc_surface->dmv_top = dri_bo_alloc(i965->intel.bufmgr,
79                                                  "direct mv w/r buffer",
80                                                  width_in_mbs * height_in_mbs * 128,
81                                                  0x1000);
82         assert(gen7_avc_surface->dmv_top);
83     }
84
85     if (gen7_avc_surface->dmv_bottom_flag &&
86         gen7_avc_surface->dmv_bottom == NULL) {
87         gen7_avc_surface->dmv_bottom = dri_bo_alloc(i965->intel.bufmgr,
88                                                     "direct mv w/r buffer",
89                                                     width_in_mbs * height_in_mbs * 128,
90                                                     0x1000);
91         assert(gen7_avc_surface->dmv_bottom);
92     }
93 }
94
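/*
 * MFX_PIPE_MODE_SELECT: puts the MFX engine into VLD decode mode for the
 * selected codec (MPEG-2, AVC, VC-1 or JPEG), using the long bitstream
 * format with stream-out disabled.  Whichever of the pre-/post-deblocking
 * output buffers was marked valid during decode init is enabled here.
 */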
95 static void
96 gen75_mfd_pipe_mode_select(VADriverContextP ctx,
97                           struct decode_state *decode_state,
98                           int standard_select,
99                           struct gen7_mfd_context *gen7_mfd_context)
100 {
101     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
102
103     assert(standard_select == MFX_FORMAT_MPEG2 ||
104            standard_select == MFX_FORMAT_AVC ||
105            standard_select == MFX_FORMAT_VC1 ||
106            standard_select == MFX_FORMAT_JPEG);
107
108     BEGIN_BCS_BATCH(batch, 5);
109     OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (5 - 2));
110     OUT_BCS_BATCH(batch,
111                   (MFX_LONG_MODE << 17) | /* Currently only support long format */
112                   (MFD_MODE_VLD << 15) | /* VLD mode */
113                   (0 << 10) | /* disable Stream-Out */
114                   (gen7_mfd_context->post_deblocking_output.valid << 9)  | /* Post Deblocking Output */
115                   (gen7_mfd_context->pre_deblocking_output.valid << 8)  | /* Pre Deblocking Output */
116                   (0 << 5)  | /* not in stitch mode */
117                   (MFX_CODEC_DECODE << 4)  | /* decoding mode */
118                   (standard_select << 0));
119     OUT_BCS_BATCH(batch,
120                   (0 << 4)  | /* terminate if AVC motion and POC table error occurs */
121                   (0 << 3)  | /* terminate if AVC mbdata error occurs */
122                   (0 << 2)  | /* terminate if AVC CABAC/CAVLC decode error occurs */
123                   (0 << 1)  |
124                   (0 << 0));
125     OUT_BCS_BATCH(batch, 0); /* pic status/error report id */ 
126     OUT_BCS_BATCH(batch, 0); /* reserved */
127     ADVANCE_BCS_BATCH(batch);
128 }
129
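/*
 * MFX_SURFACE_STATE: describes the render target -- width/height, pitch
 * (obj_surface->width), Y-major tiling and the Y offsets of the Cb/Cr
 * planes.  Y800 surfaces (monochrome JPEG) are reported as
 * MFX_SURFACE_MONOCHROME, everything else as planar 4:2:0.
 */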
130 static void
131 gen75_mfd_surface_state(VADriverContextP ctx,
132                        struct decode_state *decode_state,
133                        int standard_select,
134                        struct gen7_mfd_context *gen7_mfd_context)
135 {
136     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
137     struct object_surface *obj_surface = decode_state->render_object;
138     unsigned int y_cb_offset;
139     unsigned int y_cr_offset;
140     unsigned int surface_format;
141
142     assert(obj_surface);
143
144     y_cb_offset = obj_surface->y_cb_offset;
145     y_cr_offset = obj_surface->y_cr_offset;
146
147     surface_format = obj_surface->fourcc == VA_FOURCC_Y800 ?
148         MFX_SURFACE_MONOCHROME : MFX_SURFACE_PLANAR_420_8;
149
150     BEGIN_BCS_BATCH(batch, 6);
151     OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
152     OUT_BCS_BATCH(batch, 0);
153     OUT_BCS_BATCH(batch,
154                   ((obj_surface->orig_height - 1) << 18) |
155                   ((obj_surface->orig_width - 1) << 4));
156     OUT_BCS_BATCH(batch,
157                   (surface_format << 28) | /* 420 planar YUV surface */
158                   ((standard_select != MFX_FORMAT_JPEG) << 27) | /* interleave chroma, set to 0 for JPEG */
159                   (0 << 22) | /* surface object control state, ignored */
160                   ((obj_surface->width - 1) << 3) | /* pitch */
161                   (0 << 2)  | /* must be 0 */
162                   (1 << 1)  | /* must be tiled */
163                   (I965_TILEWALK_YMAJOR << 0));  /* tile walk, must be 1 */
164     OUT_BCS_BATCH(batch,
165                   (0 << 16) | /* X offset for U(Cb), must be 0 */
166                   (y_cb_offset << 0)); /* Y offset for U(Cb) */
167     OUT_BCS_BATCH(batch,
168                   (0 << 16) | /* X offset for V(Cr), must be 0 */
169                   (y_cr_offset << 0)); /* Y offset for V(Cr), must be 0 for video codec, non-zero for JPEG */
170     ADVANCE_BCS_BATCH(batch);
171 }
172
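/*
 * MFX_PIPE_BUF_ADDR_STATE comes in two layouts: the 61-DWord form used on
 * Haswell B0 and later steppings (emitted below) and the older 25-DWord form
 * kept in gen75_mfd_pipe_buf_addr_state() for earlier steppings.  Both
 * program the pre-/post-deblocking outputs, the row-store scratch buffers
 * and the 16 reference picture addresses; the B+ layout reserves additional
 * DWords after each address field.  IS_STEPPING_BPLUS() picks the variant
 * at run time.
 */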
173 static void
174 gen75_mfd_pipe_buf_addr_state_bplus(VADriverContextP ctx,
175                              struct decode_state *decode_state,
176                              int standard_select,
177                              struct gen7_mfd_context *gen7_mfd_context)
178 {
179     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
180     int i;
181
182     BEGIN_BCS_BATCH(batch, 61);
183     OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (61 - 2));
184     /* Pre-deblocking output DW 1-3 */
185     if (gen7_mfd_context->pre_deblocking_output.valid)
186         OUT_BCS_RELOC(batch, gen7_mfd_context->pre_deblocking_output.bo,
187                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
188                       0);
189     else
190         OUT_BCS_BATCH(batch, 0);
191
192     OUT_BCS_BATCH(batch, 0);
193     OUT_BCS_BATCH(batch, 0);
194     /* Post-deblocking output DW 4-6 */
195     if (gen7_mfd_context->post_deblocking_output.valid)
196         OUT_BCS_RELOC(batch, gen7_mfd_context->post_deblocking_output.bo,
197                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
198                       0);
199     else
200         OUT_BCS_BATCH(batch, 0);
201
202     OUT_BCS_BATCH(batch, 0);
203     OUT_BCS_BATCH(batch, 0);
204
205     /* uncompressed video & stream-out DW 7-12 */
206     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
207     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
208     OUT_BCS_BATCH(batch, 0);
209     OUT_BCS_BATCH(batch, 0);
210     OUT_BCS_BATCH(batch, 0);
211     OUT_BCS_BATCH(batch, 0);
212
213     /* intra row-store scratch DW 13-15 */
214     if (gen7_mfd_context->intra_row_store_scratch_buffer.valid)
215         OUT_BCS_RELOC(batch, gen7_mfd_context->intra_row_store_scratch_buffer.bo,
216                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
217                       0);
218     else
219         OUT_BCS_BATCH(batch, 0);
220
221     OUT_BCS_BATCH(batch, 0);
222     OUT_BCS_BATCH(batch, 0);
223     /* deblocking-filter row-store scratch DW 16-18 */
224     if (gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid)
225         OUT_BCS_RELOC(batch, gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo,
226                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
227                       0);
228     else
229         OUT_BCS_BATCH(batch, 0);
230     OUT_BCS_BATCH(batch, 0);
231     OUT_BCS_BATCH(batch, 0);
232
233     /* reference picture addresses DW 19-50 */
234     for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
235         struct object_surface *obj_surface;
236
237         if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID &&
238             gen7_mfd_context->reference_surface[i].obj_surface &&
239             gen7_mfd_context->reference_surface[i].obj_surface->bo) {
240             obj_surface = gen7_mfd_context->reference_surface[i].obj_surface;
241
242             OUT_BCS_RELOC(batch, obj_surface->bo,
243                           I915_GEM_DOMAIN_INSTRUCTION, 0,
244                           0);
245         } else {
246             OUT_BCS_BATCH(batch, 0);
247         }
248         OUT_BCS_BATCH(batch, 0);
249     }
250     /* reference picture property DW 51 */
251     OUT_BCS_BATCH(batch, 0);
252
253     /* macroblock status & ILDB buffers DW 52-57 */
254     OUT_BCS_BATCH(batch, 0);
255     OUT_BCS_BATCH(batch, 0);
256     OUT_BCS_BATCH(batch, 0);
257     OUT_BCS_BATCH(batch, 0);
258     OUT_BCS_BATCH(batch, 0);
259     OUT_BCS_BATCH(batch, 0);
260
261     /* second macroblock status buffer DW 58-60 */
262     OUT_BCS_BATCH(batch, 0);
263     OUT_BCS_BATCH(batch, 0);
264     OUT_BCS_BATCH(batch, 0);
265     ADVANCE_BCS_BATCH(batch);
266 }
267
268 static void
269 gen75_mfd_pipe_buf_addr_state(VADriverContextP ctx,
270                              struct decode_state *decode_state,
271                              int standard_select,
272                              struct gen7_mfd_context *gen7_mfd_context)
273 {
274     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
275     struct i965_driver_data *i965 = i965_driver_data(ctx);
276     int i;
277
278     if (IS_STEPPING_BPLUS(i965)) {
279         gen75_mfd_pipe_buf_addr_state_bplus(ctx, decode_state,
280                                             standard_select, gen7_mfd_context);
281         return;
282     }
283
284     BEGIN_BCS_BATCH(batch, 25);
285     OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (25 - 2));
286     if (gen7_mfd_context->pre_deblocking_output.valid)
287         OUT_BCS_RELOC(batch, gen7_mfd_context->pre_deblocking_output.bo,
288                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
289                       0);
290     else
291         OUT_BCS_BATCH(batch, 0);
292
293     if (gen7_mfd_context->post_deblocking_output.valid)
294         OUT_BCS_RELOC(batch, gen7_mfd_context->post_deblocking_output.bo,
295                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
296                       0);
297     else
298         OUT_BCS_BATCH(batch, 0);
299
300     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
301     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
302
303     if (gen7_mfd_context->intra_row_store_scratch_buffer.valid)
304         OUT_BCS_RELOC(batch, gen7_mfd_context->intra_row_store_scratch_buffer.bo,
305                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
306                       0);
307     else
308         OUT_BCS_BATCH(batch, 0);
309
310     if (gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid)
311         OUT_BCS_RELOC(batch, gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo,
312                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
313                       0);
314     else
315         OUT_BCS_BATCH(batch, 0);
316
317     /* DW 7..22 */
318     for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
319         struct object_surface *obj_surface;
320
321         if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID &&
322             gen7_mfd_context->reference_surface[i].obj_surface &&
323             gen7_mfd_context->reference_surface[i].obj_surface->bo) {
324             obj_surface = gen7_mfd_context->reference_surface[i].obj_surface;
325
326             OUT_BCS_RELOC(batch, obj_surface->bo,
327                           I915_GEM_DOMAIN_INSTRUCTION, 0,
328                           0);
329         } else {
330             OUT_BCS_BATCH(batch, 0);
331         }
332     }
333
334     OUT_BCS_BATCH(batch, 0);   /* ignore DW23 for decoding */
335     OUT_BCS_BATCH(batch, 0);   /* ignore DW24 for decoding */
336     ADVANCE_BCS_BATCH(batch);
337 }
338
339 static void
340 gen75_mfd_ind_obj_base_addr_state_bplus(VADriverContextP ctx,
341                                  dri_bo *slice_data_bo,
342                                  int standard_select,
343                                  struct gen7_mfd_context *gen7_mfd_context)
344 {
345     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
346
347     BEGIN_BCS_BATCH(batch, 26);
348     OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (26 - 2));
349         /* MFX In BS 1-5 */
350     OUT_BCS_RELOC(batch, slice_data_bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0); /* MFX Indirect Bitstream Object Base Address */
351     OUT_BCS_BATCH(batch, 0);
352     OUT_BCS_BATCH(batch, 0);
353         /* Upper bound 4-5 */   
354     OUT_BCS_BATCH(batch, 0x80000000); /* must set, up to 2G */
355     OUT_BCS_BATCH(batch, 0);
356
357         /* MFX indirect MV 6-10 */
358     OUT_BCS_BATCH(batch, 0);
359     OUT_BCS_BATCH(batch, 0);
360     OUT_BCS_BATCH(batch, 0);
361     OUT_BCS_BATCH(batch, 0);
362     OUT_BCS_BATCH(batch, 0);
363         
364         /* MFX IT_COFF 11-15 */
365     OUT_BCS_BATCH(batch, 0);
366     OUT_BCS_BATCH(batch, 0);
367     OUT_BCS_BATCH(batch, 0);
368     OUT_BCS_BATCH(batch, 0);
369     OUT_BCS_BATCH(batch, 0);
370
371         /* MFX IT_DBLK 16-20 */
372     OUT_BCS_BATCH(batch, 0);
373     OUT_BCS_BATCH(batch, 0);
374     OUT_BCS_BATCH(batch, 0);
375     OUT_BCS_BATCH(batch, 0);
376     OUT_BCS_BATCH(batch, 0);
377
378         /* MFX PAK_BSE object for encoder 21-25 */
379     OUT_BCS_BATCH(batch, 0);
380     OUT_BCS_BATCH(batch, 0);
381     OUT_BCS_BATCH(batch, 0);
382     OUT_BCS_BATCH(batch, 0);
383     OUT_BCS_BATCH(batch, 0);
384
385     ADVANCE_BCS_BATCH(batch);
386 }
387
388 static void
389 gen75_mfd_ind_obj_base_addr_state(VADriverContextP ctx,
390                                  dri_bo *slice_data_bo,
391                                  int standard_select,
392                                  struct gen7_mfd_context *gen7_mfd_context)
393 {
394     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
395     struct i965_driver_data *i965 = i965_driver_data(ctx);
396
397     if (IS_STEPPING_BPLUS(i965)) {
398         gen75_mfd_ind_obj_base_addr_state_bplus(ctx, slice_data_bo,
399                                                 standard_select, gen7_mfd_context);
400         return;
401     }
402
403     BEGIN_BCS_BATCH(batch, 11);
404     OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
405     OUT_BCS_RELOC(batch, slice_data_bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0); /* MFX Indirect Bitstream Object Base Address */
406     OUT_BCS_BATCH(batch, 0x80000000); /* must set, up to 2G */
407     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
408     OUT_BCS_BATCH(batch, 0);
409     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
410     OUT_BCS_BATCH(batch, 0);
411     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
412     OUT_BCS_BATCH(batch, 0);
413     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
414     OUT_BCS_BATCH(batch, 0);
415     ADVANCE_BCS_BATCH(batch);
416 }
417
418 static void
419 gen75_mfd_bsp_buf_base_addr_state_bplus(VADriverContextP ctx,
420                                  struct decode_state *decode_state,
421                                  int standard_select,
422                                  struct gen7_mfd_context *gen7_mfd_context)
423 {
424     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
425
426     BEGIN_BCS_BATCH(batch, 10);
427     OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (10 - 2));
428
429     if (gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid)
430         OUT_BCS_RELOC(batch, gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo,
431                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
432                       0);
433     else
434         OUT_BCS_BATCH(batch, 0);
435
436     OUT_BCS_BATCH(batch, 0);
437     OUT_BCS_BATCH(batch, 0);
438         /* MPR Row Store Scratch buffer 4-6 */
439     if (gen7_mfd_context->mpr_row_store_scratch_buffer.valid)
440         OUT_BCS_RELOC(batch, gen7_mfd_context->mpr_row_store_scratch_buffer.bo,
441                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
442                       0);
443     else
444         OUT_BCS_BATCH(batch, 0);
445     OUT_BCS_BATCH(batch, 0);
446     OUT_BCS_BATCH(batch, 0);
447
448         /* Bitplane 7-9 */ 
449     if (gen7_mfd_context->bitplane_read_buffer.valid)
450         OUT_BCS_RELOC(batch, gen7_mfd_context->bitplane_read_buffer.bo,
451                       I915_GEM_DOMAIN_INSTRUCTION, 0,
452                       0);
453     else
454         OUT_BCS_BATCH(batch, 0);
455     OUT_BCS_BATCH(batch, 0);
456     OUT_BCS_BATCH(batch, 0);
457
458     ADVANCE_BCS_BATCH(batch);
459 }
460
461 static void
462 gen75_mfd_bsp_buf_base_addr_state(VADriverContextP ctx,
463                                  struct decode_state *decode_state,
464                                  int standard_select,
465                                  struct gen7_mfd_context *gen7_mfd_context)
466 {
467     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
468     struct i965_driver_data *i965 = i965_driver_data(ctx);
469
470     if (IS_STEPPING_BPLUS(i965)) {
471         gen75_mfd_bsp_buf_base_addr_state_bplus(ctx, decode_state,
472                                                 standard_select, gen7_mfd_context);
473         return;
474     }
475
476     BEGIN_BCS_BATCH(batch, 4);
477     OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
478
479     if (gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid)
480         OUT_BCS_RELOC(batch, gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo,
481                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
482                       0);
483     else
484         OUT_BCS_BATCH(batch, 0);
485
486     if (gen7_mfd_context->mpr_row_store_scratch_buffer.valid)
487         OUT_BCS_RELOC(batch, gen7_mfd_context->mpr_row_store_scratch_buffer.bo,
488                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
489                       0);
490     else
491         OUT_BCS_BATCH(batch, 0);
492
493     if (gen7_mfd_context->bitplane_read_buffer.valid)
494         OUT_BCS_RELOC(batch, gen7_mfd_context->bitplane_read_buffer.bo,
495                       I915_GEM_DOMAIN_INSTRUCTION, 0,
496                       0);
497     else
498         OUT_BCS_BATCH(batch, 0);
499
500     ADVANCE_BCS_BATCH(batch);
501 }
502
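/*
 * MFX_QM_STATE carries a fixed 16-DWord (64-byte) matrix payload; callers
 * pass qm_length of at most 64 bytes (e.g. 48 bytes for the three AVC 4x4
 * lists) and the local qm_buffer copy pads the remainder of the payload.
 */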
503 static void
504 gen75_mfd_qm_state(VADriverContextP ctx,
505                   int qm_type,
506                   unsigned char *qm,
507                   int qm_length,
508                   struct gen7_mfd_context *gen7_mfd_context)
509 {
510     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
511     unsigned int qm_buffer[16] = { 0 }; /* zero the tail: qm_length may be shorter than the payload */
512
513     assert(qm_length <= 16 * 4);
514     memcpy(qm_buffer, qm, qm_length);
515
516     BEGIN_BCS_BATCH(batch, 18);
517     OUT_BCS_BATCH(batch, MFX_QM_STATE | (18 - 2));
518     OUT_BCS_BATCH(batch, qm_type << 0);
519     intel_batchbuffer_data(batch, qm_buffer, 16 * 4);
520     ADVANCE_BCS_BATCH(batch);
521 }
522
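/*
 * MFX_AVC_IMG_STATE: frame-level AVC parameters.  img_struct encodes the
 * picture structure (0 = frame, 1 = top field, 3 = bottom field) and
 * mbaff_frame_flag is set only for MBAFF frames, i.e. when
 * mb_adaptive_frame_field_flag is set and the current picture is not a field.
 */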
523 static void
524 gen75_mfd_avc_img_state(VADriverContextP ctx,
525                        struct decode_state *decode_state,
526                        struct gen7_mfd_context *gen7_mfd_context)
527 {
528     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
529     int img_struct;
530     int mbaff_frame_flag;
531     unsigned int width_in_mbs, height_in_mbs;
532     VAPictureParameterBufferH264 *pic_param;
533
534     assert(decode_state->pic_param && decode_state->pic_param->buffer);
535     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
536
537     assert(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID));
538
539     if (pic_param->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
540         img_struct = 1;
541     else if (pic_param->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD)
542         img_struct = 3;
543     else
544         img_struct = 0;
545
546     if ((img_struct & 0x1) == 0x1) {
547         assert(pic_param->pic_fields.bits.field_pic_flag == 0x1);
548     } else {
549         assert(pic_param->pic_fields.bits.field_pic_flag == 0x0);
550     }
551
552     if (pic_param->seq_fields.bits.frame_mbs_only_flag) { /* a frame containing only frame macroblocks */
553         assert(pic_param->seq_fields.bits.mb_adaptive_frame_field_flag == 0);
554         assert(pic_param->pic_fields.bits.field_pic_flag == 0);
555     } else {
556         assert(pic_param->seq_fields.bits.direct_8x8_inference_flag == 1); /* see H.264 spec */
557     }
558
559     mbaff_frame_flag = (pic_param->seq_fields.bits.mb_adaptive_frame_field_flag &&
560                         !pic_param->pic_fields.bits.field_pic_flag);
561
562     width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
563     height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1; /* frame height */
564
565     /* MFX unit doesn't support 4:2:2 and 4:4:4 picture */
566     assert(pic_param->seq_fields.bits.chroma_format_idc == 0 || /* monochrome picture */
567            pic_param->seq_fields.bits.chroma_format_idc == 1);  /* 4:2:0 */
568     assert(pic_param->seq_fields.bits.residual_colour_transform_flag == 0); /* only available for 4:4:4 */
569
570     BEGIN_BCS_BATCH(batch, 17);
571     OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (17 - 2));
572     OUT_BCS_BATCH(batch, 
573                   (width_in_mbs * height_in_mbs - 1));
574     OUT_BCS_BATCH(batch, 
575                   ((height_in_mbs - 1) << 16) | 
576                   ((width_in_mbs - 1) << 0));
577     OUT_BCS_BATCH(batch, 
578                   ((pic_param->second_chroma_qp_index_offset & 0x1f) << 24) |
579                   ((pic_param->chroma_qp_index_offset & 0x1f) << 16) |
580                   (0 << 14) | /* Max-bit conformance Intra flag ??? FIXME */
581                   (0 << 13) | /* Max Macroblock size conformance Inter flag ??? FIXME */
582                   (pic_param->pic_fields.bits.weighted_pred_flag << 12) | /* differ from GEN6 */
583                   (pic_param->pic_fields.bits.weighted_bipred_idc << 10) |
584                   (img_struct << 8));
585     OUT_BCS_BATCH(batch,
586                   (pic_param->seq_fields.bits.chroma_format_idc << 10) |
587                   (pic_param->pic_fields.bits.entropy_coding_mode_flag << 7) |
588                   ((!pic_param->pic_fields.bits.reference_pic_flag) << 6) |
589                   (pic_param->pic_fields.bits.constrained_intra_pred_flag << 5) |
590                   (pic_param->seq_fields.bits.direct_8x8_inference_flag << 4) |
591                   (pic_param->pic_fields.bits.transform_8x8_mode_flag << 3) |
592                   (pic_param->seq_fields.bits.frame_mbs_only_flag << 2) |
593                   (mbaff_frame_flag << 1) |
594                   (pic_param->pic_fields.bits.field_pic_flag << 0));
595     OUT_BCS_BATCH(batch, 0);
596     OUT_BCS_BATCH(batch, 0);
597     OUT_BCS_BATCH(batch, 0);
598     OUT_BCS_BATCH(batch, 0);
599     OUT_BCS_BATCH(batch, 0);
600     OUT_BCS_BATCH(batch, 0);
601     OUT_BCS_BATCH(batch, 0);
602     OUT_BCS_BATCH(batch, 0);
603     OUT_BCS_BATCH(batch, 0);
604     OUT_BCS_BATCH(batch, 0);
605     OUT_BCS_BATCH(batch, 0);
606     OUT_BCS_BATCH(batch, 0);
607     ADVANCE_BCS_BATCH(batch);
608 }
609
610 static void
611 gen75_mfd_avc_qm_state(VADriverContextP ctx,
612                       struct decode_state *decode_state,
613                       struct gen7_mfd_context *gen7_mfd_context)
614 {
615     VAIQMatrixBufferH264 *iq_matrix;
616     VAPictureParameterBufferH264 *pic_param;
617
618     if (decode_state->iq_matrix && decode_state->iq_matrix->buffer)
619         iq_matrix = (VAIQMatrixBufferH264 *)decode_state->iq_matrix->buffer;
620     else
621         iq_matrix = &gen7_mfd_context->iq_matrix.h264;
622
623     assert(decode_state->pic_param && decode_state->pic_param->buffer);
624     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
625
626     gen75_mfd_qm_state(ctx, MFX_QM_AVC_4X4_INTRA_MATRIX, &iq_matrix->ScalingList4x4[0][0], 3 * 16, gen7_mfd_context);
627     gen75_mfd_qm_state(ctx, MFX_QM_AVC_4X4_INTER_MATRIX, &iq_matrix->ScalingList4x4[3][0], 3 * 16, gen7_mfd_context);
628
629     if (pic_param->pic_fields.bits.transform_8x8_mode_flag) {
630         gen75_mfd_qm_state(ctx, MFX_QM_AVC_8x8_INTRA_MATRIX, &iq_matrix->ScalingList8x8[0][0], 64, gen7_mfd_context);
631         gen75_mfd_qm_state(ctx, MFX_QM_AVC_8x8_INTER_MATRIX, &iq_matrix->ScalingList8x8[1][0], 64, gen7_mfd_context);
632     }
633 }
634
635 static void
636 gen75_mfd_avc_picid_state(VADriverContextP ctx,
637                       struct decode_state *decode_state,
638                       struct gen7_mfd_context *gen7_mfd_context)
639 {
640     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
641
642     BEGIN_BCS_BATCH(batch, 10);
643     OUT_BCS_BATCH(batch, MFD_AVC_PICID_STATE | (10 - 2));
644     OUT_BCS_BATCH(batch, 1); // disable Picture ID Remapping
645     OUT_BCS_BATCH(batch, 0);
646     OUT_BCS_BATCH(batch, 0);
647     OUT_BCS_BATCH(batch, 0);
648     OUT_BCS_BATCH(batch, 0);
649     OUT_BCS_BATCH(batch, 0);
650     OUT_BCS_BATCH(batch, 0);
651     OUT_BCS_BATCH(batch, 0);
652     OUT_BCS_BATCH(batch, 0);
653     ADVANCE_BCS_BATCH(batch);
654 }
655
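/*
 * MFX_AVC_DIRECTMODE_STATE: hands the hardware the per-picture direct-MV
 * buffers (allocated in gen75_mfd_init_avc_surface) for the 16 reference
 * slots and the current picture, followed by the top/bottom POC of every
 * reference and of the current picture.  The non-B+ variant further below
 * also programs the bottom-field DMV buffer explicitly.
 */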
656 static void
657 gen75_mfd_avc_directmode_state_bplus(VADriverContextP ctx,
658                               struct decode_state *decode_state,
659                               VAPictureParameterBufferH264 *pic_param,
660                               VASliceParameterBufferH264 *slice_param,
661                               struct gen7_mfd_context *gen7_mfd_context)
662 {
663     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
664     struct object_surface *obj_surface;
665     GenAvcSurface *gen7_avc_surface;
666     VAPictureH264 *va_pic;
667     int i, j;
668
669     BEGIN_BCS_BATCH(batch, 71);
670     OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (71 - 2));
671
672     /* reference surfaces 0..15 */
673     for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
674         if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID &&
675             gen7_mfd_context->reference_surface[i].obj_surface &&
676             gen7_mfd_context->reference_surface[i].obj_surface->private_data) {
677
678             obj_surface = gen7_mfd_context->reference_surface[i].obj_surface;
679             gen7_avc_surface = obj_surface->private_data;
680             OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
681                           I915_GEM_DOMAIN_INSTRUCTION, 0,
682                           0);
683             OUT_BCS_BATCH(batch, 0);
684         } else {
685             OUT_BCS_BATCH(batch, 0);
686             OUT_BCS_BATCH(batch, 0);
687         }
688     }
689
690     OUT_BCS_BATCH(batch, 0);
691
692     /* the current decoding frame/field */
693     va_pic = &pic_param->CurrPic;
694     obj_surface = decode_state->render_object;
695     assert(obj_surface->bo && obj_surface->private_data);
696     gen7_avc_surface = obj_surface->private_data;
697
698     OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
699                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
700                   0);
701
702     OUT_BCS_BATCH(batch, 0);
703     OUT_BCS_BATCH(batch, 0);
704
705     /* POC List */
706     for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
707         if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID) {
708             int found = 0;
709
710             assert(gen7_mfd_context->reference_surface[i].obj_surface != NULL);
711
712             for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
713                 va_pic = &pic_param->ReferenceFrames[j];
714                 
715                 if (va_pic->flags & VA_PICTURE_H264_INVALID)
716                     continue;
717
718                 if (va_pic->picture_id == gen7_mfd_context->reference_surface[i].surface_id) {
719                     found = 1;
720                     break;
721                 }
722             }
723
724             assert(found == 1);
725             assert(!(va_pic->flags & VA_PICTURE_H264_INVALID));
726             
727             OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
728             OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);
729         } else {
730             OUT_BCS_BATCH(batch, 0);
731             OUT_BCS_BATCH(batch, 0);
732         }
733     }
734
735     va_pic = &pic_param->CurrPic;
736     OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
737     OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);
738
739     ADVANCE_BCS_BATCH(batch);
740 }
741
742 static void
743 gen75_mfd_avc_directmode_state(VADriverContextP ctx,
744                               struct decode_state *decode_state,
745                               VAPictureParameterBufferH264 *pic_param,
746                               VASliceParameterBufferH264 *slice_param,
747                               struct gen7_mfd_context *gen7_mfd_context)
748 {
749     struct i965_driver_data *i965 = i965_driver_data(ctx);
750     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
751     struct object_surface *obj_surface;
752     GenAvcSurface *gen7_avc_surface;
753     VAPictureH264 *va_pic;
754     int i, j;
755
756     if (IS_STEPPING_BPLUS(i965)) {
757         gen75_mfd_avc_directmode_state_bplus(ctx, decode_state, pic_param, slice_param,
758                                              gen7_mfd_context);
759
760         return;
761     }
762
763     BEGIN_BCS_BATCH(batch, 69);
764     OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));
765
766     /* reference surfaces 0..15 */
767     for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
768         if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID &&
769             gen7_mfd_context->reference_surface[i].obj_surface &&
770             gen7_mfd_context->reference_surface[i].obj_surface->private_data) {
771
772             obj_surface = gen7_mfd_context->reference_surface[i].obj_surface;
773             gen7_avc_surface = obj_surface->private_data;
774
775             OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
776                           I915_GEM_DOMAIN_INSTRUCTION, 0,
777                           0);
778
779             if (gen7_avc_surface->dmv_bottom_flag == 1)
780                 OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_bottom,
781                               I915_GEM_DOMAIN_INSTRUCTION, 0,
782                               0);
783             else
784                 OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
785                               I915_GEM_DOMAIN_INSTRUCTION, 0,
786                               0);
787         } else {
788             OUT_BCS_BATCH(batch, 0);
789             OUT_BCS_BATCH(batch, 0);
790         }
791     }
792
793     /* the current decoding frame/field */
794     va_pic = &pic_param->CurrPic;
795     obj_surface = decode_state->render_object;
796     assert(obj_surface->bo && obj_surface->private_data);
797     gen7_avc_surface = obj_surface->private_data;
798
799     OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
800                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
801                   0);
802
803     if (gen7_avc_surface->dmv_bottom_flag == 1)
804         OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_bottom,
805                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
806                       0);
807     else
808         OUT_BCS_RELOC(batch, gen7_avc_surface->dmv_top,
809                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
810                       0);
811
812     /* POC List */
813     for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
814         if (gen7_mfd_context->reference_surface[i].surface_id != VA_INVALID_ID) {
815             int found = 0;
816
817             assert(gen7_mfd_context->reference_surface[i].obj_surface != NULL);
818
819             for (j = 0; j < ARRAY_ELEMS(pic_param->ReferenceFrames); j++) {
820                 va_pic = &pic_param->ReferenceFrames[j];
821                 
822                 if (va_pic->flags & VA_PICTURE_H264_INVALID)
823                     continue;
824
825                 if (va_pic->picture_id == gen7_mfd_context->reference_surface[i].surface_id) {
826                     found = 1;
827                     break;
828                 }
829             }
830
831             assert(found == 1);
832             assert(!(va_pic->flags & VA_PICTURE_H264_INVALID));
833             
834             OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
835             OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);
836         } else {
837             OUT_BCS_BATCH(batch, 0);
838             OUT_BCS_BATCH(batch, 0);
839         }
840     }
841
842     va_pic = &pic_param->CurrPic;
843     OUT_BCS_BATCH(batch, va_pic->TopFieldOrderCnt);
844     OUT_BCS_BATCH(batch, va_pic->BottomFieldOrderCnt);
845
846     ADVANCE_BCS_BATCH(batch);
847 }
848
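/*
 * MFX_AVC_SLICE_STATE: per-slice control.  first_mb_in_slice is shifted left
 * by one for MBAFF frames, converting the MB-pair address from the slice
 * header into a macroblock address; the next-slice position comes from the
 * following slice header, or is set to the bottom of the frame/field for the
 * last slice.
 */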
849 static void
850 gen75_mfd_avc_slice_state(VADriverContextP ctx,
851                          VAPictureParameterBufferH264 *pic_param,
852                          VASliceParameterBufferH264 *slice_param,
853                          VASliceParameterBufferH264 *next_slice_param,
854                          struct gen7_mfd_context *gen7_mfd_context)
855 {
856     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
857     int width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
858     int height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1;
859     int slice_hor_pos, slice_ver_pos, next_slice_hor_pos, next_slice_ver_pos;
860     int num_ref_idx_l0, num_ref_idx_l1;
861     int mbaff_picture = (!pic_param->pic_fields.bits.field_pic_flag &&
862                          pic_param->seq_fields.bits.mb_adaptive_frame_field_flag);
863     int first_mb_in_slice = 0, first_mb_in_next_slice = 0;
864     int slice_type;
865
866     if (slice_param->slice_type == SLICE_TYPE_I ||
867         slice_param->slice_type == SLICE_TYPE_SI) {
868         slice_type = SLICE_TYPE_I;
869     } else if (slice_param->slice_type == SLICE_TYPE_P ||
870                slice_param->slice_type == SLICE_TYPE_SP) {
871         slice_type = SLICE_TYPE_P;
872     } else { 
873         assert(slice_param->slice_type == SLICE_TYPE_B);
874         slice_type = SLICE_TYPE_B;
875     }
876
877     if (slice_type == SLICE_TYPE_I) {
878         assert(slice_param->num_ref_idx_l0_active_minus1 == 0);
879         assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
880         num_ref_idx_l0 = 0;
881         num_ref_idx_l1 = 0;
882     } else if (slice_type == SLICE_TYPE_P) {
883         assert(slice_param->num_ref_idx_l1_active_minus1 == 0);
884         num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
885         num_ref_idx_l1 = 0;
886     } else {
887         num_ref_idx_l0 = slice_param->num_ref_idx_l0_active_minus1 + 1;
888         num_ref_idx_l1 = slice_param->num_ref_idx_l1_active_minus1 + 1;
889     }
890
891     first_mb_in_slice = slice_param->first_mb_in_slice << mbaff_picture;
892     slice_hor_pos = first_mb_in_slice % width_in_mbs; 
893     slice_ver_pos = first_mb_in_slice / width_in_mbs;
894
895     if (next_slice_param) {
896         first_mb_in_next_slice = next_slice_param->first_mb_in_slice << mbaff_picture;
897         next_slice_hor_pos = first_mb_in_next_slice % width_in_mbs; 
898         next_slice_ver_pos = first_mb_in_next_slice / width_in_mbs;
899     } else {
900         next_slice_hor_pos = 0;
901         next_slice_ver_pos = height_in_mbs / (1 + !!pic_param->pic_fields.bits.field_pic_flag);
902     }
903
904     BEGIN_BCS_BATCH(batch, 11); /* FIXME: is it 10??? */
905     OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
906     OUT_BCS_BATCH(batch, slice_type);
907     OUT_BCS_BATCH(batch, 
908                   (num_ref_idx_l1 << 24) |
909                   (num_ref_idx_l0 << 16) |
910                   (slice_param->chroma_log2_weight_denom << 8) |
911                   (slice_param->luma_log2_weight_denom << 0));
912     OUT_BCS_BATCH(batch, 
913                   (slice_param->direct_spatial_mv_pred_flag << 29) |
914                   (slice_param->disable_deblocking_filter_idc << 27) |
915                   (slice_param->cabac_init_idc << 24) |
916                   ((pic_param->pic_init_qp_minus26 + 26 + slice_param->slice_qp_delta) << 16) |
917                   ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
918                   ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
919     OUT_BCS_BATCH(batch, 
920                   (slice_ver_pos << 24) |
921                   (slice_hor_pos << 16) | 
922                   (first_mb_in_slice << 0));
923     OUT_BCS_BATCH(batch,
924                   (next_slice_ver_pos << 16) |
925                   (next_slice_hor_pos << 0));
926     OUT_BCS_BATCH(batch, 
927                   (next_slice_param == NULL) << 19); /* last slice flag */
928     OUT_BCS_BATCH(batch, 0);
929     OUT_BCS_BATCH(batch, 0);
930     OUT_BCS_BATCH(batch, 0);
931     OUT_BCS_BATCH(batch, 0);
932     ADVANCE_BCS_BATCH(batch);
933 }
934
935 static inline void
936 gen75_mfd_avc_ref_idx_state(VADriverContextP ctx,
937                            VAPictureParameterBufferH264 *pic_param,
938                            VASliceParameterBufferH264 *slice_param,
939                            struct gen7_mfd_context *gen7_mfd_context)
940 {
941     gen6_send_avc_ref_idx_state(
942         gen7_mfd_context->base.batch,
943         slice_param,
944         gen7_mfd_context->reference_surface
945     );
946 }
947
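/*
 * MFX_AVC_WEIGHTOFFSET_STATE is emitted only when explicit weighted
 * prediction is in use: one table (list 0) for P/SP slices with
 * weighted_pred_flag, two tables (list 0 and list 1) for B slices with
 * weighted_bipred_idc == 1.  Each table packs 32 entries of
 * {luma weight, luma offset, Cb weight, Cb offset, Cr weight, Cr offset}
 * as 16-bit values, i.e. 96 DWords of payload.
 */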
948 static void
949 gen75_mfd_avc_weightoffset_state(VADriverContextP ctx,
950                                 VAPictureParameterBufferH264 *pic_param,
951                                 VASliceParameterBufferH264 *slice_param,
952                                 struct gen7_mfd_context *gen7_mfd_context)
953 {
954     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
955     int i, j, num_weight_offset_table = 0;
956     short weightoffsets[32 * 6];
957
958     if ((slice_param->slice_type == SLICE_TYPE_P ||
959          slice_param->slice_type == SLICE_TYPE_SP) &&
960         (pic_param->pic_fields.bits.weighted_pred_flag == 1)) {
961         num_weight_offset_table = 1;
962     }
963     
964     if ((slice_param->slice_type == SLICE_TYPE_B) &&
965         (pic_param->pic_fields.bits.weighted_bipred_idc == 1)) {
966         num_weight_offset_table = 2;
967     }
968
969     for (i = 0; i < num_weight_offset_table; i++) {
970         BEGIN_BCS_BATCH(batch, 98);
971         OUT_BCS_BATCH(batch, MFX_AVC_WEIGHTOFFSET_STATE | (98 - 2));
972         OUT_BCS_BATCH(batch, i);
973
974         if (i == 0) {
975             for (j = 0; j < 32; j++) {
976                 weightoffsets[j * 6 + 0] = slice_param->luma_weight_l0[j];
977                 weightoffsets[j * 6 + 1] = slice_param->luma_offset_l0[j];
978                 weightoffsets[j * 6 + 2] = slice_param->chroma_weight_l0[j][0];
979                 weightoffsets[j * 6 + 3] = slice_param->chroma_offset_l0[j][0];
980                 weightoffsets[j * 6 + 4] = slice_param->chroma_weight_l0[j][1];
981                 weightoffsets[j * 6 + 5] = slice_param->chroma_offset_l0[j][1];
982             }
983         } else {
984             for (j = 0; j < 32; j++) {
985                 weightoffsets[j * 6 + 0] = slice_param->luma_weight_l1[j];
986                 weightoffsets[j * 6 + 1] = slice_param->luma_offset_l1[j];
987                 weightoffsets[j * 6 + 2] = slice_param->chroma_weight_l1[j][0];
988                 weightoffsets[j * 6 + 3] = slice_param->chroma_offset_l1[j][0];
989                 weightoffsets[j * 6 + 4] = slice_param->chroma_weight_l1[j][1];
990                 weightoffsets[j * 6 + 5] = slice_param->chroma_offset_l1[j][1];
991             }
992         }
993
994         intel_batchbuffer_data(batch, weightoffsets, sizeof(weightoffsets));
995         ADVANCE_BCS_BATCH(batch);
996     }
997 }
998
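/*
 * MFD_AVC_BSD_OBJECT: points the bitstream decoder at one slice inside the
 * indirect object buffer (length and offset are relative to the base set by
 * MFX_IND_OBJ_BASE_ADDR_STATE) and tells it where the slice data starts
 * after the header: the byte part of slice_data_bit_offset goes into bits
 * 31:16 of DW4 and the remaining bit offset into bits 2:0.  For example, a
 * bit offset of 35 is programmed as byte 4, bit 3.
 */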
999 static void
1000 gen75_mfd_avc_bsd_object(VADriverContextP ctx,
1001                         VAPictureParameterBufferH264 *pic_param,
1002                         VASliceParameterBufferH264 *slice_param,
1003                         dri_bo *slice_data_bo,
1004                         VASliceParameterBufferH264 *next_slice_param,
1005                         struct gen7_mfd_context *gen7_mfd_context)
1006 {
1007     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1008     int slice_data_bit_offset = avc_get_first_mb_bit_offset(slice_data_bo,
1009                                                             slice_param,
1010                                                             pic_param->pic_fields.bits.entropy_coding_mode_flag);
1011
1012     /* the input bitstream format on GEN7 differs from GEN6 */
1013     BEGIN_BCS_BATCH(batch, 6);
1014     OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
1015     OUT_BCS_BATCH(batch, 
1016                   (slice_param->slice_data_size - slice_param->slice_data_offset));
1017     OUT_BCS_BATCH(batch, slice_param->slice_data_offset);
1018     OUT_BCS_BATCH(batch,
1019                   (0 << 31) |
1020                   (0 << 14) |
1021                   (0 << 12) |
1022                   (0 << 10) |
1023                   (0 << 8));
1024     OUT_BCS_BATCH(batch,
1025                   ((slice_data_bit_offset >> 3) << 16) |
1026                   (1 << 7)  |
1027                   (0 << 5)  |
1028                   (0 << 4)  |
1029                   ((next_slice_param == NULL) << 3) | /* LastSlice Flag */
1030                   (slice_data_bit_offset & 0x7));
1031     OUT_BCS_BATCH(batch, 0);
1032     ADVANCE_BCS_BATCH(batch);
1033 }
1034
1035 static inline void
1036 gen75_mfd_avc_context_init(
1037     VADriverContextP         ctx,
1038     struct gen7_mfd_context *gen7_mfd_context
1039 )
1040 {
1041     /* Initialize flat scaling lists */
1042     avc_gen_default_iq_matrix(&gen7_mfd_context->iq_matrix.h264);
1043 }
1044
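/*
 * Per-frame AVC setup: scan every slice to see whether in-loop deblocking is
 * enabled anywhere (disable_deblocking_filter_idc != 1); if so the render
 * target is bound as the post-deblocking output, otherwise as the
 * pre-deblocking output.  The routine also refreshes the reference frame
 * store, attaches the DMV buffers to the current surface and (re)allocates
 * the row-store scratch buffers, which are sized per macroblock row
 * (e.g. width_in_mbs * 64 bytes for the intra row store).
 */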
1045 static void
1046 gen75_mfd_avc_decode_init(VADriverContextP ctx,
1047                          struct decode_state *decode_state,
1048                          struct gen7_mfd_context *gen7_mfd_context)
1049 {
1050     VAPictureParameterBufferH264 *pic_param;
1051     VASliceParameterBufferH264 *slice_param;
1052     struct i965_driver_data *i965 = i965_driver_data(ctx);
1053     struct object_surface *obj_surface;
1054     dri_bo *bo;
1055     int i, j, enable_avc_ildb = 0;
1056     unsigned int width_in_mbs, height_in_mbs;
1057
1058     for (j = 0; j < decode_state->num_slice_params && enable_avc_ildb == 0; j++) {
1059         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
1060         slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
1061
1062         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1063             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
1064             assert((slice_param->slice_type == SLICE_TYPE_I) ||
1065                    (slice_param->slice_type == SLICE_TYPE_SI) ||
1066                    (slice_param->slice_type == SLICE_TYPE_P) ||
1067                    (slice_param->slice_type == SLICE_TYPE_SP) ||
1068                    (slice_param->slice_type == SLICE_TYPE_B));
1069
1070             if (slice_param->disable_deblocking_filter_idc != 1) {
1071                 enable_avc_ildb = 1;
1072                 break;
1073             }
1074
1075             slice_param++;
1076         }
1077     }
1078
1079     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1080     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
1081     intel_update_avc_frame_store_index(ctx, decode_state, pic_param, gen7_mfd_context->reference_surface);
1082     width_in_mbs = pic_param->picture_width_in_mbs_minus1 + 1;
1083     height_in_mbs = pic_param->picture_height_in_mbs_minus1 + 1;
1084     assert(width_in_mbs > 0 && width_in_mbs <= 256); /* 4K */
1085     assert(height_in_mbs > 0 && height_in_mbs <= 256);
1086
1087     /* Current decoded picture */
1088     obj_surface = decode_state->render_object;
1089     obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
1090     obj_surface->flags |= (pic_param->pic_fields.bits.reference_pic_flag ? SURFACE_REFERENCED : 0);
1091
1092     avc_ensure_surface_bo(ctx, decode_state, obj_surface, pic_param);
1093     gen75_mfd_init_avc_surface(ctx, pic_param, obj_surface);
1094
1095     dri_bo_unreference(gen7_mfd_context->post_deblocking_output.bo);
1096     gen7_mfd_context->post_deblocking_output.bo = obj_surface->bo;
1097     dri_bo_reference(gen7_mfd_context->post_deblocking_output.bo);
1098     gen7_mfd_context->post_deblocking_output.valid = enable_avc_ildb;
1099
1100     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
1101     gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
1102     dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
1103     gen7_mfd_context->pre_deblocking_output.valid = !enable_avc_ildb;
1104
1105     dri_bo_unreference(gen7_mfd_context->intra_row_store_scratch_buffer.bo);
1106     bo = dri_bo_alloc(i965->intel.bufmgr,
1107                       "intra row store",
1108                       width_in_mbs * 64,
1109                       0x1000);
1110     assert(bo);
1111     gen7_mfd_context->intra_row_store_scratch_buffer.bo = bo;
1112     gen7_mfd_context->intra_row_store_scratch_buffer.valid = 1;
1113
1114     dri_bo_unreference(gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo);
1115     bo = dri_bo_alloc(i965->intel.bufmgr,
1116                       "deblocking filter row store",
1117                       width_in_mbs * 64 * 4,
1118                       0x1000);
1119     assert(bo);
1120     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = bo;
1121     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 1;
1122
1123     dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
1124     bo = dri_bo_alloc(i965->intel.bufmgr,
1125                       "bsd mpc row store",
1126                       width_in_mbs * 64 * 2,
1127                       0x1000);
1128     assert(bo);
1129     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
1130     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 1;
1131
1132     dri_bo_unreference(gen7_mfd_context->mpr_row_store_scratch_buffer.bo);
1133     bo = dri_bo_alloc(i965->intel.bufmgr,
1134                       "mpr row store",
1135                       width_in_mbs * 64 * 2,
1136                       0x1000);
1137     assert(bo);
1138     gen7_mfd_context->mpr_row_store_scratch_buffer.bo = bo;
1139     gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 1;
1140
1141     gen7_mfd_context->bitplane_read_buffer.valid = 0;
1142 }
1143
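/*
 * Frame submission order for AVC, wrapped in an atomic BCS batch: MI_FLUSH,
 * pipe mode select, surface state, pipe buffer addresses, BSP buffer
 * addresses, QM state, image state and picture-ID state once per frame;
 * then, for each slice data buffer, the indirect object base address
 * followed by direct-mode, ref-idx, weight/offset and slice state plus the
 * BSD object that actually kicks off decoding of each slice it contains.
 */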
1144 static void
1145 gen75_mfd_avc_decode_picture(VADriverContextP ctx,
1146                             struct decode_state *decode_state,
1147                             struct gen7_mfd_context *gen7_mfd_context)
1148 {
1149     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1150     VAPictureParameterBufferH264 *pic_param;
1151     VASliceParameterBufferH264 *slice_param, *next_slice_param, *next_slice_group_param;
1152     dri_bo *slice_data_bo;
1153     int i, j;
1154
1155     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1156     pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
1157     gen75_mfd_avc_decode_init(ctx, decode_state, gen7_mfd_context);
1158
1159     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
1160     intel_batchbuffer_emit_mi_flush(batch);
1161     gen75_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
1162     gen75_mfd_surface_state(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
1163     gen75_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
1164     gen75_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_AVC, gen7_mfd_context);
1165     gen75_mfd_avc_qm_state(ctx, decode_state, gen7_mfd_context);
1166     gen75_mfd_avc_img_state(ctx, decode_state, gen7_mfd_context);
1167     gen75_mfd_avc_picid_state(ctx, decode_state, gen7_mfd_context);
1168
1169     for (j = 0; j < decode_state->num_slice_params; j++) {
1170         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
1171         slice_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j]->buffer;
1172         slice_data_bo = decode_state->slice_datas[j]->bo;
1173         gen75_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_AVC, gen7_mfd_context);
1174
1175         if (j == decode_state->num_slice_params - 1)
1176             next_slice_group_param = NULL;
1177         else
1178             next_slice_group_param = (VASliceParameterBufferH264 *)decode_state->slice_params[j + 1]->buffer;
1179
1180         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1181             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
1182             assert((slice_param->slice_type == SLICE_TYPE_I) ||
1183                    (slice_param->slice_type == SLICE_TYPE_SI) ||
1184                    (slice_param->slice_type == SLICE_TYPE_P) ||
1185                    (slice_param->slice_type == SLICE_TYPE_SP) ||
1186                    (slice_param->slice_type == SLICE_TYPE_B));
1187
1188             if (i < decode_state->slice_params[j]->num_elements - 1)
1189                 next_slice_param = slice_param + 1;
1190             else
1191                 next_slice_param = next_slice_group_param;
1192
1193             gen75_mfd_avc_directmode_state(ctx, decode_state, pic_param, slice_param, gen7_mfd_context);
1194             gen75_mfd_avc_ref_idx_state(ctx, pic_param, slice_param, gen7_mfd_context);
1195             gen75_mfd_avc_weightoffset_state(ctx, pic_param, slice_param, gen7_mfd_context);
1196             gen75_mfd_avc_slice_state(ctx, pic_param, slice_param, next_slice_param, gen7_mfd_context);
1197             gen75_mfd_avc_bsd_object(ctx, pic_param, slice_param, slice_data_bo, next_slice_param, gen7_mfd_context);
1198             slice_param++;
1199         }
1200     }
1201
1202     intel_batchbuffer_end_atomic(batch);
1203     intel_batchbuffer_flush(batch);
1204 }
1205
1206 static void
1207 gen75_mfd_mpeg2_decode_init(VADriverContextP ctx,
1208                            struct decode_state *decode_state,
1209                            struct gen7_mfd_context *gen7_mfd_context)
1210 {
1211     VAPictureParameterBufferMPEG2 *pic_param;
1212     struct i965_driver_data *i965 = i965_driver_data(ctx);
1213     struct object_surface *obj_surface;
1214     dri_bo *bo;
1215     unsigned int width_in_mbs;
1216
1217     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1218     pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
1219     width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
1220
1221     mpeg2_set_reference_surfaces(
1222         ctx,
1223         gen7_mfd_context->reference_surface,
1224         decode_state,
1225         pic_param
1226     );
1227
1228     /* Current decoded picture */
1229     obj_surface = decode_state->render_object;
1230     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC_NV12, SUBSAMPLE_YUV420);
1231
1232     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
1233     gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
1234     dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
1235     gen7_mfd_context->pre_deblocking_output.valid = 1;
1236
1237     dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
1238     bo = dri_bo_alloc(i965->intel.bufmgr,
1239                       "bsd mpc row store",
1240                       width_in_mbs * 96,
1241                       0x1000);
1242     assert(bo);
1243     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
1244     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 1;
1245
1246     gen7_mfd_context->post_deblocking_output.valid = 0;
1247     gen7_mfd_context->intra_row_store_scratch_buffer.valid = 0;
1248     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 0;
1249     gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 0;
1250     gen7_mfd_context->bitplane_read_buffer.valid = 0;
1251 }
1252
1253 static void
1254 gen75_mfd_mpeg2_pic_state(VADriverContextP ctx,
1255                          struct decode_state *decode_state,
1256                          struct gen7_mfd_context *gen7_mfd_context)
1257 {
1258     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1259     VAPictureParameterBufferMPEG2 *pic_param;
1260     unsigned int slice_concealment_disable_bit = 0;
1261
1262     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1263     pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
1264
1265     slice_concealment_disable_bit = 1;
1266
1267     BEGIN_BCS_BATCH(batch, 13);
1268     OUT_BCS_BATCH(batch, MFX_MPEG2_PIC_STATE | (13 - 2));
1269     OUT_BCS_BATCH(batch,
1270                   (pic_param->f_code & 0xf) << 28 | /* f_code[1][1] */
1271                   ((pic_param->f_code >> 4) & 0xf) << 24 | /* f_code[1][0] */
1272                   ((pic_param->f_code >> 8) & 0xf) << 20 | /* f_code[0][1] */
1273                   ((pic_param->f_code >> 12) & 0xf) << 16 | /* f_code[0][0] */
1274                   pic_param->picture_coding_extension.bits.intra_dc_precision << 14 |
1275                   pic_param->picture_coding_extension.bits.picture_structure << 12 |
1276                   pic_param->picture_coding_extension.bits.top_field_first << 11 |
1277                   pic_param->picture_coding_extension.bits.frame_pred_frame_dct << 10 |
1278                   pic_param->picture_coding_extension.bits.concealment_motion_vectors << 9 |
1279                   pic_param->picture_coding_extension.bits.q_scale_type << 8 |
1280                   pic_param->picture_coding_extension.bits.intra_vlc_format << 7 | 
1281                   pic_param->picture_coding_extension.bits.alternate_scan << 6);
1282     OUT_BCS_BATCH(batch,
1283                   pic_param->picture_coding_type << 9);
1284     OUT_BCS_BATCH(batch,
1285                   (slice_concealment_disable_bit << 31) |
1286                   ((ALIGN(pic_param->vertical_size, 16) / 16) - 1) << 16 |
1287                   ((ALIGN(pic_param->horizontal_size, 16) / 16) - 1));
1288     OUT_BCS_BATCH(batch, 0);
1289     OUT_BCS_BATCH(batch, 0);
1290     OUT_BCS_BATCH(batch, 0);
1291     OUT_BCS_BATCH(batch, 0);
1292     OUT_BCS_BATCH(batch, 0);
1293     OUT_BCS_BATCH(batch, 0);
1294     OUT_BCS_BATCH(batch, 0);
1295     OUT_BCS_BATCH(batch, 0);
1296     OUT_BCS_BATCH(batch, 0);
1297     ADVANCE_BCS_BATCH(batch);
1298 }
1299
1300 static void
1301 gen75_mfd_mpeg2_qm_state(VADriverContextP ctx,
1302                         struct decode_state *decode_state,
1303                         struct gen7_mfd_context *gen7_mfd_context)
1304 {
1305     VAIQMatrixBufferMPEG2 * const gen_iq_matrix = &gen7_mfd_context->iq_matrix.mpeg2;
1306     int i, j;
1307
1308     /* Update internal QM state */
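         /*
          * VA supplies the quantiser matrices in zig-zag scan order; they are
          * stored de-zigzagged (raster order) here before being handed to
          * gen75_mfd_qm_state(). A load_* value of -1 presumably marks matrices
          * that have never been initialised for this context, so the flag is
          * always latched from the first IQ buffer received.
          */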
1309     if (decode_state->iq_matrix && decode_state->iq_matrix->buffer) {
1310         VAIQMatrixBufferMPEG2 * const iq_matrix =
1311             (VAIQMatrixBufferMPEG2 *)decode_state->iq_matrix->buffer;
1312
1313         if (gen_iq_matrix->load_intra_quantiser_matrix == -1 ||
1314             iq_matrix->load_intra_quantiser_matrix) {
1315             gen_iq_matrix->load_intra_quantiser_matrix =
1316                 iq_matrix->load_intra_quantiser_matrix;
1317             if (iq_matrix->load_intra_quantiser_matrix) {
1318                 for (j = 0; j < 64; j++)
1319                     gen_iq_matrix->intra_quantiser_matrix[zigzag_direct[j]] =
1320                         iq_matrix->intra_quantiser_matrix[j];
1321             }
1322         }
1323
1324         if (gen_iq_matrix->load_non_intra_quantiser_matrix == -1 ||
1325             iq_matrix->load_non_intra_quantiser_matrix) {
1326             gen_iq_matrix->load_non_intra_quantiser_matrix =
1327                 iq_matrix->load_non_intra_quantiser_matrix;
1328             if (iq_matrix->load_non_intra_quantiser_matrix) {
1329                 for (j = 0; j < 64; j++)
1330                     gen_iq_matrix->non_intra_quantiser_matrix[zigzag_direct[j]] =
1331                         iq_matrix->non_intra_quantiser_matrix[j];
1332             }
1333         }
1334     }
1335
1336     /* Commit QM state to HW */
1337     for (i = 0; i < 2; i++) {
1338         unsigned char *qm = NULL;
1339         int qm_type;
1340
1341         if (i == 0) {
1342             if (gen_iq_matrix->load_intra_quantiser_matrix) {
1343                 qm = gen_iq_matrix->intra_quantiser_matrix;
1344                 qm_type = MFX_QM_MPEG_INTRA_QUANTIZER_MATRIX;
1345             }
1346         } else {
1347             if (gen_iq_matrix->load_non_intra_quantiser_matrix) {
1348                 qm = gen_iq_matrix->non_intra_quantiser_matrix;
1349                 qm_type = MFX_QM_MPEG_NON_INTRA_QUANTIZER_MATRIX;
1350             }
1351         }
1352
1353         if (!qm)
1354             continue;
1355
1356         gen75_mfd_qm_state(ctx, qm_type, qm, 64, gen7_mfd_context);
1357     }
1358 }
1359
1360 static void
1361 gen75_mfd_mpeg2_bsd_object(VADriverContextP ctx,
1362                           VAPictureParameterBufferMPEG2 *pic_param,
1363                           VASliceParameterBufferMPEG2 *slice_param,
1364                           VASliceParameterBufferMPEG2 *next_slice_param,
1365                           struct gen7_mfd_context *gen7_mfd_context)
1366 {
1367     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1368     unsigned int width_in_mbs = ALIGN(pic_param->horizontal_size, 16) / 16;
1369     int mb_count, vpos0, hpos0, vpos1, hpos1, is_field_pic_wa, is_field_pic = 0;
1370
1371     if (pic_param->picture_coding_extension.bits.picture_structure == MPEG_TOP_FIELD ||
1372         pic_param->picture_coding_extension.bits.picture_structure == MPEG_BOTTOM_FIELD)
1373         is_field_pic = 1;
1374     is_field_pic_wa = is_field_pic &&
1375         gen7_mfd_context->wa_mpeg2_slice_vertical_position > 0;
1376
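         /*
          * If the slice-vertical-position workaround is active, the vertical
          * positions passed by the application appear to be in frame-MB rows
          * even though this is a field picture, so halve them here;
          * mpeg2_wa_slice_vertical_position() makes that determination.
          */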
1377     vpos0 = slice_param->slice_vertical_position / (1 + is_field_pic_wa);
1378     hpos0 = slice_param->slice_horizontal_position;
1379
1380     if (next_slice_param == NULL) {
1381         vpos1 = ALIGN(pic_param->vertical_size, 16) / 16 / (1 + is_field_pic);
1382         hpos1 = 0;
1383     } else {
1384         vpos1 = next_slice_param->slice_vertical_position / (1 + is_field_pic_wa);
1385         hpos1 = next_slice_param->slice_horizontal_position;
1386     }
1387
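         /*
          * Number of macroblocks covered by this slice: the difference between
          * the raster-scan MB address of the next slice's start (or of the end
          * of the picture) and that of this slice's start.
          */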
1388     mb_count = (vpos1 * width_in_mbs + hpos1) - (vpos0 * width_in_mbs + hpos0);
1389
1390     BEGIN_BCS_BATCH(batch, 5);
1391     OUT_BCS_BATCH(batch, MFD_MPEG2_BSD_OBJECT | (5 - 2));
1392     OUT_BCS_BATCH(batch, 
1393                   slice_param->slice_data_size - (slice_param->macroblock_offset >> 3));
1394     OUT_BCS_BATCH(batch, 
1395                   slice_param->slice_data_offset + (slice_param->macroblock_offset >> 3));
1396     OUT_BCS_BATCH(batch,
1397                   hpos0 << 24 |
1398                   vpos0 << 16 |
1399                   mb_count << 8 |
1400                   (next_slice_param == NULL) << 5 |
1401                   (next_slice_param == NULL) << 3 |
1402                   (slice_param->macroblock_offset & 0x7));
1403     OUT_BCS_BATCH(batch,
1404                   (slice_param->quantiser_scale_code << 24) |
1405                   (vpos1 << 8 | hpos1));
1406     ADVANCE_BCS_BATCH(batch);
1407 }
1408
1409 static void
1410 gen75_mfd_mpeg2_decode_picture(VADriverContextP ctx,
1411                               struct decode_state *decode_state,
1412                               struct gen7_mfd_context *gen7_mfd_context)
1413 {
1414     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1415     VAPictureParameterBufferMPEG2 *pic_param;
1416     VASliceParameterBufferMPEG2 *slice_param, *next_slice_param, *next_slice_group_param;
1417     dri_bo *slice_data_bo;
1418     int i, j;
1419
1420     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1421     pic_param = (VAPictureParameterBufferMPEG2 *)decode_state->pic_param->buffer;
1422
1423     gen75_mfd_mpeg2_decode_init(ctx, decode_state, gen7_mfd_context);
1424     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
1425     intel_batchbuffer_emit_mi_flush(batch);
1426     gen75_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
1427     gen75_mfd_surface_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
1428     gen75_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
1429     gen75_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_MPEG2, gen7_mfd_context);
1430     gen75_mfd_mpeg2_pic_state(ctx, decode_state, gen7_mfd_context);
1431     gen75_mfd_mpeg2_qm_state(ctx, decode_state, gen7_mfd_context);
1432
1433     if (gen7_mfd_context->wa_mpeg2_slice_vertical_position < 0)
1434         gen7_mfd_context->wa_mpeg2_slice_vertical_position =
1435             mpeg2_wa_slice_vertical_position(decode_state, pic_param);
1436
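         /*
          * Walk every slice parameter buffer and every slice in it;
          * next_slice_param always points at the slice that follows the
          * current one, possibly in the next buffer, and is NULL only for the
          * very last slice of the picture.
          */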
1437     for (j = 0; j < decode_state->num_slice_params; j++) {
1438         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
1439         slice_param = (VASliceParameterBufferMPEG2 *)decode_state->slice_params[j]->buffer;
1440         slice_data_bo = decode_state->slice_datas[j]->bo;
1441         gen75_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_MPEG2, gen7_mfd_context);
1442
1443         if (j == decode_state->num_slice_params - 1)
1444             next_slice_group_param = NULL;
1445         else
1446             next_slice_group_param = (VASliceParameterBufferMPEG2 *)decode_state->slice_params[j + 1]->buffer;
1447
1448         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
1449             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
1450
1451             if (i < decode_state->slice_params[j]->num_elements - 1)
1452                 next_slice_param = slice_param + 1;
1453             else
1454                 next_slice_param = next_slice_group_param;
1455
1456             gen75_mfd_mpeg2_bsd_object(ctx, pic_param, slice_param, next_slice_param, gen7_mfd_context);
1457             slice_param++;
1458         }
1459     }
1460
1461     intel_batchbuffer_end_atomic(batch);
1462     intel_batchbuffer_flush(batch);
1463 }
1464
1465 static const int va_to_gen7_vc1_pic_type[5] = {
1466     GEN7_VC1_I_PICTURE,
1467     GEN7_VC1_P_PICTURE,
1468     GEN7_VC1_B_PICTURE,
1469     GEN7_VC1_BI_PICTURE,
1470     GEN7_VC1_P_PICTURE,
1471 };
1472
1473 static const int va_to_gen7_vc1_mv[4] = {
1474     1, /* 1-MV */
1475     2, /* 1-MV half-pel */
1476     3, /* 1-MV half-pel bilinear */
1477     0, /* Mixed MV */
1478 };
1479
1480 static const int b_picture_scale_factor[21] = {
1481     128, 85,  170, 64,  192,
1482     51,  102, 153, 204, 43,
1483     215, 37,  74,  111, 148,
1484     185, 222, 32,  96,  160, 
1485     224,
1486 };
1487
1488 static const int va_to_gen7_vc1_condover[3] = {
1489     0,
1490     2,
1491     3
1492 };
1493
1494 static const int va_to_gen7_vc1_profile[4] = {
1495     GEN7_VC1_SIMPLE_PROFILE,
1496     GEN7_VC1_MAIN_PROFILE,
1497     GEN7_VC1_RESERVED_PROFILE,
1498     GEN7_VC1_ADVANCED_PROFILE
1499 };
1500
1501 static void 
1502 gen75_mfd_free_vc1_surface(void **data)
1503 {
1504     struct gen7_vc1_surface *gen7_vc1_surface = *data;
1505
1506     if (!gen7_vc1_surface)
1507         return;
1508
1509     dri_bo_unreference(gen7_vc1_surface->dmv);
1510     free(gen7_vc1_surface);
1511     *data = NULL;
1512 }
1513
1514 static void
1515 gen75_mfd_init_vc1_surface(VADriverContextP ctx, 
1516                           VAPictureParameterBufferVC1 *pic_param,
1517                           struct object_surface *obj_surface)
1518 {
1519     struct i965_driver_data *i965 = i965_driver_data(ctx);
1520     struct gen7_vc1_surface *gen7_vc1_surface = obj_surface->private_data;
1521     int width_in_mbs = ALIGN(pic_param->coded_width, 16) / 16;
1522     int height_in_mbs = ALIGN(pic_param->coded_height, 16) / 16;
1523
1524     obj_surface->free_private_data = gen75_mfd_free_vc1_surface;
1525
1526     if (!gen7_vc1_surface) {
1527         gen7_vc1_surface = calloc(sizeof(struct gen7_vc1_surface), 1);
1528         assert((obj_surface->size & 0x3f) == 0);
1529         obj_surface->private_data = gen7_vc1_surface;
1530     }
1531
1532     gen7_vc1_surface->picture_type = pic_param->picture_fields.bits.picture_type;
1533
1534     if (gen7_vc1_surface->dmv == NULL) {
1535         gen7_vc1_surface->dmv = dri_bo_alloc(i965->intel.bufmgr,
1536                                              "direct mv w/r buffer",
1537                                              width_in_mbs * height_in_mbs * 64,
1538                                              0x1000);
1539     }
1540 }
1541
1542 static void
1543 gen75_mfd_vc1_decode_init(VADriverContextP ctx,
1544                          struct decode_state *decode_state,
1545                          struct gen7_mfd_context *gen7_mfd_context)
1546 {
1547     VAPictureParameterBufferVC1 *pic_param;
1548     struct i965_driver_data *i965 = i965_driver_data(ctx);
1549     struct object_surface *obj_surface;
1550     dri_bo *bo;
1551     int width_in_mbs;
1552     int picture_type;
1553
1554     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1555     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1556     width_in_mbs = ALIGN(pic_param->coded_width, 16) / 16;
1557     picture_type = pic_param->picture_fields.bits.picture_type;
1558  
1559     intel_update_vc1_frame_store_index(ctx,
1560                                        decode_state,
1561                                        pic_param,
1562                                        gen7_mfd_context->reference_surface);
1563
1564     /* Current decoded picture */
1565     obj_surface = decode_state->render_object;
1566     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC_NV12, SUBSAMPLE_YUV420);
1567     gen75_mfd_init_vc1_surface(ctx, pic_param, obj_surface);
1568
1569     dri_bo_unreference(gen7_mfd_context->post_deblocking_output.bo);
1570     gen7_mfd_context->post_deblocking_output.bo = obj_surface->bo;
1571     dri_bo_reference(gen7_mfd_context->post_deblocking_output.bo);
1572     gen7_mfd_context->post_deblocking_output.valid = pic_param->entrypoint_fields.bits.loopfilter;
1573
1574     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
1575     gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
1576     dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
1577     gen7_mfd_context->pre_deblocking_output.valid = !pic_param->entrypoint_fields.bits.loopfilter;
1578
1579     dri_bo_unreference(gen7_mfd_context->intra_row_store_scratch_buffer.bo);
1580     bo = dri_bo_alloc(i965->intel.bufmgr,
1581                       "intra row store",
1582                       width_in_mbs * 64,
1583                       0x1000);
1584     assert(bo);
1585     gen7_mfd_context->intra_row_store_scratch_buffer.bo = bo;
1586     gen7_mfd_context->intra_row_store_scratch_buffer.valid = 1;
1587
1588     dri_bo_unreference(gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo);
1589     bo = dri_bo_alloc(i965->intel.bufmgr,
1590                       "deblocking filter row store",
1591                       width_in_mbs * 7 * 64,
1592                       0x1000);
1593     assert(bo);
1594     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = bo;
1595     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 1;
1596
1597     dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
1598     bo = dri_bo_alloc(i965->intel.bufmgr,
1599                       "bsd mpc row store",
1600                       width_in_mbs * 96,
1601                       0x1000);
1602     assert(bo);
1603     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
1604     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 1;
1605
1606     gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 0;
1607
1608     gen7_mfd_context->bitplane_read_buffer.valid = !!pic_param->bitplane_present.value;
1609     dri_bo_unreference(gen7_mfd_context->bitplane_read_buffer.bo);
1610     
1611     if (gen7_mfd_context->bitplane_read_buffer.valid) {
1612         int width_in_mbs = ALIGN(pic_param->coded_width, 16) / 16;
1613         int height_in_mbs = ALIGN(pic_param->coded_height, 16) / 16;
1614         int bitplane_width = ALIGN(width_in_mbs, 2) / 2;
1615         int src_w, src_h;
1616         uint8_t *src = NULL, *dst = NULL;
1617
1618         assert(decode_state->bit_plane->buffer);
1619         src = decode_state->bit_plane->buffer;
1620
1621         bo = dri_bo_alloc(i965->intel.bufmgr,
1622                           "VC-1 Bitplane",
1623                           bitplane_width * height_in_mbs,
1624                           0x1000);
1625         assert(bo);
1626         gen7_mfd_context->bitplane_read_buffer.bo = bo;
1627
1628         dri_bo_map(bo, True);
1629         assert(bo->virtual);
1630         dst = bo->virtual;
1631
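             /*
              * Repack the VA bitplane buffer, which holds two 4-bit macroblock
              * entries per byte, into the per-MB-row layout read through the
              * MFD bitplane read buffer. For skipped pictures the 0x2 bit is
              * forced on for every macroblock, presumably to mark each MB as
              * skipped.
              */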
1632         for (src_h = 0; src_h < height_in_mbs; src_h++) {
1633             for(src_w = 0; src_w < width_in_mbs; src_w++) {
1634                 int src_index, dst_index;
1635                 int src_shift;
1636                 uint8_t src_value;
1637
1638                 src_index = (src_h * width_in_mbs + src_w) / 2;
1639                 src_shift = !((src_h * width_in_mbs + src_w) & 1) * 4;
1640                 src_value = ((src[src_index] >> src_shift) & 0xf);
1641
1642                 if (picture_type == GEN7_VC1_SKIPPED_PICTURE) {
1643                     src_value |= 0x2;
1644                 }
1645
1646                 dst_index = src_w / 2;
1647                 dst[dst_index] = ((dst[dst_index] >> 4) | (src_value << 4));
1648             }
1649
1650             if (src_w & 1)
1651                 dst[src_w / 2] >>= 4;
1652
1653             dst += bitplane_width;
1654         }
1655
1656         dri_bo_unmap(bo);
1657     } else
1658         gen7_mfd_context->bitplane_read_buffer.bo = NULL;
1659 }
1660
1661 static void
1662 gen75_mfd_vc1_pic_state(VADriverContextP ctx,
1663                        struct decode_state *decode_state,
1664                        struct gen7_mfd_context *gen7_mfd_context)
1665 {
1666     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1667     VAPictureParameterBufferVC1 *pic_param;
1668     struct object_surface *obj_surface;
1669     int alt_pquant_config = 0, alt_pquant_edge_mask = 0, alt_pq;
1670     int dquant, dquantfrm, dqprofile, dqdbedge, dqsbedge, dqbilevel;
1671     int unified_mv_mode;
1672     int ref_field_pic_polarity = 0;
1673     int scale_factor = 0;
1674     int trans_ac_y = 0;
1675     int dmv_surface_valid = 0;
1676     int brfd = 0;
1677     int fcm = 0;
1678     int picture_type;
1679     int profile;
1680     int overlap;
1681     int interpolation_mode = 0;
1682
1683     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1684     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1685
1686     profile = va_to_gen7_vc1_profile[pic_param->sequence_fields.bits.profile];
1687     dquant = pic_param->pic_quantizer_fields.bits.dquant;
1688     dquantfrm = pic_param->pic_quantizer_fields.bits.dq_frame;
1689     dqprofile = pic_param->pic_quantizer_fields.bits.dq_profile;
1690     dqdbedge = pic_param->pic_quantizer_fields.bits.dq_db_edge;
1691     dqsbedge = pic_param->pic_quantizer_fields.bits.dq_sb_edge;
1692     dqbilevel = pic_param->pic_quantizer_fields.bits.dq_binary_level;
1693     alt_pq = pic_param->pic_quantizer_fields.bits.alt_pic_quantizer;
1694
1695     if (dquant == 0) {
1696         alt_pquant_config = 0;
1697         alt_pquant_edge_mask = 0;
1698     } else if (dquant == 2) {
1699         alt_pquant_config = 1;
1700         alt_pquant_edge_mask = 0xf;
1701     } else {
1702         assert(dquant == 1);
1703         if (dquantfrm == 0) {
1704             alt_pquant_config = 0;
1705             alt_pquant_edge_mask = 0;
1706             alt_pq = 0;
1707         } else {
1708             assert(dquantfrm == 1);
1709             alt_pquant_config = 1;
1710
1711             switch (dqprofile) {
1712             case 3:
1713                 if (dqbilevel == 0) {
1714                     alt_pquant_config = 2;
1715                     alt_pquant_edge_mask = 0;
1716                 } else {
1717                     assert(dqbilevel == 1);
1718                     alt_pquant_config = 3;
1719                     alt_pquant_edge_mask = 0;
1720                 }
1721                 break;
1722                 
1723             case 0:
1724                 alt_pquant_edge_mask = 0xf;
1725                 break;
1726
1727             case 1:
1728                 if (dqdbedge == 3)
1729                     alt_pquant_edge_mask = 0x9;
1730                 else
1731                     alt_pquant_edge_mask = (0x3 << dqdbedge);
1732
1733                 break;
1734
1735             case 2:
1736                 alt_pquant_edge_mask = (0x1 << dqsbedge);
1737                 break;
1738
1739             default:
1740                 assert(0);
1741             }
1742         }
1743     }
1744
1745     if (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation) {
1746         assert(pic_param->mv_fields.bits.mv_mode2 < 4);
1747         unified_mv_mode = va_to_gen7_vc1_mv[pic_param->mv_fields.bits.mv_mode2];
1748     } else {
1749         assert(pic_param->mv_fields.bits.mv_mode < 4);
1750         unified_mv_mode = va_to_gen7_vc1_mv[pic_param->mv_fields.bits.mv_mode];
1751     }
1752
1753     if (pic_param->sequence_fields.bits.interlace == 1 &&
1754         pic_param->picture_fields.bits.frame_coding_mode != 0) { /* frame-interlace or field-interlace */
1755         /* FIXME: calculate reference field picture polarity */
1756         assert(0);
1757         ref_field_pic_polarity = 0;
1758     }
1759
1760     if (pic_param->b_picture_fraction < 21)
1761         scale_factor = b_picture_scale_factor[pic_param->b_picture_fraction];
1762
1763     picture_type = va_to_gen7_vc1_pic_type[pic_param->picture_fields.bits.picture_type];
1764     
1765     if (profile == GEN7_VC1_ADVANCED_PROFILE && 
1766         picture_type == GEN7_VC1_I_PICTURE)
1767         picture_type = GEN7_VC1_BI_PICTURE;
1768
1769     if (picture_type == GEN7_VC1_I_PICTURE || picture_type == GEN7_VC1_BI_PICTURE) /* I picture */
1770         trans_ac_y = pic_param->transform_fields.bits.transform_ac_codingset_idx2;
1771     else {
1772         trans_ac_y = pic_param->transform_fields.bits.transform_ac_codingset_idx1;
1773
1774         /*
1775          * 8.3.6.2.1 Transform Type Selection
1776          * If variable-sized transform coding is not enabled,
1777          * then the 8x8 transform shall be used for all blocks.
1778          * This is also a requirement of MFX_VC1_PIC_STATE.
1779          */
1780         if (pic_param->transform_fields.bits.variable_sized_transform_flag == 0) {
1781             pic_param->transform_fields.bits.mb_level_transform_type_flag   = 1;
1782             pic_param->transform_fields.bits.frame_level_transform_type     = 0;
1783         }
1784     }
1785
1786     if (picture_type == GEN7_VC1_B_PICTURE) {
1787         struct gen7_vc1_surface *gen7_vc1_surface = NULL;
1788
1789         obj_surface = decode_state->reference_objects[1];
1790
1791         if (obj_surface)
1792             gen7_vc1_surface = obj_surface->private_data;
1793
1794         if (!gen7_vc1_surface || 
1795             (va_to_gen7_vc1_pic_type[gen7_vc1_surface->picture_type] == GEN7_VC1_I_PICTURE ||
1796              va_to_gen7_vc1_pic_type[gen7_vc1_surface->picture_type] == GEN7_VC1_BI_PICTURE))
1797             dmv_surface_valid = 0;
1798         else
1799             dmv_surface_valid = 1;
1800     }
1801
1802     assert(pic_param->picture_fields.bits.frame_coding_mode < 3);
1803
1804     if (pic_param->picture_fields.bits.frame_coding_mode < 2)
1805         fcm = pic_param->picture_fields.bits.frame_coding_mode;
1806     else {
1807         if (pic_param->picture_fields.bits.top_field_first)
1808             fcm = 2;
1809         else
1810             fcm = 3;
1811     }
1812
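     /*
      * For B pictures, derive BRFD (backward reference frame distance) from
      * the reference distance and the B-fraction scale factor, clamping the
      * result at zero. Note that brfd is computed here but is not referenced
      * when the MFD_VC1_LONG_PIC_STATE command below is built.
      */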
1813     if (pic_param->picture_fields.bits.picture_type == GEN7_VC1_B_PICTURE) { /* B picture */
1814         brfd = pic_param->reference_fields.bits.reference_distance;
1815         brfd = (scale_factor * brfd) >> 8;
1816         brfd = pic_param->reference_fields.bits.reference_distance - brfd - 1;
1817
1818         if (brfd < 0)
1819             brfd = 0;
1820     }
1821
1822     overlap = 0;
1823     if (profile != GEN7_VC1_ADVANCED_PROFILE) {
1824         if (pic_param->pic_quantizer_fields.bits.pic_quantizer_scale >= 9 &&
1825             pic_param->picture_fields.bits.picture_type != GEN7_VC1_B_PICTURE) {
1826             overlap = 1;
1827         }
1828     } else {
1829         if (pic_param->picture_fields.bits.picture_type == GEN7_VC1_P_PICTURE &&
1830             pic_param->pic_quantizer_fields.bits.pic_quantizer_scale >= 9) {
1831             overlap = 1;
1832         }
1833         if (pic_param->picture_fields.bits.picture_type == GEN7_VC1_I_PICTURE ||
1834             pic_param->picture_fields.bits.picture_type == GEN7_VC1_BI_PICTURE) {
1835             if (pic_param->pic_quantizer_fields.bits.pic_quantizer_scale >= 9) {
1836                 overlap = 1;
1837             } else if (va_to_gen7_vc1_condover[pic_param->conditional_overlap_flag] == 2 ||
1838                        va_to_gen7_vc1_condover[pic_param->conditional_overlap_flag] == 3) {
1839                 overlap = 1;
1840             }
1841         }
1842     }
1843
1844     assert(pic_param->conditional_overlap_flag < 3);
1845     assert(pic_param->mv_fields.bits.mv_table < 4); /* FIXME: interlace mode */
1846
1847     if (pic_param->mv_fields.bits.mv_mode == VAMvMode1MvHalfPelBilinear ||
1848         (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation &&
1849          pic_param->mv_fields.bits.mv_mode2 == VAMvMode1MvHalfPelBilinear))
1850         interpolation_mode = 9; /* Half-pel bilinear */
1851     else if (pic_param->mv_fields.bits.mv_mode == VAMvMode1MvHalfPel ||
1852              (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation &&
1853               pic_param->mv_fields.bits.mv_mode2 == VAMvMode1MvHalfPel))
1854         interpolation_mode = 1; /* Half-pel bicubic */
1855     else
1856         interpolation_mode = 0; /* Quarter-pel bicubic */
1857
1858     BEGIN_BCS_BATCH(batch, 6);
1859     OUT_BCS_BATCH(batch, MFD_VC1_LONG_PIC_STATE | (6 - 2));
1860     OUT_BCS_BATCH(batch,
1861                   (((ALIGN(pic_param->coded_height, 16) / 16) - 1) << 16) |
1862                   ((ALIGN(pic_param->coded_width, 16) / 16) - 1));
1863     OUT_BCS_BATCH(batch,
1864                   ((ALIGN(pic_param->coded_width, 16) / 16 + 1) / 2 - 1) << 24 |
1865                   dmv_surface_valid << 15 |
1866                   (pic_param->pic_quantizer_fields.bits.quantizer == 0) << 14 | /* implicit quantizer */
1867                   pic_param->rounding_control << 13 |
1868                   pic_param->sequence_fields.bits.syncmarker << 12 |
1869                   interpolation_mode << 8 |
1870                   0 << 7 | /* FIXME: scale up or down ??? */
1871                   pic_param->range_reduction_frame << 6 |
1872                   pic_param->entrypoint_fields.bits.loopfilter << 5 |
1873                   overlap << 4 |
1874                   !pic_param->picture_fields.bits.is_first_field << 3 |
1875                   (pic_param->sequence_fields.bits.profile == 3) << 0);
1876     OUT_BCS_BATCH(batch,
1877                   va_to_gen7_vc1_condover[pic_param->conditional_overlap_flag] << 29 |
1878                   picture_type << 26 |
1879                   fcm << 24 |
1880                   alt_pq << 16 |
1881                   pic_param->pic_quantizer_fields.bits.pic_quantizer_scale << 8 |
1882                   scale_factor << 0);
1883     OUT_BCS_BATCH(batch,
1884                   unified_mv_mode << 28 |
1885                   pic_param->mv_fields.bits.four_mv_switch << 27 |
1886                   pic_param->fast_uvmc_flag << 26 |
1887                   ref_field_pic_polarity << 25 |
1888                   pic_param->reference_fields.bits.num_reference_pictures << 24 |
1889                   pic_param->reference_fields.bits.reference_distance << 20 |
1890                   pic_param->reference_fields.bits.reference_distance << 16 | /* FIXME: ??? */
1891                   pic_param->mv_fields.bits.extended_dmv_range << 10 |
1892                   pic_param->mv_fields.bits.extended_mv_range << 8 |
1893                   alt_pquant_edge_mask << 4 |
1894                   alt_pquant_config << 2 |
1895                   pic_param->pic_quantizer_fields.bits.half_qp << 1 |                  
1896                   pic_param->pic_quantizer_fields.bits.pic_quantizer_type << 0);
1897     OUT_BCS_BATCH(batch,
1898                   !!pic_param->bitplane_present.value << 31 |
1899                   !pic_param->bitplane_present.flags.bp_forward_mb << 30 |
1900                   !pic_param->bitplane_present.flags.bp_mv_type_mb << 29 |
1901                   !pic_param->bitplane_present.flags.bp_skip_mb << 28 |
1902                   !pic_param->bitplane_present.flags.bp_direct_mb << 27 |
1903                   !pic_param->bitplane_present.flags.bp_overflags << 26 |
1904                   !pic_param->bitplane_present.flags.bp_ac_pred << 25 |
1905                   !pic_param->bitplane_present.flags.bp_field_tx << 24 |
1906                   pic_param->mv_fields.bits.mv_table << 20 |
1907                   pic_param->mv_fields.bits.four_mv_block_pattern_table << 18 |
1908                   pic_param->mv_fields.bits.two_mv_block_pattern_table << 16 |
1909                   pic_param->transform_fields.bits.frame_level_transform_type << 12 |                  
1910                   pic_param->transform_fields.bits.mb_level_transform_type_flag << 11 |
1911                   pic_param->mb_mode_table << 8 |
1912                   trans_ac_y << 6 |
1913                   pic_param->transform_fields.bits.transform_ac_codingset_idx1 << 4 |
1914                   pic_param->transform_fields.bits.intra_transform_dc_table << 3 |
1915                   pic_param->cbp_table << 0);
1916     ADVANCE_BCS_BATCH(batch);
1917 }
1918
1919 static void
1920 gen75_mfd_vc1_pred_pipe_state(VADriverContextP ctx,
1921                              struct decode_state *decode_state,
1922                              struct gen7_mfd_context *gen7_mfd_context)
1923 {
1924     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1925     VAPictureParameterBufferVC1 *pic_param;
1926     int intensitycomp_single;
1927
1928     assert(decode_state->pic_param && decode_state->pic_param->buffer);
1929     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
1930
1933     intensitycomp_single = (pic_param->mv_fields.bits.mv_mode == VAMvModeIntensityCompensation);
1934
1935     BEGIN_BCS_BATCH(batch, 6);
1936     OUT_BCS_BATCH(batch, MFX_VC1_PRED_PIPE_STATE | (6 - 2));
1937     OUT_BCS_BATCH(batch,
1938                   0 << 14 | /* FIXME: double ??? */
1939                   0 << 12 |
1940                   intensitycomp_single << 10 |
1941                   intensitycomp_single << 8 |
1942                   0 << 4 | /* FIXME: interlace mode */
1943                   0);
1944     OUT_BCS_BATCH(batch,
1945                   pic_param->luma_shift << 16 |
1946                   pic_param->luma_scale << 0); /* FIXME: Luma Scaling */
1947     OUT_BCS_BATCH(batch, 0);
1948     OUT_BCS_BATCH(batch, 0);
1949     OUT_BCS_BATCH(batch, 0);
1950     ADVANCE_BCS_BATCH(batch);
1951 }
1952
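     /*
      * On B0+ steppings MFX_VC1_DIRECTMODE_STATE is a 7-DWord command: each
      * direct-MV buffer address is followed by two extra DWords (emitted as
      * zero here), whereas earlier steppings use the 3-DWord form emitted by
      * gen75_mfd_vc1_directmode_state() below.
      */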
1953 static void
1954 gen75_mfd_vc1_directmode_state_bplus(VADriverContextP ctx,
1955                               struct decode_state *decode_state,
1956                               struct gen7_mfd_context *gen7_mfd_context)
1957 {
1958     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
1959     struct object_surface *obj_surface;
1960     dri_bo *dmv_read_buffer = NULL, *dmv_write_buffer = NULL;
1961
1962     obj_surface = decode_state->render_object;
1963
1964     if (obj_surface && obj_surface->private_data) {
1965         dmv_write_buffer = ((struct gen7_vc1_surface *)(obj_surface->private_data))->dmv;
1966     }
1967
1968     obj_surface = decode_state->reference_objects[1];
1969
1970     if (obj_surface && obj_surface->private_data) {
1971         dmv_read_buffer = ((struct gen7_vc1_surface *)(obj_surface->private_data))->dmv;
1972     }
1973
1974     BEGIN_BCS_BATCH(batch, 7);
1975     OUT_BCS_BATCH(batch, MFX_VC1_DIRECTMODE_STATE | (7 - 2));
1976
1977     if (dmv_write_buffer)
1978         OUT_BCS_RELOC(batch, dmv_write_buffer,
1979                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
1980                       0);
1981     else
1982         OUT_BCS_BATCH(batch, 0);
1983
1984     OUT_BCS_BATCH(batch, 0);
1985     OUT_BCS_BATCH(batch, 0);
1986
1987     if (dmv_read_buffer)
1988         OUT_BCS_RELOC(batch, dmv_read_buffer,
1989                       I915_GEM_DOMAIN_INSTRUCTION, 0,
1990                       0);
1991     else
1992         OUT_BCS_BATCH(batch, 0);
1993     OUT_BCS_BATCH(batch, 0);
1994     OUT_BCS_BATCH(batch, 0);
1995
1996     ADVANCE_BCS_BATCH(batch);
1997 }
1998
1999 static void
2000 gen75_mfd_vc1_directmode_state(VADriverContextP ctx,
2001                               struct decode_state *decode_state,
2002                               struct gen7_mfd_context *gen7_mfd_context)
2003 {
2004     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2005     struct i965_driver_data *i965 = i965_driver_data(ctx);
2006     struct object_surface *obj_surface;
2007     dri_bo *dmv_read_buffer = NULL, *dmv_write_buffer = NULL;
2008
2009     if (IS_STEPPING_BPLUS(i965)) {
2010         gen75_mfd_vc1_directmode_state_bplus(ctx, decode_state, gen7_mfd_context);
2011         return;
2012     }
2013
2014     obj_surface = decode_state->render_object;
2015
2016     if (obj_surface && obj_surface->private_data) {
2017         dmv_write_buffer = ((struct gen7_vc1_surface *)(obj_surface->private_data))->dmv;
2018     }
2019
2020     obj_surface = decode_state->reference_objects[1];
2021
2022     if (obj_surface && obj_surface->private_data) {
2023         dmv_read_buffer = ((struct gen7_vc1_surface *)(obj_surface->private_data))->dmv;
2024     }
2025
2026     BEGIN_BCS_BATCH(batch, 3);
2027     OUT_BCS_BATCH(batch, MFX_VC1_DIRECTMODE_STATE | (3 - 2));
2028
2029     if (dmv_write_buffer)
2030         OUT_BCS_RELOC(batch, dmv_write_buffer,
2031                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2032                       0);
2033     else
2034         OUT_BCS_BATCH(batch, 0);
2035
2036     if (dmv_read_buffer)
2037         OUT_BCS_RELOC(batch, dmv_read_buffer,
2038                       I915_GEM_DOMAIN_INSTRUCTION, 0,
2039                       0);
2040     else
2041         OUT_BCS_BATCH(batch, 0);
2042
2043     ADVANCE_BCS_BATCH(batch);
2044 }
2045
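     /*
      * For the VC-1 Advanced profile (profile == 3) the slice header still
      * contains 0x00 0x00 0x03 emulation-prevention byte sequences. The
      * macroblock bit offset passed in through VA appears to be counted over
      * the unescaped header, while the hardware parses the raw buffer, so
      * re-add one byte for every emulation pattern found in the header before
      * converting the offset back to a bit position.
      */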
2046 static int
2047 gen75_mfd_vc1_get_macroblock_bit_offset(uint8_t *buf, int in_slice_data_bit_offset, int profile)
2048 {
2049     int out_slice_data_bit_offset;
2050     int slice_header_size = in_slice_data_bit_offset / 8;
2051     int i, j;
2052
2053     if (profile != 3)
2054         out_slice_data_bit_offset = in_slice_data_bit_offset;
2055     else {
2056         for (i = 0, j = 0; i < slice_header_size; i++, j++) {
2057             if (!buf[j] && !buf[j + 1] && buf[j + 2] == 3 && buf[j + 3] < 4) {
2058                 i++, j += 2;
2059             }
2060         }
2061
2062         out_slice_data_bit_offset = 8 * j + in_slice_data_bit_offset % 8;
2063     }
2064
2065     return out_slice_data_bit_offset;
2066 }
2067
2068 static void
2069 gen75_mfd_vc1_bsd_object(VADriverContextP ctx,
2070                         VAPictureParameterBufferVC1 *pic_param,
2071                         VASliceParameterBufferVC1 *slice_param,
2072                         VASliceParameterBufferVC1 *next_slice_param,
2073                         dri_bo *slice_data_bo,
2074                         struct gen7_mfd_context *gen7_mfd_context)
2075 {
2076     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2077     int next_slice_start_vert_pos;
2078     int macroblock_offset;
2079     uint8_t *slice_data = NULL;
2080
2081     dri_bo_map(slice_data_bo, 0);
2082     slice_data = (uint8_t *)(slice_data_bo->virtual + slice_param->slice_data_offset);
2083     macroblock_offset = gen75_mfd_vc1_get_macroblock_bit_offset(slice_data, 
2084                                                                slice_param->macroblock_offset,
2085                                                                pic_param->sequence_fields.bits.profile);
2086     dri_bo_unmap(slice_data_bo);
2087
2088     if (next_slice_param)
2089         next_slice_start_vert_pos = next_slice_param->slice_vertical_position;
2090     else
2091         next_slice_start_vert_pos = ALIGN(pic_param->coded_height, 16) / 16;
2092
2093     BEGIN_BCS_BATCH(batch, 5);
2094     OUT_BCS_BATCH(batch, MFD_VC1_BSD_OBJECT | (5 - 2));
2095     OUT_BCS_BATCH(batch, 
2096                   slice_param->slice_data_size - (macroblock_offset >> 3));
2097     OUT_BCS_BATCH(batch, 
2098                   slice_param->slice_data_offset + (macroblock_offset >> 3));
2099     OUT_BCS_BATCH(batch,
2100                   slice_param->slice_vertical_position << 16 |
2101                   next_slice_start_vert_pos << 0);
2102     OUT_BCS_BATCH(batch,
2103                   (macroblock_offset & 0x7));
2104     ADVANCE_BCS_BATCH(batch);
2105 }
2106
2107 static void
2108 gen75_mfd_vc1_decode_picture(VADriverContextP ctx,
2109                             struct decode_state *decode_state,
2110                             struct gen7_mfd_context *gen7_mfd_context)
2111 {
2112     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2113     VAPictureParameterBufferVC1 *pic_param;
2114     VASliceParameterBufferVC1 *slice_param, *next_slice_param, *next_slice_group_param;
2115     dri_bo *slice_data_bo;
2116     int i, j;
2117
2118     assert(decode_state->pic_param && decode_state->pic_param->buffer);
2119     pic_param = (VAPictureParameterBufferVC1 *)decode_state->pic_param->buffer;
2120
2121     gen75_mfd_vc1_decode_init(ctx, decode_state, gen7_mfd_context);
2122     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
2123     intel_batchbuffer_emit_mi_flush(batch);
2124     gen75_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
2125     gen75_mfd_surface_state(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
2126     gen75_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
2127     gen75_mfd_bsp_buf_base_addr_state(ctx, decode_state, MFX_FORMAT_VC1, gen7_mfd_context);
2128     gen75_mfd_vc1_pic_state(ctx, decode_state, gen7_mfd_context);
2129     gen75_mfd_vc1_pred_pipe_state(ctx, decode_state, gen7_mfd_context);
2130     gen75_mfd_vc1_directmode_state(ctx, decode_state, gen7_mfd_context);
2131
2132     for (j = 0; j < decode_state->num_slice_params; j++) {
2133         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
2134         slice_param = (VASliceParameterBufferVC1 *)decode_state->slice_params[j]->buffer;
2135         slice_data_bo = decode_state->slice_datas[j]->bo;
2136         gen75_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_VC1, gen7_mfd_context);
2137
2138         if (j == decode_state->num_slice_params - 1)
2139             next_slice_group_param = NULL;
2140         else
2141             next_slice_group_param = (VASliceParameterBufferVC1 *)decode_state->slice_params[j + 1]->buffer;
2142
2143         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
2144             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
2145
2146             if (i < decode_state->slice_params[j]->num_elements - 1)
2147                 next_slice_param = slice_param + 1;
2148             else
2149                 next_slice_param = next_slice_group_param;
2150
2151             gen75_mfd_vc1_bsd_object(ctx, pic_param, slice_param, next_slice_param, slice_data_bo, gen7_mfd_context);
2152             slice_param++;
2153         }
2154     }
2155
2156     intel_batchbuffer_end_atomic(batch);
2157     intel_batchbuffer_flush(batch);
2158 }
2159
2160 static void
2161 gen75_mfd_jpeg_decode_init(VADriverContextP ctx,
2162                           struct decode_state *decode_state,
2163                           struct gen7_mfd_context *gen7_mfd_context)
2164 {
2165     struct object_surface *obj_surface;
2166     VAPictureParameterBufferJPEGBaseline *pic_param;
2167     int subsampling = SUBSAMPLE_YUV420;
2168     int fourcc = VA_FOURCC_IMC3;
2169
2170     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
2171
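         /*
          * Map the JPEG component sampling factors onto the driver's
          * subsampling mode and the FOURCC used to allocate the render
          * surface; layouts other than the handled 4:0:0, 4:2:0, 4:2:2,
          * 4:4:4 and 4:1:1 variants hit an assert.
          */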
2172     if (pic_param->num_components == 1) {
2173         subsampling = SUBSAMPLE_YUV400;
2174         fourcc = VA_FOURCC_Y800;
2175     } else if (pic_param->num_components == 3) {
2176         int h1 = pic_param->components[0].h_sampling_factor;
2177         int h2 = pic_param->components[1].h_sampling_factor;
2178         int h3 = pic_param->components[2].h_sampling_factor;
2179         int v1 = pic_param->components[0].v_sampling_factor;
2180         int v2 = pic_param->components[1].v_sampling_factor;
2181         int v3 = pic_param->components[2].v_sampling_factor;
2182
2183         if (h1 == 2 && h2 == 1 && h3 == 1 &&
2184             v1 == 2 && v2 == 1 && v3 == 1) {
2185             subsampling = SUBSAMPLE_YUV420;
2186             fourcc = VA_FOURCC_IMC3;
2187         } else if (h1 == 2 && h2 == 1 && h3 == 1 &&
2188                    v1 == 1 && v2 == 1 && v3 == 1) {
2189             subsampling = SUBSAMPLE_YUV422H;
2190             fourcc = VA_FOURCC_422H;
2191         } else if (h1 == 1 && h2 == 1 && h3 == 1 &&
2192                    v1 == 1 && v2 == 1 && v3 == 1) {
2193             subsampling = SUBSAMPLE_YUV444;
2194             fourcc = VA_FOURCC_444P;
2195         } else if (h1 == 4 && h2 == 1 && h3 == 1 &&
2196                    v1 == 1 && v2 == 1 && v3 == 1) {
2197             subsampling = SUBSAMPLE_YUV411;
2198             fourcc = VA_FOURCC_411P;
2199         } else if (h1 == 1 && h2 == 1 && h3 == 1 &&
2200                    v1 == 2 && v2 == 1 && v3 == 1) {
2201             subsampling = SUBSAMPLE_YUV422V;
2202             fourcc = VA_FOURCC_422V;
2203         } else if (h1 == 2 && h2 == 1 && h3 == 1 &&
2204                    v1 == 2 && v2 == 2 && v3 == 2) {
2205             subsampling = SUBSAMPLE_YUV422H;
2206             fourcc = VA_FOURCC_422H;
2207         } else if (h1 == 2 && h2 == 2 && h3 == 2 &&
2208                    v1 == 2 && v2 == 1 && v3 == 1) {
2209             subsampling = SUBSAMPLE_YUV422V;
2210             fourcc = VA_FOURCC_422V;
2211         } else
2212             assert(0);
2213     } else {
2214         assert(0);
2215     }
2216
2217     /* Current decoded picture */
2218     obj_surface = decode_state->render_object;
2219     i965_check_alloc_surface_bo(ctx, obj_surface, 1, fourcc, subsampling);
2220
2221     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
2222     gen7_mfd_context->pre_deblocking_output.bo = obj_surface->bo;
2223     dri_bo_reference(gen7_mfd_context->pre_deblocking_output.bo);
2224     gen7_mfd_context->pre_deblocking_output.valid = 1;
2225
2226     gen7_mfd_context->post_deblocking_output.bo = NULL;
2227     gen7_mfd_context->post_deblocking_output.valid = 0;
2228
2229     gen7_mfd_context->intra_row_store_scratch_buffer.bo = NULL;
2230     gen7_mfd_context->intra_row_store_scratch_buffer.valid = 0;
2231
2232     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;
2233     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.valid = 0;
2234
2235     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;
2236     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.valid = 0;
2237
2238     gen7_mfd_context->mpr_row_store_scratch_buffer.bo = NULL;
2239     gen7_mfd_context->mpr_row_store_scratch_buffer.valid = 0;
2240
2241     gen7_mfd_context->bitplane_read_buffer.bo = NULL;
2242     gen7_mfd_context->bitplane_read_buffer.valid = 0;
2243 }
2244
2245 static const int va_to_gen7_jpeg_rotation[4] = {
2246     GEN7_JPEG_ROTATION_0,
2247     GEN7_JPEG_ROTATION_90,
2248     GEN7_JPEG_ROTATION_180,
2249     GEN7_JPEG_ROTATION_270
2250 };
2251
2252 static void
2253 gen75_mfd_jpeg_pic_state(VADriverContextP ctx,
2254                         struct decode_state *decode_state,
2255                         struct gen7_mfd_context *gen7_mfd_context)
2256 {
2257     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2258     VAPictureParameterBufferJPEGBaseline *pic_param;
2259     int chroma_type = GEN7_YUV420;
2260     int frame_width_in_blks;
2261     int frame_height_in_blks;
2262
2263     assert(decode_state->pic_param && decode_state->pic_param->buffer);
2264     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
2265
2266     if (pic_param->num_components == 1)
2267         chroma_type = GEN7_YUV400;
2268     else if (pic_param->num_components == 3) {
2269         int h1 = pic_param->components[0].h_sampling_factor;
2270         int h2 = pic_param->components[1].h_sampling_factor;
2271         int h3 = pic_param->components[2].h_sampling_factor;
2272         int v1 = pic_param->components[0].v_sampling_factor;
2273         int v2 = pic_param->components[1].v_sampling_factor;
2274         int v3 = pic_param->components[2].v_sampling_factor;
2275
2276         if (h1 == 2 && h2 == 1 && h3 == 1 &&
2277             v1 == 2 && v2 == 1 && v3 == 1)
2278             chroma_type = GEN7_YUV420;
2279         else if (h1 == 2 && h2 == 1 && h3 == 1 &&
2280                  v1 == 1 && v2 == 1 && v3 == 1)
2281             chroma_type = GEN7_YUV422H_2Y;
2282         else if (h1 == 1 && h2 == 1 && h3 == 1 &&
2283                  v1 == 1 && v2 == 1 && v3 == 1)
2284             chroma_type = GEN7_YUV444;
2285         else if (h1 == 4 && h2 == 1 && h3 == 1 &&
2286                  v1 == 1 && v2 == 1 && v3 == 1)
2287             chroma_type = GEN7_YUV411;
2288         else if (h1 == 1 && h2 == 1 && h3 == 1 &&
2289                  v1 == 2 && v2 == 1 && v3 == 1)
2290             chroma_type = GEN7_YUV422V_2Y;
2291         else if (h1 == 2 && h2 == 1 && h3 == 1 &&
2292                  v1 == 2 && v2 == 2 && v3 == 2)
2293             chroma_type = GEN7_YUV422H_4Y;
2294         else if (h1 == 2 && h2 == 2 && h3 == 2 &&
2295                  v1 == 2 && v2 == 1 && v3 == 1)
2296             chroma_type = GEN7_YUV422V_4Y;
2297         else
2298             assert(0);
2299     }
2300
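         /*
          * MFX_JPEG_PIC_STATE takes the frame dimensions in block units
          * (FrameWidthInBlks / FrameHeightInBlks); for the remaining chroma
          * types the counts below are derived from the MCU-aligned width and
          * height instead of the plain 8x8 block count.
          */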
2301     if (chroma_type == GEN7_YUV400 ||
2302         chroma_type == GEN7_YUV444 ||
2303         chroma_type == GEN7_YUV422V_2Y) {
2304         frame_width_in_blks = ((pic_param->picture_width + 7) / 8);
2305         frame_height_in_blks = ((pic_param->picture_height + 7) / 8);
2306     } else if (chroma_type == GEN7_YUV411) {
2307         frame_width_in_blks = ((pic_param->picture_width + 31) / 32) * 4;
2308         frame_height_in_blks = ((pic_param->picture_height + 31) / 32) * 4;
2309     } else {
2310         frame_width_in_blks = ((pic_param->picture_width + 15) / 16) * 2;
2311         frame_height_in_blks = ((pic_param->picture_height + 15) / 16) * 2;
2312     }
2313
2314     BEGIN_BCS_BATCH(batch, 3);
2315     OUT_BCS_BATCH(batch, MFX_JPEG_PIC_STATE | (3 - 2));
2316     OUT_BCS_BATCH(batch,
2317                   (va_to_gen7_jpeg_rotation[0] << 4) |    /* without rotation */
2318                   (chroma_type << 0));
2319     OUT_BCS_BATCH(batch,
2320                   ((frame_height_in_blks - 1) << 16) |   /* FrameHeightInBlks */
2321                   ((frame_width_in_blks - 1) << 0));    /* FrameWidthInBlks */
2322     ADVANCE_BCS_BATCH(batch);
2323 }
2324
2325 static const int va_to_gen7_jpeg_hufftable[2] = {
2326     MFX_HUFFTABLE_ID_Y,
2327     MFX_HUFFTABLE_ID_UV
2328 };
2329
2330 static void
2331 gen75_mfd_jpeg_huff_table_state(VADriverContextP ctx,
2332                                struct decode_state *decode_state,
2333                                struct gen7_mfd_context *gen7_mfd_context,
2334                                int num_tables)
2335 {
2336     VAHuffmanTableBufferJPEGBaseline *huffman_table;
2337     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2338     int index;
2339
2340     if (!decode_state->huffman_table || !decode_state->huffman_table->buffer)
2341         return;
2342
2343     huffman_table = (VAHuffmanTableBufferJPEGBaseline *)decode_state->huffman_table->buffer;
2344
2345     for (index = 0; index < num_tables; index++) {
2346         int id = va_to_gen7_jpeg_hufftable[index];
2347
2348         if (!huffman_table->load_huffman_table[index])
2349             continue;
2350
2351         BEGIN_BCS_BATCH(batch, 53);
2352         OUT_BCS_BATCH(batch, MFX_JPEG_HUFF_TABLE_STATE | (53 - 2));
2353         OUT_BCS_BATCH(batch, id);
2354         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].num_dc_codes, 12);
2355         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].dc_values, 12);
2356         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].num_ac_codes, 16);
2357         intel_batchbuffer_data(batch, huffman_table->huffman_table[index].ac_values, 164);
2358         ADVANCE_BCS_BATCH(batch);
2359     }
2360 }
2361
2362 static const int va_to_gen7_jpeg_qm[5] = {
2363     -1,
2364     MFX_QM_JPEG_LUMA_Y_QUANTIZER_MATRIX,
2365     MFX_QM_JPEG_CHROMA_CB_QUANTIZER_MATRIX,
2366     MFX_QM_JPEG_CHROMA_CR_QUANTIZER_MATRIX,
2367     MFX_QM_JPEG_ALPHA_QUANTIZER_MATRIX
2368 };
2369
2370 static void
2371 gen75_mfd_jpeg_qm_state(VADriverContextP ctx,
2372                        struct decode_state *decode_state,
2373                        struct gen7_mfd_context *gen7_mfd_context)
2374 {
2375     VAPictureParameterBufferJPEGBaseline *pic_param;
2376     VAIQMatrixBufferJPEGBaseline *iq_matrix;
2377     int index;
2378
2379     if (!decode_state->iq_matrix || !decode_state->iq_matrix->buffer)
2380         return;
2381
2382     iq_matrix = (VAIQMatrixBufferJPEGBaseline *)decode_state->iq_matrix->buffer;
2383     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
2384
2385     assert(pic_param->num_components <= 3);
2386
2387     for (index = 0; index < pic_param->num_components; index++) {
2388         int id = pic_param->components[index].component_id - pic_param->components[0].component_id + 1;
2389         int qm_type;
2390         unsigned char *qm = iq_matrix->quantiser_table[pic_param->components[index].quantiser_table_selector];
2391         unsigned char raster_qm[64];
2392         int j;
2393
2394         if (id > 4 || id < 1)
2395             continue;
2396
2397         if (!iq_matrix->load_quantiser_table[pic_param->components[index].quantiser_table_selector])
2398             continue;
2399
2400         qm_type = va_to_gen7_jpeg_qm[id];
2401
2402         for (j = 0; j < 64; j++)
2403             raster_qm[zigzag_direct[j]] = qm[j];
2404
2405         gen75_mfd_qm_state(ctx, qm_type, raster_qm, 64, gen7_mfd_context);
2406     }
2407 }
2408
2409 static void
2410 gen75_mfd_jpeg_bsd_object(VADriverContextP ctx,
2411                          VAPictureParameterBufferJPEGBaseline *pic_param,
2412                          VASliceParameterBufferJPEGBaseline *slice_param,
2413                          VASliceParameterBufferJPEGBaseline *next_slice_param,
2414                          dri_bo *slice_data_bo,
2415                          struct gen7_mfd_context *gen7_mfd_context)
2416 {
2417     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2418     int scan_component_mask = 0;
2419     int i;
2420
2421     assert(slice_param->num_components > 0);
2422     assert(slice_param->num_components < 4);
2423     assert(slice_param->num_components <= pic_param->num_components);
2424
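         /*
          * Build a bitmask of the picture components covered by this scan;
          * component selectors are mapped relative to the first component id
          * declared in the picture parameter buffer.
          */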
2425     for (i = 0; i < slice_param->num_components; i++) {
2426         switch (slice_param->components[i].component_selector - pic_param->components[0].component_id + 1) {
2427         case 1:
2428             scan_component_mask |= (1 << 0);
2429             break;
2430         case 2:
2431             scan_component_mask |= (1 << 1);
2432             break;
2433         case 3:
2434             scan_component_mask |= (1 << 2);
2435             break;
2436         default:
2437             assert(0);
2438             break;
2439         }
2440     }
2441
2442     BEGIN_BCS_BATCH(batch, 6);
2443     OUT_BCS_BATCH(batch, MFD_JPEG_BSD_OBJECT | (6 - 2));
2444     OUT_BCS_BATCH(batch, 
2445                   slice_param->slice_data_size);
2446     OUT_BCS_BATCH(batch, 
2447                   slice_param->slice_data_offset);
2448     OUT_BCS_BATCH(batch,
2449                   slice_param->slice_horizontal_position << 16 |
2450                   slice_param->slice_vertical_position << 0);
2451     OUT_BCS_BATCH(batch,
2452                   ((slice_param->num_components != 1) << 30) |  /* interleaved */
2453                   (scan_component_mask << 27) |                 /* scan components */
2454                   (0 << 26) |   /* disable interrupt allowed */
2455                   (slice_param->num_mcus << 0));                /* MCU count */
2456     OUT_BCS_BATCH(batch,
2457                   (slice_param->restart_interval << 0));    /* RestartInterval */
2458     ADVANCE_BCS_BATCH(batch);
2459 }
2460
2461 /* Workaround for JPEG decoding on Ivybridge */
2462
2463 VAStatus 
2464 i965_CreateSurfaces(VADriverContextP ctx,
2465                     int width,
2466                     int height,
2467                     int format,
2468                     int num_surfaces,
2469                     VASurfaceID *surfaces);
2470
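     /*
      * Tiny pre-encoded 16x16 clip used by the JPEG workaround: before a real
      * JPEG picture is decoded it is run through the AVC decoding path (see
      * the gen75_jpeg_wa_* helpers below), presumably to bring the MFX
      * pipeline into a known state.
      */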
2471 static struct {
2472     int width;
2473     int height;
2474     unsigned char data[32];
2475     int data_size;
2476     int data_bit_offset;
2477     int qp;
2478 } gen7_jpeg_wa_clip = {
2479     16,
2480     16,
2481     {
2482         0x65, 0xb8, 0x40, 0x32, 0x13, 0xfd, 0x06, 0x6c,
2483         0xfc, 0x0a, 0x50, 0x71, 0x5c, 0x00
2484     },
2485     14,
2486     40,
2487     28,
2488 };
2489
2490 static void
2491 gen75_jpeg_wa_init(VADriverContextP ctx,
2492                   struct gen7_mfd_context *gen7_mfd_context)
2493 {
2494     struct i965_driver_data *i965 = i965_driver_data(ctx);
2495     VAStatus status;
2496     struct object_surface *obj_surface;
2497
2498     if (gen7_mfd_context->jpeg_wa_surface_id != VA_INVALID_SURFACE)
2499         i965_DestroySurfaces(ctx,
2500                              &gen7_mfd_context->jpeg_wa_surface_id,
2501                              1);
2502
2503     status = i965_CreateSurfaces(ctx,
2504                                  gen7_jpeg_wa_clip.width,
2505                                  gen7_jpeg_wa_clip.height,
2506                                  VA_RT_FORMAT_YUV420,
2507                                  1,
2508                                  &gen7_mfd_context->jpeg_wa_surface_id);
2509     assert(status == VA_STATUS_SUCCESS);
2510
2511     obj_surface = SURFACE(gen7_mfd_context->jpeg_wa_surface_id);
2512     assert(obj_surface);
2513     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC_NV12, SUBSAMPLE_YUV420);
2514     gen7_mfd_context->jpeg_wa_surface_object = obj_surface;
2515
2516     if (!gen7_mfd_context->jpeg_wa_slice_data_bo) {
2517         gen7_mfd_context->jpeg_wa_slice_data_bo = dri_bo_alloc(i965->intel.bufmgr,
2518                                                                "JPEG WA data",
2519                                                                0x1000,
2520                                                                0x1000);
2521         dri_bo_subdata(gen7_mfd_context->jpeg_wa_slice_data_bo,
2522                        0,
2523                        gen7_jpeg_wa_clip.data_size,
2524                        gen7_jpeg_wa_clip.data);
2525     }
2526 }
2527
2528 static void
2529 gen75_jpeg_wa_pipe_mode_select(VADriverContextP ctx,
2530                               struct gen7_mfd_context *gen7_mfd_context)
2531 {
2532     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2533
2534     BEGIN_BCS_BATCH(batch, 5);
2535     OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (5 - 2));
2536     OUT_BCS_BATCH(batch,
2537                   (MFX_LONG_MODE << 17) | /* Only the long format is currently supported */
2538                   (MFD_MODE_VLD << 15) | /* VLD mode */
2539                   (0 << 10) | /* disable Stream-Out */
2540                   (0 << 9)  | /* Post Deblocking Output */
2541                   (1 << 8)  | /* Pre Deblocking Output */
2542                   (0 << 5)  | /* not in stitch mode */
2543                   (MFX_CODEC_DECODE << 4)  | /* decoding mode */
2544                   (MFX_FORMAT_AVC << 0));
2545     OUT_BCS_BATCH(batch,
2546                   (0 << 4)  | /* terminate if AVC motion and POC table error occurs */
2547                   (0 << 3)  | /* terminate if AVC mbdata error occurs */
2548                   (0 << 2)  | /* terminate if AVC CABAC/CAVLC decode error occurs */
2549                   (0 << 1)  |
2550                   (0 << 0));
2551     OUT_BCS_BATCH(batch, 0); /* pic status/error report id */ 
2552     OUT_BCS_BATCH(batch, 0); /* reserved */
2553     ADVANCE_BCS_BATCH(batch);
2554 }
2555
2556 static void
2557 gen75_jpeg_wa_surface_state(VADriverContextP ctx,
2558                            struct gen7_mfd_context *gen7_mfd_context)
2559 {
2560     struct object_surface *obj_surface = gen7_mfd_context->jpeg_wa_surface_object;
2561     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2562
2563     BEGIN_BCS_BATCH(batch, 6);
2564     OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
2565     OUT_BCS_BATCH(batch, 0);
2566     OUT_BCS_BATCH(batch,
2567                   ((obj_surface->orig_width - 1) << 18) |
2568                   ((obj_surface->orig_height - 1) << 4));
2569     OUT_BCS_BATCH(batch,
2570                   (MFX_SURFACE_PLANAR_420_8 << 28) | /* 420 planar YUV surface */
2571                   (1 << 27) | /* interleave chroma, set to 0 for JPEG */
2572                   (0 << 22) | /* surface object control state, ignored */
2573                   ((obj_surface->width - 1) << 3) | /* pitch */
2574                   (0 << 2)  | /* must be 0 */
2575                   (1 << 1)  | /* must be tiled */
2576                   (I965_TILEWALK_YMAJOR << 0));  /* tile walk, must be 1 */
2577     OUT_BCS_BATCH(batch,
2578                   (0 << 16) | /* X offset for U(Cb), must be 0 */
2579                   (obj_surface->y_cb_offset << 0)); /* Y offset for U(Cb) */
2580     OUT_BCS_BATCH(batch,
2581                   (0 << 16) | /* X offset for V(Cr), must be 0 */
2582                   (0 << 0)); /* Y offset for V(Cr), must be 0 for video codec, non-zero for JPEG */
2583     ADVANCE_BCS_BATCH(batch);
2584 }
2585
2586 static void
2587 gen75_jpeg_wa_pipe_buf_addr_state_bplus(VADriverContextP ctx,
2588                                  struct gen7_mfd_context *gen7_mfd_context)
2589 {
2590     struct i965_driver_data *i965 = i965_driver_data(ctx);
2591     struct object_surface *obj_surface = gen7_mfd_context->jpeg_wa_surface_object;
2592     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2593     dri_bo *intra_bo;
2594     int i;
2595
2596     intra_bo = dri_bo_alloc(i965->intel.bufmgr,
2597                             "intra row store",
2598                             128 * 64,
2599                             0x1000);
2600
2601     BEGIN_BCS_BATCH(batch, 61);
2602     OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (61 - 2));
2603     OUT_BCS_RELOC(batch,
2604                   obj_surface->bo,
2605                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2606                   0);
2607     OUT_BCS_BATCH(batch, 0);
2608     OUT_BCS_BATCH(batch, 0);
2609
2610
2611     OUT_BCS_BATCH(batch, 0); /* post deblocking */
2612     OUT_BCS_BATCH(batch, 0);
2613     OUT_BCS_BATCH(batch, 0);
2614
2615     /* uncompressed-video & stream out 7-12 */
2616     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
2617     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
2618     OUT_BCS_BATCH(batch, 0);
2619     OUT_BCS_BATCH(batch, 0);
2620     OUT_BCS_BATCH(batch, 0);
2621     OUT_BCS_BATCH(batch, 0);
2622
2623     /* DW 13-15 are for the intra row store scratch */
2624     OUT_BCS_RELOC(batch,
2625                   intra_bo,
2626                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2627                   0);
2628     OUT_BCS_BATCH(batch, 0);
2629     OUT_BCS_BATCH(batch, 0);
2630
2631     /* DW 16-18 are for the deblocking filter */
2632     OUT_BCS_BATCH(batch, 0);
2633     OUT_BCS_BATCH(batch, 0);
2634     OUT_BCS_BATCH(batch, 0);
2635
2636     /* DW 19..50 */
2637     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2638         OUT_BCS_BATCH(batch, 0);
2639         OUT_BCS_BATCH(batch, 0);
2640     }
2641     OUT_BCS_BATCH(batch, 0);
2642
2643     /* DW 52-54 are for the mb status address */
2644     OUT_BCS_BATCH(batch, 0);
2645     OUT_BCS_BATCH(batch, 0);
2646     OUT_BCS_BATCH(batch, 0);
2647     /* DW 56-60 are for the ILDB & second ILDB address */
2648     OUT_BCS_BATCH(batch, 0);
2649     OUT_BCS_BATCH(batch, 0);
2650     OUT_BCS_BATCH(batch, 0);
2651     OUT_BCS_BATCH(batch, 0);
2652     OUT_BCS_BATCH(batch, 0);
2653     OUT_BCS_BATCH(batch, 0);
2654
2655     ADVANCE_BCS_BATCH(batch);
2656
2657     dri_bo_unreference(intra_bo);
2658 }
2659
2660 static void
2661 gen75_jpeg_wa_pipe_buf_addr_state(VADriverContextP ctx,
2662                                  struct gen7_mfd_context *gen7_mfd_context)
2663 {
2664     struct i965_driver_data *i965 = i965_driver_data(ctx);
2665     struct object_surface *obj_surface = gen7_mfd_context->jpeg_wa_surface_object;
2666     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2667     dri_bo *intra_bo;
2668     int i;
2669
2670     if (IS_STEPPING_BPLUS(i965)) {
2671         gen75_jpeg_wa_pipe_buf_addr_state_bplus(ctx, gen7_mfd_context);
2672         return;
2673     }
2674
2675     intra_bo = dri_bo_alloc(i965->intel.bufmgr,
2676                             "intra row store",
2677                             128 * 64,
2678                             0x1000);
2679
2680     BEGIN_BCS_BATCH(batch, 25);
2681     OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (25 - 2));
2682     OUT_BCS_RELOC(batch,
2683                   obj_surface->bo,
2684                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2685                   0);
2686     
2687     OUT_BCS_BATCH(batch, 0); /* post deblocking */
2688
2689     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
2690     OUT_BCS_BATCH(batch, 0); /* ignore for decoding */
2691
2692     OUT_BCS_RELOC(batch,
2693                   intra_bo,
2694                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2695                   0);
2696
2697     OUT_BCS_BATCH(batch, 0);
2698
2699     /* DW 7..22 */
2700     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2701         OUT_BCS_BATCH(batch, 0);
2702     }
2703
2704     OUT_BCS_BATCH(batch, 0);   /* ignore DW23 for decoding */
2705     OUT_BCS_BATCH(batch, 0);
2706     ADVANCE_BCS_BATCH(batch);
2707
2708     dri_bo_unreference(intra_bo);
2709 }
2710
2711 static void
2712 gen75_jpeg_wa_bsp_buf_base_addr_state_bplus(VADriverContextP ctx,
2713                                      struct gen7_mfd_context *gen7_mfd_context)
2714 {
2715     struct i965_driver_data *i965 = i965_driver_data(ctx);
2716     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2717     dri_bo *bsd_mpc_bo, *mpr_bo;
2718
2719     bsd_mpc_bo = dri_bo_alloc(i965->intel.bufmgr,
2720                               "bsd mpc row store",
2721                               11520, /* 1.5 * 120 * 64 */
2722                               0x1000);
2723
2724     mpr_bo = dri_bo_alloc(i965->intel.bufmgr,
2725                           "mpr row store",
2726                           7680, /* 1.0 * 120 * 64 */
2727                           0x1000);
2728
2729     BEGIN_BCS_BATCH(batch, 10);
2730     OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (10 - 2));
2731
2732     OUT_BCS_RELOC(batch,
2733                   bsd_mpc_bo,
2734                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2735                   0);
2736
2737     OUT_BCS_BATCH(batch, 0);
2738     OUT_BCS_BATCH(batch, 0);
2739
2740     OUT_BCS_RELOC(batch,
2741                   mpr_bo,
2742                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2743                   0);
2744     OUT_BCS_BATCH(batch, 0);
2745     OUT_BCS_BATCH(batch, 0);
2746
2747     OUT_BCS_BATCH(batch, 0);
2748     OUT_BCS_BATCH(batch, 0);
2749     OUT_BCS_BATCH(batch, 0);
2750
2751     ADVANCE_BCS_BATCH(batch);
2752
2753     dri_bo_unreference(bsd_mpc_bo);
2754     dri_bo_unreference(mpr_bo);
2755 }
2756
2757 static void
2758 gen75_jpeg_wa_bsp_buf_base_addr_state(VADriverContextP ctx,
2759                                      struct gen7_mfd_context *gen7_mfd_context)
2760 {
2761     struct i965_driver_data *i965 = i965_driver_data(ctx);
2762     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2763     dri_bo *bsd_mpc_bo, *mpr_bo;
2764
2765     if (IS_STEPPING_BPLUS(i965)) {
2766         gen75_jpeg_wa_bsp_buf_base_addr_state_bplus(ctx, gen7_mfd_context);
2767         return;
2768     }
2769
2770     bsd_mpc_bo = dri_bo_alloc(i965->intel.bufmgr,
2771                               "bsd mpc row store",
2772                               11520, /* 1.5 * 120 * 64 */
2773                               0x1000);
2774
2775     mpr_bo = dri_bo_alloc(i965->intel.bufmgr,
2776                           "mpr row store",
2777                           7680, /* 1.0 * 120 * 64 */
2778                           0x1000);
2779
2780     BEGIN_BCS_BATCH(batch, 4);
2781     OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
2782
2783     OUT_BCS_RELOC(batch,
2784                   bsd_mpc_bo,
2785                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2786                   0);
2787
2788     OUT_BCS_RELOC(batch,
2789                   mpr_bo,
2790                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
2791                   0);
2792     OUT_BCS_BATCH(batch, 0);
2793
2794     ADVANCE_BCS_BATCH(batch);
2795
2796     dri_bo_unreference(bsd_mpc_bo);
2797     dri_bo_unreference(mpr_bo);
2798 }
2799
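/*
 * No scaling matrices are programmed for the workaround clip; this is
 * intentionally a no-op (presumably the hardware defaults are sufficient
 * for the dummy 16x16 intra decode).
 */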
2800 static void
2801 gen75_jpeg_wa_avc_qm_state(VADriverContextP ctx,
2802                           struct gen7_mfd_context *gen7_mfd_context)
2803 {
2804
2805 }
2806
2807 static void
2808 gen75_jpeg_wa_avc_img_state(VADriverContextP ctx,
2809                            struct gen7_mfd_context *gen7_mfd_context)
2810 {
2811     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2812     int img_struct = 0;
2813     int mbaff_frame_flag = 0;
2814     unsigned int width_in_mbs = 1, height_in_mbs = 1;
2815
2816     BEGIN_BCS_BATCH(batch, 16);
2817     OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (16 - 2));
2818     OUT_BCS_BATCH(batch, 
2819                   (width_in_mbs * height_in_mbs - 1));
2820     OUT_BCS_BATCH(batch, 
2821                   ((height_in_mbs - 1) << 16) | 
2822                   ((width_in_mbs - 1) << 0));
2823     OUT_BCS_BATCH(batch, 
2824                   (0 << 24) |
2825                   (0 << 16) |
2826                   (0 << 14) |
2827                   (0 << 13) |
2828                   (0 << 12) | /* differ from GEN6 */
2829                   (0 << 10) |
2830                   (img_struct << 8));
2831     OUT_BCS_BATCH(batch,
2832                   (1 << 10) | /* 4:2:0 */
2833                   (1 << 7) |  /* CABAC */
2834                   (0 << 6) |
2835                   (0 << 5) |
2836                   (0 << 4) |
2837                   (0 << 3) |
2838                   (1 << 2) |
2839                   (mbaff_frame_flag << 1) |
2840                   (0 << 0));
2841     OUT_BCS_BATCH(batch, 0);
2842     OUT_BCS_BATCH(batch, 0);
2843     OUT_BCS_BATCH(batch, 0);
2844     OUT_BCS_BATCH(batch, 0);
2845     OUT_BCS_BATCH(batch, 0);
2846     OUT_BCS_BATCH(batch, 0);
2847     OUT_BCS_BATCH(batch, 0);
2848     OUT_BCS_BATCH(batch, 0);
2849     OUT_BCS_BATCH(batch, 0);
2850     OUT_BCS_BATCH(batch, 0);
2851     OUT_BCS_BATCH(batch, 0);
2852     ADVANCE_BCS_BATCH(batch);
2853 }
2854
2855 static void
2856 gen75_jpeg_wa_avc_directmode_state_bplus(VADriverContextP ctx,
2857                                   struct gen7_mfd_context *gen7_mfd_context)
2858 {
2859     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2860     int i;
2861
2862     BEGIN_BCS_BATCH(batch, 71);
2863     OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (71 - 2));
2864
2865     /* reference surfaces 0..15 */
2866     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2867         OUT_BCS_BATCH(batch, 0); /* top */
2868         OUT_BCS_BATCH(batch, 0); /* bottom */
2869     }
2870
2871     OUT_BCS_BATCH(batch, 0);
2872
2873     /* the current decoding frame/field */
2874     OUT_BCS_BATCH(batch, 0); /* top */
2875     OUT_BCS_BATCH(batch, 0);
2876     OUT_BCS_BATCH(batch, 0);
2877
2878     /* POC List */
2879     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2880         OUT_BCS_BATCH(batch, 0);
2881         OUT_BCS_BATCH(batch, 0);
2882     }
2883
2884     OUT_BCS_BATCH(batch, 0);
2885     OUT_BCS_BATCH(batch, 0);
2886
2887     ADVANCE_BCS_BATCH(batch);
2888 }
2889
2890 static void
2891 gen75_jpeg_wa_avc_directmode_state(VADriverContextP ctx,
2892                                   struct gen7_mfd_context *gen7_mfd_context)
2893 {
2894     struct i965_driver_data *i965 = i965_driver_data(ctx);
2895     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2896     int i;
2897
2898     if (IS_STEPPING_BPLUS(i965)) {
2899         gen75_jpeg_wa_avc_directmode_state_bplus(ctx, gen7_mfd_context);
2900         return;
2901     }   
2902
2903     BEGIN_BCS_BATCH(batch, 69);
2904     OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));
2905
2906     /* reference surfaces 0..15 */
2907     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2908         OUT_BCS_BATCH(batch, 0); /* top */
2909         OUT_BCS_BATCH(batch, 0); /* bottom */
2910     }
2911
2912     /* the current decoding frame/field */
2913     OUT_BCS_BATCH(batch, 0); /* top */
2914     OUT_BCS_BATCH(batch, 0); /* bottom */
2915
2916     /* POC List */
2917     for (i = 0; i < MAX_GEN_REFERENCE_FRAMES; i++) {
2918         OUT_BCS_BATCH(batch, 0);
2919         OUT_BCS_BATCH(batch, 0);
2920     }
2921
2922     OUT_BCS_BATCH(batch, 0);
2923     OUT_BCS_BATCH(batch, 0);
2924
2925     ADVANCE_BCS_BATCH(batch);
2926 }
2927
2928 static void 
2929 gen75_jpeg_wa_ind_obj_base_addr_state_bplus(VADriverContextP ctx,
2930                                      struct gen7_mfd_context *gen7_mfd_context)
2931 {
2932     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2933
2934     BEGIN_BCS_BATCH(batch, 11);
2935     OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
2936     OUT_BCS_RELOC(batch,
2937                   gen7_mfd_context->jpeg_wa_slice_data_bo,
2938                   I915_GEM_DOMAIN_INSTRUCTION, 0,
2939                   0);
2940     OUT_BCS_BATCH(batch, 0x80000000); /* must set, up to 2G */
2941     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2942     OUT_BCS_BATCH(batch, 0);
2943     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2944     OUT_BCS_BATCH(batch, 0);
2945     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2946     OUT_BCS_BATCH(batch, 0);
2947     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2948     OUT_BCS_BATCH(batch, 0);
2949     ADVANCE_BCS_BATCH(batch);
2950 }
2951
2952 static void
2953 gen75_jpeg_wa_ind_obj_base_addr_state(VADriverContextP ctx,
2954                                      struct gen7_mfd_context *gen7_mfd_context)
2955 {
2956     struct i965_driver_data *i965 = i965_driver_data(ctx);
2957     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2958
2959     if (IS_STEPPING_BPLUS(i965)) {
2960         gen75_jpeg_wa_ind_obj_base_addr_state_bplus(ctx, gen7_mfd_context);
2961         return;
2962     }   
2963
2964     BEGIN_BCS_BATCH(batch, 11);
2965     OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
2966     OUT_BCS_RELOC(batch,
2967                   gen7_mfd_context->jpeg_wa_slice_data_bo,
2968                   I915_GEM_DOMAIN_INSTRUCTION, 0,
2969                   0);
2970     OUT_BCS_BATCH(batch, 0x80000000); /* must set, up to 2G */
2971     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2972     OUT_BCS_BATCH(batch, 0);
2973     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2974     OUT_BCS_BATCH(batch, 0);
2975     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2976     OUT_BCS_BATCH(batch, 0);
2977     OUT_BCS_BATCH(batch, 0); /* ignore for VLD mode */
2978     OUT_BCS_BATCH(batch, 0);
2979     ADVANCE_BCS_BATCH(batch);
2980 }
2981
2982 static void
2983 gen75_jpeg_wa_avc_bsd_object(VADriverContextP ctx,
2984                             struct gen7_mfd_context *gen7_mfd_context)
2985 {
2986     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
2987
2988     /* the input bitstream format on GEN7 differs from GEN6 */
2989     BEGIN_BCS_BATCH(batch, 6);
2990     OUT_BCS_BATCH(batch, MFD_AVC_BSD_OBJECT | (6 - 2));
2991     OUT_BCS_BATCH(batch, gen7_jpeg_wa_clip.data_size);
2992     OUT_BCS_BATCH(batch, 0);
2993     OUT_BCS_BATCH(batch,
2994                   (0 << 31) |
2995                   (0 << 14) |
2996                   (0 << 12) |
2997                   (0 << 10) |
2998                   (0 << 8));
2999     OUT_BCS_BATCH(batch,
3000                   ((gen7_jpeg_wa_clip.data_bit_offset >> 3) << 16) |
3001                   (0 << 5)  |
3002                   (0 << 4)  |
3003                   (1 << 3) | /* LastSlice Flag */
3004                   (gen7_jpeg_wa_clip.data_bit_offset & 0x7));
3005     OUT_BCS_BATCH(batch, 0);
3006     ADVANCE_BCS_BATCH(batch);
3007 }
3008
3009 static void
3010 gen75_jpeg_wa_avc_slice_state(VADriverContextP ctx,
3011                              struct gen7_mfd_context *gen7_mfd_context)
3012 {
3013     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
3014     int slice_hor_pos = 0, slice_ver_pos = 0, next_slice_hor_pos = 0, next_slice_ver_pos = 1;
3015     int num_ref_idx_l0 = 0, num_ref_idx_l1 = 0;
3016     int first_mb_in_slice = 0;
3017     int slice_type = SLICE_TYPE_I;
3018
3019     BEGIN_BCS_BATCH(batch, 11);
3020     OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
3021     OUT_BCS_BATCH(batch, slice_type);
3022     OUT_BCS_BATCH(batch, 
3023                   (num_ref_idx_l1 << 24) |
3024                   (num_ref_idx_l0 << 16) |
3025                   (0 << 8) |
3026                   (0 << 0));
3027     OUT_BCS_BATCH(batch, 
3028                   (0 << 29) |
3029                   (1 << 27) |   /* disable Deblocking */
3030                   (0 << 24) |
3031                   (gen7_jpeg_wa_clip.qp << 16) |
3032                   (0 << 8) |
3033                   (0 << 0));
3034     OUT_BCS_BATCH(batch, 
3035                   (slice_ver_pos << 24) |
3036                   (slice_hor_pos << 16) | 
3037                   (first_mb_in_slice << 0));
3038     OUT_BCS_BATCH(batch,
3039                   (next_slice_ver_pos << 16) |
3040                   (next_slice_hor_pos << 0));
3041     OUT_BCS_BATCH(batch, (1 << 19)); /* last slice flag */
3042     OUT_BCS_BATCH(batch, 0);
3043     OUT_BCS_BATCH(batch, 0);
3044     OUT_BCS_BATCH(batch, 0);
3045     OUT_BCS_BATCH(batch, 0);
3046     ADVANCE_BCS_BATCH(batch);
3047 }
3048
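/*
 * Run the complete workaround sequence: flush, then emit every MFX state
 * needed to decode the dummy AVC clip (pipe mode, surface, buffer
 * addresses, image/slice state and the BSD object) ahead of the actual
 * JPEG commands in gen75_mfd_jpeg_decode_picture().
 */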
3049 static void
3050 gen75_mfd_jpeg_wa(VADriverContextP ctx,
3051                  struct gen7_mfd_context *gen7_mfd_context)
3052 {
3053     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
3054     gen75_jpeg_wa_init(ctx, gen7_mfd_context);
3055     intel_batchbuffer_emit_mi_flush(batch);
3056     gen75_jpeg_wa_pipe_mode_select(ctx, gen7_mfd_context);
3057     gen75_jpeg_wa_surface_state(ctx, gen7_mfd_context);
3058     gen75_jpeg_wa_pipe_buf_addr_state(ctx, gen7_mfd_context);
3059     gen75_jpeg_wa_bsp_buf_base_addr_state(ctx, gen7_mfd_context);
3060     gen75_jpeg_wa_avc_qm_state(ctx, gen7_mfd_context);
3061     gen75_jpeg_wa_avc_img_state(ctx, gen7_mfd_context);
3062     gen75_jpeg_wa_ind_obj_base_addr_state(ctx, gen7_mfd_context);
3063
3064     gen75_jpeg_wa_avc_directmode_state(ctx, gen7_mfd_context);
3065     gen75_jpeg_wa_avc_slice_state(ctx, gen7_mfd_context);
3066     gen75_jpeg_wa_avc_bsd_object(ctx, gen7_mfd_context);
3067 }
3068
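/*
 * JPEG baseline decode.  The slice parameters are walked twice: the first
 * pass only records the largest AC/DC Huffman table selector so that
 * gen75_mfd_jpeg_huff_table_state() loads the right number of tables, the
 * second pass emits one BSD object per slice via gen75_mfd_jpeg_bsd_object().
 */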
3069 void
3070 gen75_mfd_jpeg_decode_picture(VADriverContextP ctx,
3071                              struct decode_state *decode_state,
3072                              struct gen7_mfd_context *gen7_mfd_context)
3073 {
3074     struct intel_batchbuffer *batch = gen7_mfd_context->base.batch;
3075     VAPictureParameterBufferJPEGBaseline *pic_param;
3076     VASliceParameterBufferJPEGBaseline *slice_param, *next_slice_param, *next_slice_group_param;
3077     dri_bo *slice_data_bo;
3078     int i, j, max_selector = 0;
3079
3080     assert(decode_state->pic_param && decode_state->pic_param->buffer);
3081     pic_param = (VAPictureParameterBufferJPEGBaseline *)decode_state->pic_param->buffer;
3082
3083     /* Currently only Baseline DCT is supported */
3084     gen75_mfd_jpeg_decode_init(ctx, decode_state, gen7_mfd_context);
3085     intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
3086     gen75_mfd_jpeg_wa(ctx, gen7_mfd_context);
3087     intel_batchbuffer_emit_mi_flush(batch);
3088     gen75_mfd_pipe_mode_select(ctx, decode_state, MFX_FORMAT_JPEG, gen7_mfd_context);
3089     gen75_mfd_surface_state(ctx, decode_state, MFX_FORMAT_JPEG, gen7_mfd_context);
3090     gen75_mfd_pipe_buf_addr_state(ctx, decode_state, MFX_FORMAT_JPEG, gen7_mfd_context);
3091     gen75_mfd_jpeg_pic_state(ctx, decode_state, gen7_mfd_context);
3092     gen75_mfd_jpeg_qm_state(ctx, decode_state, gen7_mfd_context);
3093
3094     for (j = 0; j < decode_state->num_slice_params; j++) {
3095         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
3096         slice_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j]->buffer;
3097         slice_data_bo = decode_state->slice_datas[j]->bo;
3098         gen75_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_JPEG, gen7_mfd_context);
3099
3100         if (j == decode_state->num_slice_params - 1)
3101             next_slice_group_param = NULL;
3102         else
3103             next_slice_group_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j + 1]->buffer;
3104
3105         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
3106             int component;
3107
3108             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
3109
3110             if (i < decode_state->slice_params[j]->num_elements - 1)
3111                 next_slice_param = slice_param + 1;
3112             else
3113                 next_slice_param = next_slice_group_param;
3114
3115             for (component = 0; component < slice_param->num_components; component++) {
3116                 if (max_selector < slice_param->components[component].dc_table_selector)
3117                     max_selector = slice_param->components[component].dc_table_selector;
3118
3119                 if (max_selector < slice_param->components[component].ac_table_selector)
3120                     max_selector = slice_param->components[component].ac_table_selector;
3121             }
3122
3123             slice_param++;
3124         }
3125     }
3126
3127     assert(max_selector < 2);
3128     gen75_mfd_jpeg_huff_table_state(ctx, decode_state, gen7_mfd_context, max_selector + 1);
3129
3130     for (j = 0; j < decode_state->num_slice_params; j++) {
3131         assert(decode_state->slice_params && decode_state->slice_params[j]->buffer);
3132         slice_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j]->buffer;
3133         slice_data_bo = decode_state->slice_datas[j]->bo;
3134         gen75_mfd_ind_obj_base_addr_state(ctx, slice_data_bo, MFX_FORMAT_JPEG, gen7_mfd_context);
3135
3136         if (j == decode_state->num_slice_params - 1)
3137             next_slice_group_param = NULL;
3138         else
3139             next_slice_group_param = (VASliceParameterBufferJPEGBaseline *)decode_state->slice_params[j + 1]->buffer;
3140
3141         for (i = 0; i < decode_state->slice_params[j]->num_elements; i++) {
3142             assert(slice_param->slice_data_flag == VA_SLICE_DATA_FLAG_ALL);
3143
3144             if (i < decode_state->slice_params[j]->num_elements - 1)
3145                 next_slice_param = slice_param + 1;
3146             else
3147                 next_slice_param = next_slice_group_param;
3148
3149             gen75_mfd_jpeg_bsd_object(ctx, pic_param, slice_param, next_slice_param, slice_data_bo, gen7_mfd_context);
3150             slice_param++;
3151         }
3152     }
3153
3154     intel_batchbuffer_end_atomic(batch);
3155     intel_batchbuffer_flush(batch);
3156 }
3157
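/*
 * run() entry point of the decoder hw_context: validate the input with
 * intel_decoder_sanity_check_input() and dispatch to the per-codec decode
 * routine based on the profile (MPEG-2, H.264, VC-1 or JPEG baseline).
 */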
3158 static VAStatus
3159 gen75_mfd_decode_picture(VADriverContextP ctx, 
3160                         VAProfile profile, 
3161                         union codec_state *codec_state,
3162                         struct hw_context *hw_context)
3163
3164 {
3165     struct gen7_mfd_context *gen7_mfd_context = (struct gen7_mfd_context *)hw_context;
3166     struct decode_state *decode_state = &codec_state->decode;
3167     VAStatus vaStatus;
3168
3169     assert(gen7_mfd_context);
3170
3171     vaStatus = intel_decoder_sanity_check_input(ctx, profile, decode_state);
3172
3173     if (vaStatus != VA_STATUS_SUCCESS)
3174         goto out;
3175
3176     gen7_mfd_context->wa_mpeg2_slice_vertical_position = -1;
3177
3178     switch (profile) {
3179     case VAProfileMPEG2Simple:
3180     case VAProfileMPEG2Main:
3181         gen75_mfd_mpeg2_decode_picture(ctx, decode_state, gen7_mfd_context);
3182         break;
3183         
3184     case VAProfileH264ConstrainedBaseline:
3185     case VAProfileH264Main:
3186     case VAProfileH264High:
3187         gen75_mfd_avc_decode_picture(ctx, decode_state, gen7_mfd_context);
3188         break;
3189
3190     case VAProfileVC1Simple:
3191     case VAProfileVC1Main:
3192     case VAProfileVC1Advanced:
3193         gen75_mfd_vc1_decode_picture(ctx, decode_state, gen7_mfd_context);
3194         break;
3195
3196     case VAProfileJPEGBaseline:
3197         gen75_mfd_jpeg_decode_picture(ctx, decode_state, gen7_mfd_context);
3198         break;
3199
3200     default:
3201         assert(0);
3202         break;
3203     }
3204
3205     vaStatus = VA_STATUS_SUCCESS;
3206
3207 out:
3208     return vaStatus;
3209 }
3210
3211 static void
3212 gen75_mfd_context_destroy(void *hw_context)
3213 {
3214     struct gen7_mfd_context *gen7_mfd_context = (struct gen7_mfd_context *)hw_context;
3215
3216     dri_bo_unreference(gen7_mfd_context->post_deblocking_output.bo);
3217     gen7_mfd_context->post_deblocking_output.bo = NULL;
3218
3219     dri_bo_unreference(gen7_mfd_context->pre_deblocking_output.bo);
3220     gen7_mfd_context->pre_deblocking_output.bo = NULL;
3221
3222     dri_bo_unreference(gen7_mfd_context->intra_row_store_scratch_buffer.bo);
3223     gen7_mfd_context->intra_row_store_scratch_buffer.bo = NULL;
3224
3225     dri_bo_unreference(gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo);
3226     gen7_mfd_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;
3227
3228     dri_bo_unreference(gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo);
3229     gen7_mfd_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;
3230
3231     dri_bo_unreference(gen7_mfd_context->mpr_row_store_scratch_buffer.bo);
3232     gen7_mfd_context->mpr_row_store_scratch_buffer.bo = NULL;
3233
3234     dri_bo_unreference(gen7_mfd_context->bitplane_read_buffer.bo);
3235     gen7_mfd_context->bitplane_read_buffer.bo = NULL;
3236
3237     dri_bo_unreference(gen7_mfd_context->jpeg_wa_slice_data_bo);
3238
3239     intel_batchbuffer_free(gen7_mfd_context->base.batch);
3240     free(gen7_mfd_context);
3241 }
3242
3243 static void gen75_mfd_mpeg2_context_init(VADriverContextP ctx,
3244                                     struct gen7_mfd_context *gen7_mfd_context)
3245 {
3246     gen7_mfd_context->iq_matrix.mpeg2.load_intra_quantiser_matrix = -1;
3247     gen7_mfd_context->iq_matrix.mpeg2.load_non_intra_quantiser_matrix = -1;
3248     gen7_mfd_context->iq_matrix.mpeg2.load_chroma_intra_quantiser_matrix = -1;
3249     gen7_mfd_context->iq_matrix.mpeg2.load_chroma_non_intra_quantiser_matrix = -1;
3250 }
3251
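/*
 * Constructor for the gen75 decoder hw_context: wires up destroy/run,
 * creates the batch buffer, resets the reference surface list and the
 * JPEG workaround surface, and performs per-codec initialization for
 * the MPEG-2 and H.264 profiles.
 */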
3252 struct hw_context *
3253 gen75_dec_hw_context_init(VADriverContextP ctx, struct object_config *obj_config)
3254 {
3255     struct intel_driver_data *intel = intel_driver_data(ctx);
3256     struct gen7_mfd_context *gen7_mfd_context = calloc(1, sizeof(struct gen7_mfd_context));
3257     int i;
3258
    if (!gen7_mfd_context)
        return NULL;

3259     gen7_mfd_context->base.destroy = gen75_mfd_context_destroy;
3260     gen7_mfd_context->base.run = gen75_mfd_decode_picture;
3261     gen7_mfd_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER, 0);
3262
3263     for (i = 0; i < ARRAY_ELEMS(gen7_mfd_context->reference_surface); i++) {
3264         gen7_mfd_context->reference_surface[i].surface_id = VA_INVALID_ID;
3265         gen7_mfd_context->reference_surface[i].frame_store_id = -1;
3266         gen7_mfd_context->reference_surface[i].obj_surface = NULL;
3267     }
3268
3269     gen7_mfd_context->jpeg_wa_surface_id = VA_INVALID_SURFACE;
3270     gen7_mfd_context->jpeg_wa_surface_object = NULL;
3271
3272     switch (obj_config->profile) {
3273     case VAProfileMPEG2Simple:
3274     case VAProfileMPEG2Main:
3275         gen75_mfd_mpeg2_context_init(ctx, gen7_mfd_context);
3276         break;
3277
3278     case VAProfileH264ConstrainedBaseline:
3279     case VAProfileH264Main:
3280     case VAProfileH264High:
3281         gen75_mfd_avc_context_init(ctx, gen7_mfd_context);
3282         break;
3283     default:
3284         break;
3285     }
3286     return (struct hw_context *)gen7_mfd_context;
3287 }