Remove the hard-coded value to support 4Kx4K encoding
[platform/upstream/libva-intel-driver.git] / src / gen6_mfc.c
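
The buffer sizes in this file are now derived from the frame dimensions carried in the H.264 sequence parameter buffer instead of a fixed maximum; only the SNB direct-MV (DMV) buffers keep a hard-coded 128-MB width. A minimal sketch of the derivation, mirroring gen6_mfc_avc_prepare() further down:

    VAEncSequenceParameterBufferH264 *seq =
        (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
    int width_in_mbs  = seq->picture_width_in_mbs;    /* e.g. 256 for a 4096-pixel-wide frame */
    int height_in_mbs = seq->picture_height_in_mbs;

    if (IS_GEN6(i965->intel.device_id))
        width_in_mbs = 128;    /* SNB keeps the fixed width, but only for sizing the DMV buffers */

    /* per-surface DMV buffers are then allocated as 68 * width_in_mbs * height_in_mbs bytes */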
1 /*
2  * Copyright © 2010-2011 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Zhou Chang <chang.zhou@intel.com>
26  *
27  */
28
29 #ifndef HAVE_GEN_AVC_SURFACE
30 #define HAVE_GEN_AVC_SURFACE 1
31 #endif
32
33 #include <stdio.h>
34 #include <stdlib.h>
35 #include <string.h>
36 #include <assert.h>
37 #include <math.h>
38
39 #include "intel_batchbuffer.h"
40 #include "i965_defines.h"
41 #include "i965_structs.h"
42 #include "i965_drv_video.h"
43 #include "i965_encoder.h"
44 #include "i965_encoder_utils.h"
45 #include "gen6_mfc.h"
46 #include "gen6_vme.h"
47 #include "intel_media.h"
48
49 static const uint32_t gen6_mfc_batchbuffer_avc_intra[][4] = {
50 #include "shaders/utils/mfc_batchbuffer_avc_intra.g6b"
51 };
52
53 static const uint32_t gen6_mfc_batchbuffer_avc_inter[][4] = {
54 #include "shaders/utils/mfc_batchbuffer_avc_inter.g6b"
55 };
56
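/* Media kernels used by the hardware (GPU) batchbuffer path further below to expand the per-MB VME output into MFC PAK commands */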
57 static struct i965_kernel gen6_mfc_kernels[] = {
58     {
59         "MFC AVC INTRA BATCHBUFFER ",
60         MFC_BATCHBUFFER_AVC_INTRA,
61         gen6_mfc_batchbuffer_avc_intra,
62         sizeof(gen6_mfc_batchbuffer_avc_intra),
63         NULL
64     },
65
66     {
67         "MFC AVC INTER BATCHBUFFER ",
68         MFC_BATCHBUFFER_AVC_INTER,
69         gen6_mfc_batchbuffer_avc_inter,
70         sizeof(gen6_mfc_batchbuffer_avc_inter),
71         NULL
72     },
73 };
74
75 static void
76 gen6_mfc_pipe_mode_select(VADriverContextP ctx,
77                           int standard_select,
78                           struct intel_encoder_context *encoder_context)
79 {
80     struct intel_batchbuffer *batch = encoder_context->base.batch;
81     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
82
83     assert(standard_select == MFX_FORMAT_AVC);
84
85     BEGIN_BCS_BATCH(batch, 4);
86
87     OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (4 - 2));
88     OUT_BCS_BATCH(batch,
89                   (1 << 10) | /* disable Stream-Out; advanced QP/bitrate control needs it enabled */
90                   ((!!mfc_context->post_deblocking_output.bo) << 9)  | /* Post Deblocking Output */
91                   ((!!mfc_context->pre_deblocking_output.bo) << 8)  | /* Pre Deblocking Output */
92                   (0 << 7)  | /* disable TLB prefetch */
93                   (0 << 5)  | /* not in stitch mode */
94                   (1 << 4)  | /* encoding mode */
95                   (2 << 0));  /* Standard Select: AVC */
96     OUT_BCS_BATCH(batch,
97                   (0 << 20) | /* round flag in PB slice */
98                   (0 << 19) | /* round flag in Intra8x8 */
99                   (0 << 7)  | /* expand NOA bus flag */
100                   (1 << 6)  | /* must be 1 */
101                   (0 << 5)  | /* disable clock gating for NOA */
102                   (0 << 4)  | /* terminate if AVC motion and POC table error occurs */
103                   (0 << 3)  | /* terminate if AVC mbdata error occurs */
104                   (0 << 2)  | /* terminate if AVC CABAC/CAVLC decode error occurs */
105                   (0 << 1)  | /* AVC long field motion vector */
106                   (0 << 0));  /* always calculate AVC ILDB boundary strength */
107     OUT_BCS_BATCH(batch, 0);
108
109     ADVANCE_BCS_BATCH(batch);
110 }
111
112 static void
113 gen6_mfc_surface_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
114 {
115     struct intel_batchbuffer *batch = encoder_context->base.batch;
116     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
117
118     BEGIN_BCS_BATCH(batch, 6);
119
120     OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
121     OUT_BCS_BATCH(batch, 0);
122     OUT_BCS_BATCH(batch,
123                   ((mfc_context->surface_state.height - 1) << 19) |
124                   ((mfc_context->surface_state.width - 1) << 6));
125     OUT_BCS_BATCH(batch,
126                   (MFX_SURFACE_PLANAR_420_8 << 28) | /* 420 planar YUV surface */
127                   (1 << 27) | /* must be 1 for interleave U/V, hardware requirement */
128                   (0 << 22) | /* surface object control state, FIXME??? */
129                   ((mfc_context->surface_state.w_pitch - 1) << 3) | /* pitch */
130                   (0 << 2)  | /* must be 0 for interleave U/V */
131                   (1 << 1)  | /* must be y-tiled */
132                   (I965_TILEWALK_YMAJOR << 0));                         /* tile walk, TILEWALK_YMAJOR */
133     OUT_BCS_BATCH(batch,
134                   (0 << 16) |                                                           /* must be 0 for interleave U/V */
135                   (mfc_context->surface_state.h_pitch));                /* y offset for U(cb) */
136     OUT_BCS_BATCH(batch, 0);
137     ADVANCE_BCS_BATCH(batch);
138 }
139
140 static void
141 gen6_mfc_pipe_buf_addr_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
142 {
143     struct intel_batchbuffer *batch = encoder_context->base.batch;
144     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
145     int i;
146
147     BEGIN_BCS_BATCH(batch, 24);
148
149     OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (24 - 2));
150
151     if (mfc_context->pre_deblocking_output.bo)
152         OUT_BCS_RELOC(batch, mfc_context->pre_deblocking_output.bo,
153                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
154                       0);
155     else
156         OUT_BCS_BATCH(batch, 0);                                                                                        /* pre output addr   */
157
158     if (mfc_context->post_deblocking_output.bo)
159         OUT_BCS_RELOC(batch, mfc_context->post_deblocking_output.bo,
160                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
161                       0);                                                                                       /* post output addr  */ 
162     else
163         OUT_BCS_BATCH(batch, 0);
164
165     OUT_BCS_RELOC(batch, mfc_context->uncompressed_picture_source.bo,
166                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
167                   0);                                                                                   /* uncompressed data */
168     OUT_BCS_RELOC(batch, mfc_context->macroblock_status_buffer.bo,
169                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
170                   0);                                                                                   /* StreamOut data*/
171     OUT_BCS_RELOC(batch, mfc_context->intra_row_store_scratch_buffer.bo,
172                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
173                   0);   
174     OUT_BCS_RELOC(batch, mfc_context->deblocking_filter_row_store_scratch_buffer.bo,
175                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
176                   0);
177     /* 7..22 Reference pictures*/
178     for (i = 0; i < ARRAY_ELEMS(mfc_context->reference_surfaces); i++) {
179         if ( mfc_context->reference_surfaces[i].bo != NULL) {
180             OUT_BCS_RELOC(batch, mfc_context->reference_surfaces[i].bo,
181                           I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
182                           0);                   
183         } else {
184             OUT_BCS_BATCH(batch, 0);
185         }
186     }
187     OUT_BCS_RELOC(batch, mfc_context->macroblock_status_buffer.bo,
188                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
189                   0);                                                                                   /* Macroblock status buffer*/
190
191     ADVANCE_BCS_BATCH(batch);
192 }
193
194 static void
195 gen6_mfc_ind_obj_base_addr_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
196 {
197     struct intel_batchbuffer *batch = encoder_context->base.batch;
198     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
199     struct gen6_vme_context *vme_context = encoder_context->vme_context;
200
201     BEGIN_BCS_BATCH(batch, 11);
202
203     OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
204     OUT_BCS_BATCH(batch, 0);
205     OUT_BCS_BATCH(batch, 0);
206     /* MFX Indirect MV Object Base Address */
207     OUT_BCS_RELOC(batch, vme_context->vme_output.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
208     OUT_BCS_BATCH(batch, 0);    
209     OUT_BCS_BATCH(batch, 0);
210     OUT_BCS_BATCH(batch, 0);
211     OUT_BCS_BATCH(batch, 0);
212     OUT_BCS_BATCH(batch, 0);
213     /*MFC Indirect PAK-BSE Object Base Address for Encoder*/    
214     OUT_BCS_RELOC(batch,
215                   mfc_context->mfc_indirect_pak_bse_object.bo,
216                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
217                   0);
218     OUT_BCS_RELOC(batch,
219                   mfc_context->mfc_indirect_pak_bse_object.bo,
220                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
221                   mfc_context->mfc_indirect_pak_bse_object.end_offset);
222
223     ADVANCE_BCS_BATCH(batch);
224 }
225
226 static void
227 gen6_mfc_bsp_buf_base_addr_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
228 {
229     struct intel_batchbuffer *batch = encoder_context->base.batch;
230     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
231
232     BEGIN_BCS_BATCH(batch, 4);
233
234     OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
235     OUT_BCS_RELOC(batch, mfc_context->bsd_mpc_row_store_scratch_buffer.bo,
236                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
237                   0);
238     OUT_BCS_BATCH(batch, 0);
239     OUT_BCS_BATCH(batch, 0);
240
241     ADVANCE_BCS_BATCH(batch);
242 }
243
244 static void
245 gen6_mfc_avc_img_state(VADriverContextP ctx,struct encode_state *encode_state,
246                        struct intel_encoder_context *encoder_context)
247 {
248     struct intel_batchbuffer *batch = encoder_context->base.batch;
249     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
250     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
251     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
252     int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
253     int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
254
255     BEGIN_BCS_BATCH(batch, 13);
256     OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (13 - 2));
257     OUT_BCS_BATCH(batch, 
258                   ((width_in_mbs * height_in_mbs) & 0xFFFF));
259     OUT_BCS_BATCH(batch, 
260                   (height_in_mbs << 16) | 
261                   (width_in_mbs << 0));
262     OUT_BCS_BATCH(batch, 
263                   (0 << 24) |     /*Second Chroma QP Offset*/
264                   (0 << 16) |     /*Chroma QP Offset*/
265                   (0 << 14) |   /*Max-bit conformance Intra flag*/
266                   (0 << 13) |   /*Max Macroblock size conformance Inter flag*/
267                   (1 << 12) |   /*Should always be written as "1" */
268                   (0 << 10) |   /*QM Preset FLag */
269                   (0 << 8)  |   /*Image Structure*/
270                   (0 << 0) );   /*Current Decoded Image Frame Store ID, reserved in Encode mode*/
271     OUT_BCS_BATCH(batch,
272                   (400 << 16) |   /*Minimum Frame size*/
273                   (0 << 15) |   /*Disable reading of Macroblock Status Buffer*/
274                   (0 << 14) |   /*Load BitStream Pointer only once, 1 slice per frame*/
275                   (0 << 13) |   /*CABAC 0 word insertion test enable*/
276                   (1 << 12) |   /*MVUnpackedEnable,compliant to DXVA*/
277                   (1 << 10) |   /*Chroma Format IDC, 4:2:0*/
278                   (pPicParameter->pic_fields.bits.entropy_coding_mode_flag << 7)  |   /*0:CAVLC encoding mode,1:CABAC*/
279                   (0 << 6)  |   /*Only valid for VLD decoding mode*/
280                   (0 << 5)  |   /*Constrained Intra Prediction Flag, from PPS*/
281                   (pSequenceParameter->seq_fields.bits.direct_8x8_inference_flag << 4)  |   /*Direct 8x8 inference flag*/
282                   (pPicParameter->pic_fields.bits.transform_8x8_mode_flag << 3)  |   /*8x8 or 4x4 IDCT Transform Mode Flag*/
283                   (1 << 2)  |   /*Frame MB only flag*/
284                   (0 << 1)  |   /*MBAFF mode is not active*/
285                   (0 << 0) );   /*Field picture flag*/
286     OUT_BCS_BATCH(batch, 
287                   (1<<16)   |   /*Frame Size Rate Control Flag*/  
288                   (1<<12)   |   
289                   (1<<9)    |   /*MB level Rate Control Enabling Flag*/
290                   (1 << 3)  |   /*FrameBitRateMinReportMask*/
291                   (1 << 2)  |   /*FrameBitRateMaxReportMask*/
292                   (1 << 1)  |   /*InterMBMaxSizeReportMask*/
293                   (1 << 0) );   /*IntraMBMaxSizeReportMask*/
294     OUT_BCS_BATCH(batch,                        /*Inter and Intra Conformance Max size limit*/
295                   (0x0600 << 16) |              /*InterMbMaxSz 192 Byte*/
296                   (0x0800) );                   /*IntraMbMaxSz 256 Byte*/
297     OUT_BCS_BATCH(batch, 0x00000000);   /*Reserved : MBZReserved*/
298     OUT_BCS_BATCH(batch, 0x01020304);   /*Slice QP Delta for bitrate control*/                  
299     OUT_BCS_BATCH(batch, 0xFEFDFCFB);           
300     OUT_BCS_BATCH(batch, 0x80601004);   /*MAX = 128KB, MIN = 64KB*/
301     OUT_BCS_BATCH(batch, 0x00800001);   
302     OUT_BCS_BATCH(batch, 0);
303
304     ADVANCE_BCS_BATCH(batch);
305 }
306
307 static void
308 gen6_mfc_avc_directmode_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
309 {
310     struct intel_batchbuffer *batch = encoder_context->base.batch;
311     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
312
313     int i;
314
315     BEGIN_BCS_BATCH(batch, 69);
316
317     OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));
318
319     /* Reference frames and Current frames */
320     for(i = 0; i < NUM_MFC_DMV_BUFFERS; i++) {
321         if ( mfc_context->direct_mv_buffers[i].bo != NULL) { 
322             OUT_BCS_RELOC(batch, mfc_context->direct_mv_buffers[i].bo,
323                           I915_GEM_DOMAIN_INSTRUCTION, 0,
324                           0);
325         } else {
326             OUT_BCS_BATCH(batch, 0);
327         }
328     }
329
330     /* POC list */
331     for(i = 0; i < 32; i++) {
332         OUT_BCS_BATCH(batch, i/2);
333     }
334     OUT_BCS_BATCH(batch, 0);
335     OUT_BCS_BATCH(batch, 0);
336
337     ADVANCE_BCS_BATCH(batch);
338 }
339
340 static void
341 gen6_mfc_avc_slice_state(VADriverContextP ctx,
342                          VAEncPictureParameterBufferH264 *pic_param,
343                          VAEncSliceParameterBufferH264 *slice_param,
344                          struct encode_state *encode_state,
345                          struct intel_encoder_context *encoder_context,
346                          int rate_control_enable,
347                          int qp,
348                          struct intel_batchbuffer *batch)
349 {
350     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
351     int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
352     int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
353     int beginmb = slice_param->macroblock_address;
354     int endmb = beginmb + slice_param->num_macroblocks;
355     int beginx = beginmb % width_in_mbs;
356     int beginy = beginmb / width_in_mbs;
357     int nextx =  endmb % width_in_mbs;
358     int nexty = endmb / width_in_mbs;
359     int slice_type = slice_param->slice_type;
360     int last_slice = (endmb == (width_in_mbs * height_in_mbs));
361     int bit_rate_control_target, maxQpN, maxQpP;
362     unsigned char correct[6], grow, shrink;
363     int i;
364     int weighted_pred_idc = 0;
365     unsigned int luma_log2_weight_denom = slice_param->luma_log2_weight_denom;
366     unsigned int chroma_log2_weight_denom = slice_param->chroma_log2_weight_denom;
367
368     if (batch == NULL)
369         batch = encoder_context->base.batch;
370
371     bit_rate_control_target = slice_type;
372     if (slice_type == SLICE_TYPE_SP)
373         bit_rate_control_target = SLICE_TYPE_P;
374     else if (slice_type == SLICE_TYPE_SI)
375         bit_rate_control_target = SLICE_TYPE_I;
376
377     if (slice_type == SLICE_TYPE_P) {
378         weighted_pred_idc = pic_param->pic_fields.bits.weighted_pred_flag;
379     } else if (slice_type == SLICE_TYPE_B) {
380         weighted_pred_idc = pic_param->pic_fields.bits.weighted_bipred_idc;
381
382         if (weighted_pred_idc == 2) {
383             /* 8.4.3 - Derivation process for prediction weights (8-279) */
384             luma_log2_weight_denom = 5;
385             chroma_log2_weight_denom = 5;
386         }
387     }
388
389     maxQpN = mfc_context->bit_rate_control_context[bit_rate_control_target].MaxQpNegModifier;
390     maxQpP = mfc_context->bit_rate_control_context[bit_rate_control_target].MaxQpPosModifier;
391
392     for (i = 0; i < 6; i++)
393         correct[i] = mfc_context->bit_rate_control_context[bit_rate_control_target].Correct[i];
394
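    /* Pack the rate-control grow/shrink parameters: the init value goes in the low nibble and the resistance in the high nibble of the bytes programmed into MFX_AVC_SLICE_STATE below */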
395     grow = mfc_context->bit_rate_control_context[bit_rate_control_target].GrowInit + 
396         (mfc_context->bit_rate_control_context[bit_rate_control_target].GrowResistance << 4);
397     shrink = mfc_context->bit_rate_control_context[bit_rate_control_target].ShrinkInit + 
398         (mfc_context->bit_rate_control_context[bit_rate_control_target].ShrinkResistance << 4);
399
400     BEGIN_BCS_BATCH(batch, 11);
401
402     OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2) );
403     OUT_BCS_BATCH(batch, slice_type);                   /*Slice Type: I:P:B Slice*/
404
405     if (slice_type == SLICE_TYPE_I) {
406         OUT_BCS_BATCH(batch, 0);                        /*no reference frames and pred_weight_table*/
407     } else {
408         OUT_BCS_BATCH(batch,
409                       (1 << 16) |                       /*1 reference frame*/
410                       (chroma_log2_weight_denom << 8) |
411                       (luma_log2_weight_denom << 0));
412     }
413
414     OUT_BCS_BATCH(batch, 
415                   (weighted_pred_idc << 30) |
416                   (slice_param->direct_spatial_mv_pred_flag<<29) |             /*Direct Prediction Type*/
417                   (slice_param->disable_deblocking_filter_idc << 27) |
418                   (slice_param->cabac_init_idc << 24) |
419                   (qp<<16) |                    /*Slice Quantization Parameter*/
420                   ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
421                   ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
422     OUT_BCS_BATCH(batch,
423                   (beginy << 24) |                      /*First MB X&Y, the beginning position of the current slice*/
424                   (beginx << 16) |
425                   slice_param->macroblock_address );
426     OUT_BCS_BATCH(batch, (nexty << 16) | nextx);                       /*Next slice first MB X&Y*/
427     OUT_BCS_BATCH(batch, 
428                   (0/*rate_control_enable*/ << 31) |            /*in CBR mode RateControlCounterEnable = enable*/
429                   (1 << 30) |           /*ResetRateControlCounter*/
430                   (0 << 28) |           /*RC Trigger Mode = Always Rate Control*/
431                   (4 << 24) |     /*RC Stable Tolerance, middle level*/
432                   (0/*rate_control_enable*/ << 23) |     /*RC Panic Enable*/                 
433                   (0 << 22) |     /*QP mode, don't modify CBP*/
434                   (0 << 21) |     /*MB Type Direct Conversion Enabled*/ 
435                   (0 << 20) |     /*MB Type Skip Conversion Enabled*/ 
436                   (last_slice << 19) |     /*IsLastSlice*/
437                   (0 << 18) |   /*BitstreamOutputFlag Compressed BitStream Output Disable Flag 0:enable 1:disable*/
438                   (1 << 17) |       /*HeaderPresentFlag*/       
439                   (1 << 16) |       /*SliceData PresentFlag*/
440                   (1 << 15) |       /*TailPresentFlag*/
441                   (1 << 13) |       /*RBSP NAL TYPE*/   
442                   (0 << 12) );    /*CabacZeroWordInsertionEnable*/
443     OUT_BCS_BATCH(batch, mfc_context->mfc_indirect_pak_bse_object.offset);
444     OUT_BCS_BATCH(batch,
445                   (maxQpN << 24) |     /*Target QP - 24 is lowest QP*/ 
446                   (maxQpP << 16) |     /*Target QP + 20 is highest QP*/
447                   (shrink << 8)  |
448                   (grow << 0));   
449     OUT_BCS_BATCH(batch,
450                   (correct[5] << 20) |
451                   (correct[4] << 16) |
452                   (correct[3] << 12) |
453                   (correct[2] << 8) |
454                   (correct[1] << 4) |
455                   (correct[0] << 0));
456     OUT_BCS_BATCH(batch, 0);
457
458     ADVANCE_BCS_BATCH(batch);
459 }
460
461 static void gen6_mfc_avc_qm_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
462 {
463     struct intel_batchbuffer *batch = encoder_context->base.batch;
464     int i;
465
466     BEGIN_BCS_BATCH(batch, 58);
467
468     OUT_BCS_BATCH(batch, MFX_AVC_QM_STATE | 56);
469     OUT_BCS_BATCH(batch, 0xFF ) ; 
470     for( i = 0; i < 56; i++) {
471         OUT_BCS_BATCH(batch, 0x10101010); 
472     }   
473
474     ADVANCE_BCS_BATCH(batch);
475 }
476
477 static void gen6_mfc_avc_fqm_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
478 {
479     struct intel_batchbuffer *batch = encoder_context->base.batch;
480     int i;
481
482     BEGIN_BCS_BATCH(batch, 113);
483     OUT_BCS_BATCH(batch, MFC_AVC_FQM_STATE | (113 - 2));
484
485     for(i = 0; i < 112;i++) {
486         OUT_BCS_BATCH(batch, 0x10001000);
487     }   
488
489     ADVANCE_BCS_BATCH(batch);   
490 }
491
492 static void
493 gen6_mfc_avc_ref_idx_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
494 {
495     struct intel_batchbuffer *batch = encoder_context->base.batch;
496     int i;
497
498     BEGIN_BCS_BATCH(batch, 10);
499     OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | 8); 
500     OUT_BCS_BATCH(batch, 0);                  //Select L0
501     OUT_BCS_BATCH(batch, 0x80808020);         //Only 1 reference
502     for(i = 0; i < 7; i++) {
503         OUT_BCS_BATCH(batch, 0x80808080);
504     }   
505     ADVANCE_BCS_BATCH(batch);
506
507     BEGIN_BCS_BATCH(batch, 10);
508     OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | 8); 
509     OUT_BCS_BATCH(batch, 1);                  //Select L1
510     OUT_BCS_BATCH(batch, 0x80808022);         //Only 1 reference
511     for(i = 0; i < 7; i++) {
512         OUT_BCS_BATCH(batch, 0x80808080);
513     }   
514     ADVANCE_BCS_BATCH(batch);
515 }
516         
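/* Emit an MFC_AVC_INSERT_OBJECT command that copies raw bitstream data (SPS/PPS/slice headers, tail padding) into the output; the flags control emulation-byte handling and whether this is the last header or the end of the slice */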
517 static void
518 gen6_mfc_avc_insert_object(VADriverContextP ctx, struct intel_encoder_context *encoder_context,
519                            unsigned int *insert_data, int lenght_in_dws, int data_bits_in_last_dw,
520                            int skip_emul_byte_count, int is_last_header, int is_end_of_slice, int emulation_flag,
521                            struct intel_batchbuffer *batch)
522 {
523     if (batch == NULL)
524         batch = encoder_context->base.batch;
525
526     BEGIN_BCS_BATCH(batch, lenght_in_dws + 2);
527
528     OUT_BCS_BATCH(batch, MFC_AVC_INSERT_OBJECT | (lenght_in_dws + 2 - 2));
529
530     OUT_BCS_BATCH(batch,
531                   (0 << 16) |   /* always start at offset 0 */
532                   (data_bits_in_last_dw << 8) |
533                   (skip_emul_byte_count << 4) |
534                   (!!emulation_flag << 3) |
535                   ((!!is_last_header) << 2) |
536                   ((!!is_end_of_slice) << 1) |
537                   (0 << 0));    /* FIXME: ??? */
538
539     intel_batchbuffer_data(batch, insert_data, lenght_in_dws * 4);
540     ADVANCE_BCS_BATCH(batch);
541 }
542
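/* (Re)allocate the per-context MFC scratch buffers; the row-store and macroblock-status buffers are sized from the sequence-parameter frame dimensions rather than a fixed maximum */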
543 static void gen6_mfc_init(VADriverContextP ctx, 
544                             struct encode_state *encode_state,
545                             struct intel_encoder_context *encoder_context)
546 {
547     struct i965_driver_data *i965 = i965_driver_data(ctx);
548     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
549     dri_bo *bo;
550     int i;
551     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
552     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
553     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
554
555     /*Encode common setup for MFC*/
556     dri_bo_unreference(mfc_context->post_deblocking_output.bo);
557     mfc_context->post_deblocking_output.bo = NULL;
558
559     dri_bo_unreference(mfc_context->pre_deblocking_output.bo);
560     mfc_context->pre_deblocking_output.bo = NULL;
561
562     dri_bo_unreference(mfc_context->uncompressed_picture_source.bo);
563     mfc_context->uncompressed_picture_source.bo = NULL;
564
565     dri_bo_unreference(mfc_context->mfc_indirect_pak_bse_object.bo); 
566     mfc_context->mfc_indirect_pak_bse_object.bo = NULL;
567
568     for (i = 0; i < NUM_MFC_DMV_BUFFERS; i++){
569         if (mfc_context->direct_mv_buffers[i].bo != NULL)
570             dri_bo_unreference(mfc_context->direct_mv_buffers[i].bo);
571         mfc_context->direct_mv_buffers[i].bo = NULL;
572     }
573
574     for (i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++){
575         if (mfc_context->reference_surfaces[i].bo != NULL)
576             dri_bo_unreference(mfc_context->reference_surfaces[i].bo);
577         mfc_context->reference_surfaces[i].bo = NULL;  
578     }
579
580     dri_bo_unreference(mfc_context->intra_row_store_scratch_buffer.bo);
581     bo = dri_bo_alloc(i965->intel.bufmgr,
582                       "Buffer",
583                       width_in_mbs * 64,
584                       64);
585     assert(bo);
586     mfc_context->intra_row_store_scratch_buffer.bo = bo;
587
588     dri_bo_unreference(mfc_context->macroblock_status_buffer.bo);
589     bo = dri_bo_alloc(i965->intel.bufmgr,
590                       "Buffer",
591                       width_in_mbs * height_in_mbs * 16,
592                       64);
593     assert(bo);
594     mfc_context->macroblock_status_buffer.bo = bo;
595
596     dri_bo_unreference(mfc_context->deblocking_filter_row_store_scratch_buffer.bo);
597     bo = dri_bo_alloc(i965->intel.bufmgr,
598                       "Buffer",
599                       4 * width_in_mbs * 64,  /* 4 * width_in_mbs * 64 */
600                       64);
601     assert(bo);
602     mfc_context->deblocking_filter_row_store_scratch_buffer.bo = bo;
603
604     dri_bo_unreference(mfc_context->bsd_mpc_row_store_scratch_buffer.bo);
605     bo = dri_bo_alloc(i965->intel.bufmgr,
606                       "Buffer",
607                       128 * width_in_mbs, /* 2 * width_in_mbs * 64 */
608                       0x1000);
609     assert(bo);
610     mfc_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
611
612     dri_bo_unreference(mfc_context->mfc_batchbuffer_surface.bo);
613     mfc_context->mfc_batchbuffer_surface.bo = NULL;
614
615     dri_bo_unreference(mfc_context->aux_batchbuffer_surface.bo);
616     mfc_context->aux_batchbuffer_surface.bo = NULL;
617
618     if (mfc_context->aux_batchbuffer)
619         intel_batchbuffer_free(mfc_context->aux_batchbuffer);
620
621     mfc_context->aux_batchbuffer = intel_batchbuffer_new(&i965->intel, I915_EXEC_BSD);
622     mfc_context->aux_batchbuffer_surface.bo = mfc_context->aux_batchbuffer->buffer;
623     dri_bo_reference(mfc_context->aux_batchbuffer_surface.bo);
624     mfc_context->aux_batchbuffer_surface.pitch = 16;
625     mfc_context->aux_batchbuffer_surface.num_blocks = mfc_context->aux_batchbuffer->size / 16;
626     mfc_context->aux_batchbuffer_surface.size_block = 16;
627
628     i965_gpe_context_init(ctx, &mfc_context->gpe_context);
629 }
630
631 static void gen6_mfc_avc_pipeline_picture_programing( VADriverContextP ctx,
632                                       struct encode_state *encode_state,
633                                       struct intel_encoder_context *encoder_context)
634 {
635     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
636
637     mfc_context->pipe_mode_select(ctx, MFX_FORMAT_AVC, encoder_context);
638     mfc_context->set_surface_state(ctx, encoder_context);
639     mfc_context->ind_obj_base_addr_state(ctx, encoder_context);
640     gen6_mfc_pipe_buf_addr_state(ctx, encoder_context);
641     gen6_mfc_bsp_buf_base_addr_state(ctx, encoder_context);
642     mfc_context->avc_img_state(ctx, encode_state, encoder_context);
643     mfc_context->avc_qm_state(ctx, encoder_context);
644     mfc_context->avc_fqm_state(ctx, encoder_context);
645     gen6_mfc_avc_directmode_state(ctx, encoder_context); 
646     gen6_mfc_avc_ref_idx_state(ctx, encoder_context);
647 }
648
649
650 static VAStatus gen6_mfc_avc_prepare(VADriverContextP ctx, 
651                                      struct encode_state *encode_state,
652                                      struct intel_encoder_context *encoder_context)
653 {
654     struct i965_driver_data *i965 = i965_driver_data(ctx);
655     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
656     struct object_surface *obj_surface; 
657     struct object_buffer *obj_buffer;
658     GenAvcSurface *gen6_avc_surface;
659     dri_bo *bo;
660     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
661     VAStatus vaStatus = VA_STATUS_SUCCESS;
662     int i, j, enable_avc_ildb = 0;
663     VAEncSliceParameterBufferH264 *slice_param;
664     VACodedBufferSegment *coded_buffer_segment;
665     unsigned char *flag = NULL;
666     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
667     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
668     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
669
670     if (IS_GEN6(i965->intel.device_id)) {
671         /* On SNB the width in MBs must stay fixed at 128 when sizing the DMV buffers */
672         width_in_mbs = 128;
673     }
674
675     for (j = 0; j < encode_state->num_slice_params_ext && enable_avc_ildb == 0; j++) {
676         assert(encode_state->slice_params_ext && encode_state->slice_params_ext[j]->buffer);
677         slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[j]->buffer;
678
679         for (i = 0; i < encode_state->slice_params_ext[j]->num_elements; i++) {
680             assert((slice_param->slice_type == SLICE_TYPE_I) ||
681                    (slice_param->slice_type == SLICE_TYPE_SI) ||
682                    (slice_param->slice_type == SLICE_TYPE_P) ||
683                    (slice_param->slice_type == SLICE_TYPE_SP) ||
684                    (slice_param->slice_type == SLICE_TYPE_B));
685
686             if (slice_param->disable_deblocking_filter_idc != 1) {
687                 enable_avc_ildb = 1;
688                 break;
689             }
690
691             slice_param++;
692         }
693     }
694
695     /*Setup all the input&output object*/
696
697     /* Setup current frame and current direct mv buffer*/
698     obj_surface = SURFACE(pPicParameter->CurrPic.picture_id);
699     assert(obj_surface);
700     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);
701
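    /* Lazily attach per-surface top/bottom direct-MV (DMV) buffers, sized from the frame dimensions in macroblocks */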
702     if ( obj_surface->private_data == NULL) {
703         gen6_avc_surface = calloc(sizeof(GenAvcSurface), 1);
704         gen6_avc_surface->dmv_top = 
705             dri_bo_alloc(i965->intel.bufmgr,
706                          "Buffer",
707                          68 * width_in_mbs * height_in_mbs, 
708                          64);
709         gen6_avc_surface->dmv_bottom = 
710             dri_bo_alloc(i965->intel.bufmgr,
711                          "Buffer",
712                          68 * width_in_mbs * height_in_mbs, 
713                          64);
714         assert(gen6_avc_surface->dmv_top);
715         assert(gen6_avc_surface->dmv_bottom);
716         obj_surface->private_data = (void *)gen6_avc_surface;
717         obj_surface->free_private_data = (void *)gen_free_avc_surface; 
718     }
719     gen6_avc_surface = (GenAvcSurface *) obj_surface->private_data;
720     mfc_context->direct_mv_buffers[NUM_MFC_DMV_BUFFERS - 2].bo = gen6_avc_surface->dmv_top;
721     mfc_context->direct_mv_buffers[NUM_MFC_DMV_BUFFERS - 1].bo = gen6_avc_surface->dmv_bottom;
722     dri_bo_reference(gen6_avc_surface->dmv_top);
723     dri_bo_reference(gen6_avc_surface->dmv_bottom);
724
725     if (enable_avc_ildb) {
726         mfc_context->post_deblocking_output.bo = obj_surface->bo;
727         dri_bo_reference(mfc_context->post_deblocking_output.bo);
728     } else {
729         mfc_context->pre_deblocking_output.bo = obj_surface->bo;
730         dri_bo_reference(mfc_context->pre_deblocking_output.bo);
731     }
732
733     mfc_context->surface_state.width = obj_surface->orig_width;
734     mfc_context->surface_state.height = obj_surface->orig_height;
735     mfc_context->surface_state.w_pitch = obj_surface->width;
736     mfc_context->surface_state.h_pitch = obj_surface->height;
737     
738     /* Setup reference frames and direct mv buffers*/
739     for(i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++) {
740         if ( pPicParameter->ReferenceFrames[i].picture_id != VA_INVALID_ID ) { 
741             obj_surface = SURFACE(pPicParameter->ReferenceFrames[i].picture_id);
742             assert(obj_surface);
743             if (obj_surface->bo != NULL) {
744                 mfc_context->reference_surfaces[i].bo = obj_surface->bo;
745                 dri_bo_reference(obj_surface->bo);
746             }
747             /* Check DMV buffer */
748             if ( obj_surface->private_data == NULL) {
749                 
750                 gen6_avc_surface = calloc(sizeof(GenAvcSurface), 1);
751                 gen6_avc_surface->dmv_top = 
752                     dri_bo_alloc(i965->intel.bufmgr,
753                                  "Buffer",
754                                  68 * width_in_mbs * height_in_mbs, 
755                                  64);
756                 gen6_avc_surface->dmv_bottom = 
757                     dri_bo_alloc(i965->intel.bufmgr,
758                                  "Buffer",
759                                  68 * width_in_mbs * height_in_mbs, 
760                                  64);
761                 assert(gen6_avc_surface->dmv_top);
762                 assert(gen6_avc_surface->dmv_bottom);
763                 obj_surface->private_data = gen6_avc_surface;
764                 obj_surface->free_private_data = gen_free_avc_surface; 
765             }
766     
767             gen6_avc_surface = (GenAvcSurface *) obj_surface->private_data;
768             /* Setup DMV buffer */
769             mfc_context->direct_mv_buffers[i*2].bo = gen6_avc_surface->dmv_top;
770             mfc_context->direct_mv_buffers[i*2+1].bo = gen6_avc_surface->dmv_bottom; 
771             dri_bo_reference(gen6_avc_surface->dmv_top);
772             dri_bo_reference(gen6_avc_surface->dmv_bottom);
773         } else {
774             break;
775         }
776     }
777         
778     obj_surface = SURFACE(encoder_context->input_yuv_surface);
779     assert(obj_surface && obj_surface->bo);
780     mfc_context->uncompressed_picture_source.bo = obj_surface->bo;
781     dri_bo_reference(mfc_context->uncompressed_picture_source.bo);
782
783     obj_buffer = BUFFER (pPicParameter->coded_buf); /* FIXME: fix this later */
784     bo = obj_buffer->buffer_store->bo;
785     assert(bo);
786     mfc_context->mfc_indirect_pak_bse_object.bo = bo;
787     mfc_context->mfc_indirect_pak_bse_object.offset = I965_CODEDBUFFER_SIZE;
788     mfc_context->mfc_indirect_pak_bse_object.end_offset = ALIGN(obj_buffer->size_element - 0x1000, 0x1000);
789     dri_bo_reference(mfc_context->mfc_indirect_pak_bse_object.bo);
790     
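    /* Map the coded buffer and clear the driver status byte stored immediately after the VACodedBufferSegment header */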
791     dri_bo_map(bo, 1);
792     coded_buffer_segment = (VACodedBufferSegment *)bo->virtual;
793     flag = (unsigned char *)(coded_buffer_segment + 1);
794     *flag = 0;
795     dri_bo_unmap(bo);
796
797     return vaStatus;
798 }
799
800 static VAStatus gen6_mfc_run(VADriverContextP ctx, 
801                              struct encode_state *encode_state,
802                              struct intel_encoder_context *encoder_context)
803 {
804     struct intel_batchbuffer *batch = encoder_context->base.batch;
805
806     intel_batchbuffer_flush(batch);             //run the pipeline
807
808     return VA_STATUS_SUCCESS;
809 }
810
811 static VAStatus
812 gen6_mfc_stop(VADriverContextP ctx, 
813               struct encode_state *encode_state,
814               struct intel_encoder_context *encoder_context,
815               int *encoded_bits_size)
816 {
817     VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
818     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
819     VACodedBufferSegment *coded_buffer_segment;
820     
821     vaStatus = i965_MapBuffer(ctx, pPicParameter->coded_buf, (void **)&coded_buffer_segment);
822     assert(vaStatus == VA_STATUS_SUCCESS);
823     *encoded_bits_size = coded_buffer_segment->size * 8;
824     i965_UnmapBuffer(ctx, pPicParameter->coded_buf);
825
826     return VA_STATUS_SUCCESS;
827 }
828
829 #if __SOFTWARE__
830
831 static int
832 gen6_mfc_avc_pak_object_intra(VADriverContextP ctx, int x, int y, int end_mb, int qp,unsigned int *msg,
833                               struct intel_encoder_context *encoder_context,
834                               unsigned char target_mb_size, unsigned char max_mb_size,
835                               struct intel_batchbuffer *batch)
836 {
837     int len_in_dwords = 11;
838
839     if (batch == NULL)
840         batch = encoder_context->base.batch;
841
842     BEGIN_BCS_BATCH(batch, len_in_dwords);
843
844     OUT_BCS_BATCH(batch, MFC_AVC_PAK_OBJECT | (len_in_dwords - 2));
845     OUT_BCS_BATCH(batch, 0);
846     OUT_BCS_BATCH(batch, 0);
847     OUT_BCS_BATCH(batch, 
848                   (0 << 24) |           /* PackedMvNum, Debug*/
849                   (0 << 20) |           /* No motion vector */
850                   (1 << 19) |           /* CbpDcY */
851                   (1 << 18) |           /* CbpDcU */
852                   (1 << 17) |           /* CbpDcV */
853                   (msg[0] & 0xFFFF) );
854
855     OUT_BCS_BATCH(batch, (0xFFFF << 16) | (y << 8) | x);                /* Code Block Pattern for Y*/
856     OUT_BCS_BATCH(batch, 0x000F000F);                                                   /* Code Block Pattern */                
857     OUT_BCS_BATCH(batch, (0 << 27) | (end_mb << 26) | qp);      /* Last MB */
858
859     /*Stuff for Intra MB*/
860     OUT_BCS_BATCH(batch, msg[1]);                       /* We use Intra16x16, not 4x4 pred mode */
861     OUT_BCS_BATCH(batch, msg[2]);       
862     OUT_BCS_BATCH(batch, msg[3]&0xFC);          
863     
864     /*MaxSizeInWord and TargetSizeInWord*/
865     OUT_BCS_BATCH(batch, (max_mb_size << 24) |
866                   (target_mb_size << 16) );
867
868     ADVANCE_BCS_BATCH(batch);
869
870     return len_in_dwords;
871 }
872
873 static int
874 gen6_mfc_avc_pak_object_inter(VADriverContextP ctx, int x, int y, int end_mb, int qp,
875                               unsigned int *msg, unsigned int offset,
876                               struct intel_encoder_context *encoder_context,
877                               unsigned char target_mb_size,unsigned char max_mb_size, int slice_type,
878                               struct intel_batchbuffer *batch)
879 {
880     int len_in_dwords = 11;
881
882     if (batch == NULL)
883         batch = encoder_context->base.batch;
884
885     BEGIN_BCS_BATCH(batch, len_in_dwords);
886
887     OUT_BCS_BATCH(batch, MFC_AVC_PAK_OBJECT | (len_in_dwords - 2));
888
889     OUT_BCS_BATCH(batch, msg[2]);         /* 32 MV*/
890     OUT_BCS_BATCH(batch, offset);
891
892     OUT_BCS_BATCH(batch, msg[0]);
893
894     OUT_BCS_BATCH(batch, (0xFFFF<<16) | (y << 8) | x);        /* Code Block Pattern for Y*/
895     OUT_BCS_BATCH(batch, 0x000F000F);                         /* Code Block Pattern */  
896 #if 0 
897     if ( slice_type == SLICE_TYPE_B) {
898         OUT_BCS_BATCH(batch, (0xF<<28) | (end_mb << 26) | qp);  /* Last MB */
899     } else {
900         OUT_BCS_BATCH(batch, (end_mb << 26) | qp);      /* Last MB */
901     }
902 #else
903     OUT_BCS_BATCH(batch, (end_mb << 26) | qp);  /* Last MB */
904 #endif
905
906
907     /*Stuff for Inter MB*/
908     OUT_BCS_BATCH(batch, msg[1]);        
909     OUT_BCS_BATCH(batch, 0x0);    
910     OUT_BCS_BATCH(batch, 0x0);        
911
912     /*MaxSizeInWord and TargetSizeInWord*/
913     OUT_BCS_BATCH(batch, (max_mb_size << 24) |
914                   (target_mb_size << 16) );
915
916     ADVANCE_BCS_BATCH(batch);
917
918     return len_in_dwords;
919 }
920
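/* Software path: build every PAK command for one slice on the CPU: slice state, packed headers, one MFC_AVC_PAK_OBJECT per macroblock taken from the VME output, and the trailing tail data */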
921 static void 
922 gen6_mfc_avc_pipeline_slice_programing(VADriverContextP ctx,
923                                        struct encode_state *encode_state,
924                                        struct intel_encoder_context *encoder_context,
925                                        int slice_index,
926                                        struct intel_batchbuffer *slice_batch)
927 {
928     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
929     struct gen6_vme_context *vme_context = encoder_context->vme_context;
930     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
931     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
932     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[slice_index]->buffer; 
933     unsigned int *msg = NULL, offset = 0;
934     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
935     int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
936     int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
937     int last_slice = (pSliceParameter->macroblock_address + pSliceParameter->num_macroblocks) == (width_in_mbs * height_in_mbs);
938     int i,x,y;
939     int qp = pPicParameter->pic_init_qp + pSliceParameter->slice_qp_delta;
940     unsigned int rate_control_mode = encoder_context->rate_control_mode;
941     unsigned char *slice_header = NULL;
942     int slice_header_length_in_bits = 0;
943     unsigned int tail_data[] = { 0x0, 0x0 };
944     int slice_type = pSliceParameter->slice_type;
945
946
947     if (rate_control_mode == VA_RC_CBR) {
948         qp = mfc_context->bit_rate_control_context[slice_type].QpPrimeY;
949         pSliceParameter->slice_qp_delta = qp - pPicParameter->pic_init_qp;
950     }
951
952     /* only 8-bit pixel bit-depth is supported */
953     assert(pSequenceParameter->bit_depth_luma_minus8 == 0);
954     assert(pSequenceParameter->bit_depth_chroma_minus8 == 0);
955     assert(pPicParameter->pic_init_qp >= 0 && pPicParameter->pic_init_qp < 52);
956     assert(qp >= 0 && qp < 52);
957
958     gen6_mfc_avc_slice_state(ctx, 
959                              pPicParameter,
960                              pSliceParameter,
961                              encode_state, encoder_context,
962                              (rate_control_mode == VA_RC_CBR), qp, slice_batch);
963
964     if ( slice_index == 0) 
965         intel_mfc_avc_pipeline_header_programing(ctx, encode_state, encoder_context, slice_batch);
966
967     slice_header_length_in_bits = build_avc_slice_header(pSequenceParameter, pPicParameter, pSliceParameter, &slice_header);
968
969     // slice header
970     mfc_context->insert_object(ctx, encoder_context,
971                                (unsigned int *)slice_header, ALIGN(slice_header_length_in_bits, 32) >> 5, slice_header_length_in_bits & 0x1f,
972                                5,  /* first 5 bytes are start code + nal unit type */
973                                1, 0, 1, slice_batch);
974
975     dri_bo_map(vme_context->vme_output.bo , 1);
976     msg = (unsigned int *)vme_context->vme_output.bo->virtual;
977
978     if (is_intra) {
979         msg += pSliceParameter->macroblock_address * INTRA_VME_OUTPUT_IN_DWS;
980     } else {
981         msg += pSliceParameter->macroblock_address * INTER_VME_OUTPUT_IN_DWS;
982         msg += 32; /* the first 32 DWs are MVs */
983         offset = pSliceParameter->macroblock_address * INTER_VME_OUTPUT_IN_BYTES;
984     }
985    
986     for (i = pSliceParameter->macroblock_address; 
987          i < pSliceParameter->macroblock_address + pSliceParameter->num_macroblocks; i++) {
988         int last_mb = (i == (pSliceParameter->macroblock_address + pSliceParameter->num_macroblocks - 1) );
989         x = i % width_in_mbs;
990         y = i / width_in_mbs;
991
992         if (is_intra) {
993             assert(msg);
994             gen6_mfc_avc_pak_object_intra(ctx, x, y, last_mb, qp, msg, encoder_context, 0, 0, slice_batch);
995             msg += INTRA_VME_OUTPUT_IN_DWS;
996         } else {
997             if (msg[0] & INTRA_MB_FLAG_MASK) {
998                 gen6_mfc_avc_pak_object_intra(ctx, x, y, last_mb, qp, msg, encoder_context, 0, 0, slice_batch);
999             } else {
1000                 gen6_mfc_avc_pak_object_inter(ctx, x, y, last_mb, qp, msg, offset, encoder_context, 0, 0, pSliceParameter->slice_type, slice_batch);
1001             }
1002
1003             msg += INTER_VME_OUTPUT_IN_DWS;
1004             offset += INTER_VME_OUTPUT_IN_BYTES;
1005         }
1006     }
1007    
1008     dri_bo_unmap(vme_context->vme_output.bo);
1009
1010     if ( last_slice ) {    
1011         mfc_context->insert_object(ctx, encoder_context,
1012                                    tail_data, 2, 8,
1013                                    2, 1, 1, 0, slice_batch);
1014     } else {
1015         mfc_context->insert_object(ctx, encoder_context,
1016                                    tail_data, 1, 8,
1017                                    1, 1, 1, 0, slice_batch);
1018     }
1019
1020     free(slice_header);
1021
1022 }
1023
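/* Software path: fill a separate BSD batch buffer with the PAK commands for all slices and return its bo to the caller */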
1024 static dri_bo *
1025 gen6_mfc_avc_software_batchbuffer(VADriverContextP ctx,
1026                                   struct encode_state *encode_state,
1027                                   struct intel_encoder_context *encoder_context)
1028 {
1029     struct i965_driver_data *i965 = i965_driver_data(ctx);
1030     struct intel_batchbuffer *batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_BSD);
1031     dri_bo *batch_bo = batch->buffer;
1032     int i;
1033
1034     for (i = 0; i < encode_state->num_slice_params_ext; i++) {
1035         gen6_mfc_avc_pipeline_slice_programing(ctx, encode_state, encoder_context, i, batch);
1036     }
1037
1038     intel_batchbuffer_align(batch, 8);
1039     
1040     BEGIN_BCS_BATCH(batch, 2);
1041     OUT_BCS_BATCH(batch, 0);
1042     OUT_BCS_BATCH(batch, MI_BATCH_BUFFER_END);
1043     ADVANCE_BCS_BATCH(batch);
1044
1045     dri_bo_reference(batch_bo);
1046     intel_batchbuffer_free(batch);
1047
1048     return batch_bo;
1049 }
1050
1051 #else
1052
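/* Hardware path: bind the VME output and the auxiliary slice-header batch buffer as input surfaces for the MFC batchbuffer kernels */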
1053 static void
1054 gen6_mfc_batchbuffer_surfaces_input(VADriverContextP ctx,
1055                                     struct encode_state *encode_state,
1056                                     struct intel_encoder_context *encoder_context)
1057
1058 {
1059     struct gen6_vme_context *vme_context = encoder_context->vme_context;
1060     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1061
1062     assert(vme_context->vme_output.bo);
1063     mfc_context->buffer_suface_setup(ctx,
1064                                      &mfc_context->gpe_context,
1065                                      &vme_context->vme_output,
1066                                      BINDING_TABLE_OFFSET(BIND_IDX_VME_OUTPUT),
1067                                      SURFACE_STATE_OFFSET(BIND_IDX_VME_OUTPUT));
1068     assert(mfc_context->aux_batchbuffer_surface.bo);
1069     mfc_context->buffer_suface_setup(ctx,
1070                                      &mfc_context->gpe_context,
1071                                      &mfc_context->aux_batchbuffer_surface,
1072                                      BINDING_TABLE_OFFSET(BIND_IDX_MFC_SLICE_HEADER),
1073                                      SURFACE_STATE_OFFSET(BIND_IDX_MFC_SLICE_HEADER));
1074 }
1075
1076 static void
1077 gen6_mfc_batchbuffer_surfaces_output(VADriverContextP ctx,
1078                                      struct encode_state *encode_state,
1079                                      struct intel_encoder_context *encoder_context)
1080
1081 {
1082     struct i965_driver_data *i965 = i965_driver_data(ctx);
1083     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1084     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
1085     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
1086     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
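    /* One command block per macroblock, plus extra blocks per slice for the slice-level commands and the final batch-buffer end */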
1087     mfc_context->mfc_batchbuffer_surface.num_blocks = width_in_mbs * height_in_mbs + encode_state->num_slice_params_ext * 8 + 1;
1088     mfc_context->mfc_batchbuffer_surface.size_block = 16 * CMD_LEN_IN_OWORD; /* 3 OWORDs */
1089     mfc_context->mfc_batchbuffer_surface.pitch = 16;
1090     mfc_context->mfc_batchbuffer_surface.bo = dri_bo_alloc(i965->intel.bufmgr, 
1091                                                            "MFC batchbuffer",
1092                                                            mfc_context->mfc_batchbuffer_surface.num_blocks * mfc_context->mfc_batchbuffer_surface.size_block,
1093                                                            0x1000);
1094     mfc_context->buffer_suface_setup(ctx,
1095                                      &mfc_context->gpe_context,
1096                                      &mfc_context->mfc_batchbuffer_surface,
1097                                      BINDING_TABLE_OFFSET(BIND_IDX_MFC_BATCHBUFFER),
1098                                      SURFACE_STATE_OFFSET(BIND_IDX_MFC_BATCHBUFFER));
1099 }
1100
1101 static void
1102 gen6_mfc_batchbuffer_surfaces_setup(VADriverContextP ctx, 
1103                                     struct encode_state *encode_state,
1104                                     struct intel_encoder_context *encoder_context)
1105 {
1106     gen6_mfc_batchbuffer_surfaces_input(ctx, encode_state, encoder_context);
1107     gen6_mfc_batchbuffer_surfaces_output(ctx, encode_state, encoder_context);
1108 }
1109
1110 static void
1111 gen6_mfc_batchbuffer_idrt_setup(VADriverContextP ctx, 
1112                                 struct encode_state *encode_state,
1113                                 struct intel_encoder_context *encoder_context)
1114 {
1115     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1116     struct gen6_interface_descriptor_data *desc;   
1117     int i;
1118     dri_bo *bo;
1119
1120     bo = mfc_context->gpe_context.idrt.bo;
1121     dri_bo_map(bo, 1);
1122     assert(bo->virtual);
1123     desc = bo->virtual;
1124
1125     for (i = 0; i < mfc_context->gpe_context.num_kernels; i++) {
1126         struct i965_kernel *kernel;
1127
1128         kernel = &mfc_context->gpe_context.kernels[i];
1129         assert(sizeof(*desc) == 32);
1130
1131         /*Set up the descriptor table*/
1132         memset(desc, 0, sizeof(*desc));
1133         desc->desc0.kernel_start_pointer = (kernel->bo->offset >> 6);
1134         desc->desc2.sampler_count = 0;
1135         desc->desc2.sampler_state_pointer = 0;
1136         desc->desc3.binding_table_entry_count = 2;
1137         desc->desc3.binding_table_pointer = (BINDING_TABLE_OFFSET(0) >> 5);
1138         desc->desc4.constant_urb_entry_read_offset = 0;
1139         desc->desc4.constant_urb_entry_read_length = 4;
1140                 
1141         /*kernel start*/
1142         dri_bo_emit_reloc(bo,   
1143                           I915_GEM_DOMAIN_INSTRUCTION, 0,
1144                           0,
1145                           i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc0),
1146                           kernel->bo);
1147         desc++;
1148     }
1149
1150     dri_bo_unmap(bo);
1151 }
1152
1153 static void
1154 gen6_mfc_batchbuffer_constant_setup(VADriverContextP ctx, 
1155                                     struct encode_state *encode_state,
1156                                     struct intel_encoder_context *encoder_context)
1157 {
1158     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1159     
1160     (void)mfc_context;
1161 }
1162
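/* Queue one CMD_MEDIA_OBJECT for the MFC batchbuffer kernel; the inline payload tells the kernel where the slice header lives, where to write into the batch buffer, and which run of macroblocks to cover */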
1163 static void
1164 gen6_mfc_batchbuffer_emit_object_command(struct intel_batchbuffer *batch,
1165                                          int index,
1166                                          int head_offset,
1167                                          int batchbuffer_offset,
1168                                          int head_size,
1169                                          int tail_size,
1170                                          int number_mb_cmds,
1171                                          int first_object,
1172                                          int last_object,
1173                                          int last_slice,
1174                                          int mb_x,
1175                                          int mb_y,
1176                                          int width_in_mbs,
1177                                          int qp)
1178 {
1179     BEGIN_BATCH(batch, 12);
1180     
1181     OUT_BATCH(batch, CMD_MEDIA_OBJECT | (12 - 2));
1182     OUT_BATCH(batch, index);
1183     OUT_BATCH(batch, 0);
1184     OUT_BATCH(batch, 0);
1185     OUT_BATCH(batch, 0);
1186     OUT_BATCH(batch, 0);
1187    
1188     /*inline data */
1189     OUT_BATCH(batch, head_offset);
1190     OUT_BATCH(batch, batchbuffer_offset);
1191     OUT_BATCH(batch, 
1192               head_size << 16 |
1193               tail_size);
1194     OUT_BATCH(batch,
1195               number_mb_cmds << 16 |
1196               first_object << 2 |
1197               last_object << 1 |
1198               last_slice);
1199     OUT_BATCH(batch,
1200               mb_y << 8 |
1201               mb_x);
1202     OUT_BATCH(batch,
1203               qp << 16 |
1204               width_in_mbs);
1205
1206     ADVANCE_BATCH(batch);
1207 }
1208
1209 static void
1210 gen6_mfc_avc_batchbuffer_slice_command(VADriverContextP ctx,
1211                                        struct intel_encoder_context *encoder_context,
1212                                        VAEncSliceParameterBufferH264 *slice_param,
1213                                        int head_offset,
1214                                        unsigned short head_size,
1215                                        unsigned short tail_size,
1216                                        int batchbuffer_offset,
1217                                        int qp,
1218                                        int last_slice)
1219 {
1220     struct intel_batchbuffer *batch = encoder_context->base.batch;
1221     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1222     int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
1223     int total_mbs = slice_param->num_macroblocks;
1224     int number_mb_cmds = 128;
1225     int starting_mb = 0;
1226     int last_object = 0;
1227     int first_object = 1;
1228     int i;
1229     int mb_x, mb_y;
1230     int index = (slice_param->slice_type == SLICE_TYPE_I) ? MFC_BATCHBUFFER_AVC_INTRA : MFC_BATCHBUFFER_AVC_INTER;
1231
1232     for (i = 0; i < total_mbs / number_mb_cmds; i++) {
1233         last_object = (total_mbs - starting_mb) == number_mb_cmds;
1234         mb_x = (slice_param->macroblock_address + starting_mb) % width_in_mbs;
1235         mb_y = (slice_param->macroblock_address + starting_mb) / width_in_mbs;
1236         assert(mb_x <= 255 && mb_y <= 255);
1237
1238         starting_mb += number_mb_cmds;
1239
1240         gen6_mfc_batchbuffer_emit_object_command(batch,
1241                                                  index,
1242                                                  head_offset,
1243                                                  batchbuffer_offset,
1244                                                  head_size,
1245                                                  tail_size,
1246                                                  number_mb_cmds,
1247                                                  first_object,
1248                                                  last_object,
1249                                                  last_slice,
1250                                                  mb_x,
1251                                                  mb_y,
1252                                                  width_in_mbs,
1253                                                  qp);
1254
1255         if (first_object) {
1256             head_offset += head_size;
1257             batchbuffer_offset += head_size;
1258         }
1259
1260         if (last_object) {
1261             head_offset += tail_size;
1262             batchbuffer_offset += tail_size;
1263         }
1264
1265         batchbuffer_offset += number_mb_cmds * CMD_LEN_IN_OWORD;
1266
1267         first_object = 0;
1268     }
1269
1270     if (!last_object) {
1271         last_object = 1;
1272         number_mb_cmds = total_mbs % number_mb_cmds;
1273         mb_x = (slice_param->macroblock_address + starting_mb) % width_in_mbs;
1274         mb_y = (slice_param->macroblock_address + starting_mb) / width_in_mbs;
1275         assert(mb_x <= 255 && mb_y <= 255);
1276         starting_mb += number_mb_cmds;
1277
1278         gen6_mfc_batchbuffer_emit_object_command(batch,
1279                                                  index,
1280                                                  head_offset,
1281                                                  batchbuffer_offset,
1282                                                  head_size,
1283                                                  tail_size,
1284                                                  number_mb_cmds,
1285                                                  first_object,
1286                                                  last_object,
1287                                                  last_slice,
1288                                                  mb_x,
1289                                                  mb_y,
1290                                                  width_in_mbs,
1291                                                  qp);
1292     }
1293 }
1294                           
1295 /*
1296  * Return the space consumed in the aux batchbuffer, in OWORDs (16 bytes).
1297  */
1298 static int
1299 gen6_mfc_avc_batchbuffer_slice(VADriverContextP ctx,
1300                                struct encode_state *encode_state,
1301                                struct intel_encoder_context *encoder_context,
1302                                int slice_index,
1303                                int batchbuffer_offset)
1304 {
1305     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1306     struct intel_batchbuffer *slice_batch = mfc_context->aux_batchbuffer;
1307     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
1308     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
1309     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[slice_index]->buffer; 
1310     int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
1311     int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
1312     int last_slice = (pSliceParameter->macroblock_address + pSliceParameter->num_macroblocks) == (width_in_mbs * height_in_mbs);
1313     int qp = pPicParameter->pic_init_qp + pSliceParameter->slice_qp_delta;
1314     unsigned int rate_control_mode = encoder_context->rate_control_mode;
1315     unsigned char *slice_header = NULL;
1316     int slice_header_length_in_bits = 0;
1317     unsigned int tail_data[] = { 0x0, 0x0 };
1318     long head_offset;
1319     int old_used = intel_batchbuffer_used_size(slice_batch), used;
1320     unsigned short head_size, tail_size;
1321     int slice_type = pSliceParameter->slice_type;
1322
1323     if (rate_control_mode == VA_RC_CBR) {
1324         qp = mfc_context->bit_rate_control_context[slice_type].QpPrimeY;
1325         pSliceParameter->slice_qp_delta = qp - pPicParameter->pic_init_qp;
1326     }
1327
1328     /* only 8-bit pixel bit-depth is supported */
1329     assert(pSequenceParameter->bit_depth_luma_minus8 == 0);
1330     assert(pSequenceParameter->bit_depth_chroma_minus8 == 0);
1331     assert(pPicParameter->pic_init_qp >= 0 && pPicParameter->pic_init_qp < 52);
1332     assert(qp >= 0 && qp < 52);
1333
1334     head_offset = old_used / 16;
1335     gen6_mfc_avc_slice_state(ctx,
1336                              pPicParameter,
1337                              pSliceParameter,
1338                              encode_state,
1339                              encoder_context,
1340                              (rate_control_mode == VA_RC_CBR),
1341                              qp,
1342                              slice_batch);
1343
1344     if (slice_index == 0)
1345         intel_mfc_avc_pipeline_header_programing(ctx, encode_state, encoder_context, slice_batch);
1346
1347     slice_header_length_in_bits = build_avc_slice_header(pSequenceParameter, pPicParameter, pSliceParameter, &slice_header);
1348
1349     // slice header
1350     mfc_context->insert_object(ctx,
1351                                encoder_context,
1352                                (unsigned int *)slice_header,
1353                                ALIGN(slice_header_length_in_bits, 32) >> 5,
1354                                slice_header_length_in_bits & 0x1f,
1355                                5,  /* first 5 bytes are start code + nal unit type */
1356                                1,
1357                                0,
1358                                1,
1359                                slice_batch);
1360     free(slice_header);
1361
1362     intel_batchbuffer_align(slice_batch, 16); /* align to an OWORD boundary */
1363     used = intel_batchbuffer_used_size(slice_batch);
1364     head_size = (used - old_used) / 16;
1365     old_used = used;
1366
1367     /* tail: two zero DWORDs for the last slice, one otherwise */
1368     if (last_slice) {    
1369         mfc_context->insert_object(ctx,
1370                                    encoder_context,
1371                                    tail_data,
1372                                    2,
1373                                    8,
1374                                    2,
1375                                    1,
1376                                    1,
1377                                    0,
1378                                    slice_batch);
1379     } else {
1380         mfc_context->insert_object(ctx,
1381                                    encoder_context,
1382                                    tail_data,
1383                                    1,
1384                                    8,
1385                                    1,
1386                                    1,
1387                                    1,
1388                                    0,
1389                                    slice_batch);
1390     }
1391
1392     intel_batchbuffer_align(slice_batch, 16); /* align to an OWORD boundary */
1393     used = intel_batchbuffer_used_size(slice_batch);
1394     tail_size = (used - old_used) / 16;
1395
1396    
1397     gen6_mfc_avc_batchbuffer_slice_command(ctx,
1398                                            encoder_context,
1399                                            pSliceParameter,
1400                                            head_offset,
1401                                            head_size,
1402                                            tail_size,
1403                                            batchbuffer_offset,
1404                                            qp,
1405                                            last_slice);
1406
1407     return head_size + tail_size + pSliceParameter->num_macroblocks * CMD_LEN_IN_OWORD;
1408 }
1409
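/*
 * Build the hardware (PAK) slice batchbuffer on the render ring: set up the
 * GPE pipeline, then walk every slice and emit the MEDIA_OBJECT commands
 * that drive the batchbuffer kernels.
 */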
1410 static void
1411 gen6_mfc_avc_batchbuffer_pipeline(VADriverContextP ctx,
1412                                   struct encode_state *encode_state,
1413                                   struct intel_encoder_context *encoder_context)
1414 {
1415     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1416     struct intel_batchbuffer *batch = encoder_context->base.batch;
1417     int i, size, offset = 0;
1418     intel_batchbuffer_start_atomic(batch, 0x4000); 
1419     gen6_gpe_pipeline_setup(ctx, &mfc_context->gpe_context, batch);
1420
1421     for ( i = 0; i < encode_state->num_slice_params_ext; i++) {
1422         size = gen6_mfc_avc_batchbuffer_slice(ctx, encode_state, encoder_context, i, offset);
1423         offset += size;
1424     }
1425
1426     intel_batchbuffer_end_atomic(batch);
1427     intel_batchbuffer_flush(batch);
1428 }
1429
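/* Prepare the surfaces, interface descriptors and constants for the GPE
 * kernels, then run the batchbuffer-building pipeline. */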
1430 static void
1431 gen6_mfc_build_avc_batchbuffer(VADriverContextP ctx, 
1432                                struct encode_state *encode_state,
1433                                struct intel_encoder_context *encoder_context)
1434 {
1435     gen6_mfc_batchbuffer_surfaces_setup(ctx, encode_state, encoder_context);
1436     gen6_mfc_batchbuffer_idrt_setup(ctx, encode_state, encoder_context);
1437     gen6_mfc_batchbuffer_constant_setup(ctx, encode_state, encoder_context);
1438     gen6_mfc_avc_batchbuffer_pipeline(ctx, encode_state, encoder_context);
1439 }
1440
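/*
 * Hardware path: let the media kernels assemble the per-slice MFC commands
 * into mfc_batchbuffer_surface.bo. The returned bo carries an extra
 * reference, which the caller releases with dri_bo_unreference().
 */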
1441 static dri_bo *
1442 gen6_mfc_avc_hardware_batchbuffer(VADriverContextP ctx,
1443                                   struct encode_state *encode_state,
1444                                   struct intel_encoder_context *encoder_context)
1445 {
1446     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1447
1448     gen6_mfc_build_avc_batchbuffer(ctx, encode_state, encoder_context);
1449     dri_bo_reference(mfc_context->mfc_batchbuffer_surface.bo);
1450
1451     return mfc_context->mfc_batchbuffer_surface.bo;
1452 }
1453
1454 #endif
1455
1456
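/*
 * Top-level BCS programming for an AVC frame: build the per-slice batch
 * (software or hardware path), emit the picture-level state on the BCS
 * ring, then chain to the slice batch with MI_BATCH_BUFFER_START.
 */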
1457 static void
1458 gen6_mfc_avc_pipeline_programing(VADriverContextP ctx,
1459                                  struct encode_state *encode_state,
1460                                  struct intel_encoder_context *encoder_context)
1461 {
1462     struct intel_batchbuffer *batch = encoder_context->base.batch;
1463     dri_bo *slice_batch_bo;
1464
1465     if ( intel_mfc_interlace_check(ctx, encode_state, encoder_context) ) {
1466         fprintf(stderr, "The current VA driver doesn't support interlace mode!\n");
1467         assert(0);
1468         return; 
1469     }
1470
1471 #if __SOFTWARE__
1472     slice_batch_bo = gen6_mfc_avc_software_batchbuffer(ctx, encode_state, encoder_context);
1473 #else
1474     slice_batch_bo = gen6_mfc_avc_hardware_batchbuffer(ctx, encode_state, encoder_context);
1475 #endif
1476
1477     // begin programming
1478     intel_batchbuffer_start_atomic_bcs(batch, 0x4000); 
1479     intel_batchbuffer_emit_mi_flush(batch);
1480     
1481     // picture-level programming
1482     gen6_mfc_avc_pipeline_picture_programing(ctx, encode_state, encoder_context);
1483
1484     BEGIN_BCS_BATCH(batch, 2);
1485     OUT_BCS_BATCH(batch, MI_BATCH_BUFFER_START | (1 << 8));
1486     OUT_BCS_RELOC(batch,
1487                   slice_batch_bo,
1488                   I915_GEM_DOMAIN_COMMAND, 0, 
1489                   0);
1490     ADVANCE_BCS_BATCH(batch);
1491
1492     // end programming
1493     intel_batchbuffer_end_atomic(batch);
1494
1495     dri_bo_unreference(slice_batch_bo);
1496 }
1497
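/*
 * Encode one picture. In CBR mode the frame is re-encoded until the BRC
 * (intel_mfc_brc_postpack) reports no HRD violation, or gives up with an
 * unrepairable overflow/underflow at the QP limits.
 */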
1498 static VAStatus
1499 gen6_mfc_avc_encode_picture(VADriverContextP ctx, 
1500                             struct encode_state *encode_state,
1501                             struct intel_encoder_context *encoder_context)
1502 {
1503     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1504     unsigned int rate_control_mode = encoder_context->rate_control_mode;
1505     int current_frame_bits_size;
1506     int sts;
1507  
1508     for (;;) {
1509         gen6_mfc_init(ctx, encode_state, encoder_context);
1510         gen6_mfc_avc_prepare(ctx, encode_state, encoder_context);
1511         /* Program the BCS pipeline */
1512         gen6_mfc_avc_pipeline_programing(ctx, encode_state, encoder_context);   // fill the pipeline
1513         gen6_mfc_run(ctx, encode_state, encoder_context);
1514         if (rate_control_mode == VA_RC_CBR /*|| rate_control_mode == VA_RC_VBR*/) {
1515             gen6_mfc_stop(ctx, encode_state, encoder_context, &current_frame_bits_size);
1516             sts = intel_mfc_brc_postpack(encode_state, mfc_context, current_frame_bits_size);
1517             if (sts == BRC_NO_HRD_VIOLATION) {
1518                 intel_mfc_hrd_context_update(encode_state, mfc_context);
1519                 break;
1520             }
1521             else if (sts == BRC_OVERFLOW_WITH_MIN_QP || sts == BRC_UNDERFLOW_WITH_MAX_QP) {
1522                 if (!mfc_context->hrd.violation_noted) {
1523                     fprintf(stderr, "Unrepairable %s!\n", (sts == BRC_OVERFLOW_WITH_MIN_QP)? "overflow": "underflow");
1524                     mfc_context->hrd.violation_noted = 1;
1525                 }
1526                 return VA_STATUS_SUCCESS;
1527             }
1528         } else {
1529             break;
1530         }
1531     }
1532
1533     return VA_STATUS_SUCCESS;
1534 }
1535
1536 VAStatus
1537 gen6_mfc_pipeline(VADriverContextP ctx,
1538                   VAProfile profile,
1539                   struct encode_state *encode_state,
1540                   struct intel_encoder_context *encoder_context)
1541 {
1542     VAStatus vaStatus;
1543
1544     switch (profile) {
1545     case VAProfileH264Baseline:
1546     case VAProfileH264Main:
1547     case VAProfileH264High:
1548         vaStatus = gen6_mfc_avc_encode_picture(ctx, encode_state, encoder_context);
1549         break;
1550
1551         /* FIXME: add support for other profiles */
1552     default:
1553         vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
1554         break;
1555     }
1556
1557     return vaStatus;
1558 }
1559
1560 void
1561 gen6_mfc_context_destroy(void *context)
1562 {
1563     struct gen6_mfc_context *mfc_context = context;
1564     int i;
1565
1566     dri_bo_unreference(mfc_context->post_deblocking_output.bo);
1567     mfc_context->post_deblocking_output.bo = NULL;
1568
1569     dri_bo_unreference(mfc_context->pre_deblocking_output.bo);
1570     mfc_context->pre_deblocking_output.bo = NULL;
1571
1572     dri_bo_unreference(mfc_context->uncompressed_picture_source.bo);
1573     mfc_context->uncompressed_picture_source.bo = NULL;
1574
1575     dri_bo_unreference(mfc_context->mfc_indirect_pak_bse_object.bo); 
1576     mfc_context->mfc_indirect_pak_bse_object.bo = NULL;
1577
1578     for (i = 0; i < NUM_MFC_DMV_BUFFERS; i++){
1579         dri_bo_unreference(mfc_context->direct_mv_buffers[i].bo);
1580         mfc_context->direct_mv_buffers[i].bo = NULL;
1581     }
1582
1583     dri_bo_unreference(mfc_context->intra_row_store_scratch_buffer.bo);
1584     mfc_context->intra_row_store_scratch_buffer.bo = NULL;
1585
1586     dri_bo_unreference(mfc_context->macroblock_status_buffer.bo);
1587     mfc_context->macroblock_status_buffer.bo = NULL;
1588
1589     dri_bo_unreference(mfc_context->deblocking_filter_row_store_scratch_buffer.bo);
1590     mfc_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;
1591
1592     dri_bo_unreference(mfc_context->bsd_mpc_row_store_scratch_buffer.bo);
1593     mfc_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;
1594
1595
1596     for (i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++){
1597         dri_bo_unreference(mfc_context->reference_surfaces[i].bo);
1598         mfc_context->reference_surfaces[i].bo = NULL;  
1599     }
1600
1601     i965_gpe_context_destroy(&mfc_context->gpe_context);
1602
1603     dri_bo_unreference(mfc_context->mfc_batchbuffer_surface.bo);
1604     mfc_context->mfc_batchbuffer_surface.bo = NULL;
1605
1606     dri_bo_unreference(mfc_context->aux_batchbuffer_surface.bo);
1607     mfc_context->aux_batchbuffer_surface.bo = NULL;
1608
1609     if (mfc_context->aux_batchbuffer)
1610         intel_batchbuffer_free(mfc_context->aux_batchbuffer);
1611
1612     mfc_context->aux_batchbuffer = NULL;
1613
1614     free(mfc_context);
1615 }
1616
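/*
 * Allocate and initialize the gen6 MFC context: configure the GPE context
 * (binding table, interface descriptors, CURBE and VFE state), load the
 * batchbuffer kernels, and install the gen6 hooks used by the common
 * encoder code.
 */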
1617 Bool gen6_mfc_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
1618 {
1619     struct gen6_mfc_context *mfc_context = calloc(1, sizeof(struct gen6_mfc_context));
1620
1621     mfc_context->gpe_context.surface_state_binding_table.length = (SURFACE_STATE_PADDED_SIZE + sizeof(unsigned int)) * MAX_MEDIA_SURFACES_GEN6;
1622
1623     mfc_context->gpe_context.idrt.max_entries = MAX_GPE_KERNELS;
1624     mfc_context->gpe_context.idrt.entry_size = sizeof(struct gen6_interface_descriptor_data);
1625
1626     mfc_context->gpe_context.curbe.length = 32 * 4;
1627
1628     mfc_context->gpe_context.vfe_state.max_num_threads = 60 - 1;
1629     mfc_context->gpe_context.vfe_state.num_urb_entries = 16;
1630     mfc_context->gpe_context.vfe_state.gpgpu_mode = 0;
1631     mfc_context->gpe_context.vfe_state.urb_entry_size = 59 - 1;
1632     mfc_context->gpe_context.vfe_state.curbe_allocation_size = 37 - 1;
1633
1634     i965_gpe_load_kernels(ctx,
1635                           &mfc_context->gpe_context,
1636                           gen6_mfc_kernels,
1637                           NUM_MFC_KERNEL);
1638
1639     mfc_context->pipe_mode_select = gen6_mfc_pipe_mode_select;
1640     mfc_context->set_surface_state = gen6_mfc_surface_state;
1641     mfc_context->ind_obj_base_addr_state = gen6_mfc_ind_obj_base_addr_state;
1642     mfc_context->avc_img_state = gen6_mfc_avc_img_state;
1643     mfc_context->avc_qm_state = gen6_mfc_avc_qm_state;
1644     mfc_context->avc_fqm_state = gen6_mfc_avc_fqm_state;
1645     mfc_context->insert_object = gen6_mfc_avc_insert_object;
1646     mfc_context->buffer_suface_setup = i965_gpe_buffer_suface_setup;
1647
1648     encoder_context->mfc_context = mfc_context;
1649     encoder_context->mfc_context_destroy = gen6_mfc_context_destroy;
1650     encoder_context->mfc_pipeline = gen6_mfc_pipeline;
1651     encoder_context->mfc_brc_prepare = intel_mfc_brc_prepare;
1652
1653     return True;
1654 }