Use the common API to write avc SPS/PPS/SEI info on SNB/IVY/HSW
src/gen6_mfc.c (platform/upstream/libva-intel-driver.git)
1 /*
2  * Copyright © 2010-2011 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  *
24  * Authors:
25  *    Zhou Chang <chang.zhou@intel.com>
26  *
27  */
28
29 #include <stdio.h>
30 #include <stdlib.h>
31 #include <string.h>
32 #include <assert.h>
33 #include <math.h>
34
35 #include "intel_batchbuffer.h"
36 #include "i965_defines.h"
37 #include "i965_structs.h"
38 #include "i965_drv_video.h"
39 #include "i965_encoder.h"
40 #include "i965_encoder_utils.h"
41 #include "gen6_mfc.h"
42 #include "gen6_vme.h"
43
44
45 static const uint32_t gen6_mfc_batchbuffer_avc_intra[][4] = {
46 #include "shaders/utils/mfc_batchbuffer_avc_intra.g6b"
47 };
48
49 static const uint32_t gen6_mfc_batchbuffer_avc_inter[][4] = {
50 #include "shaders/utils/mfc_batchbuffer_avc_inter.g6b"
51 };
52
53 static struct i965_kernel gen6_mfc_kernels[] = {
54     {
55         "MFC AVC INTRA BATCHBUFFER ",
56         MFC_BATCHBUFFER_AVC_INTRA,
57         gen6_mfc_batchbuffer_avc_intra,
58         sizeof(gen6_mfc_batchbuffer_avc_intra),
59         NULL
60     },
61
62     {
63         "MFC AVC INTER BATCHBUFFER ",
64         MFC_BATCHBUFFER_AVC_INTER,
65         gen6_mfc_batchbuffer_avc_inter,
66         sizeof(gen6_mfc_batchbuffer_avc_inter),
67         NULL
68     },
69 };
70
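/*
 * Program MFX_PIPE_MODE_SELECT on the BCS (video) ring: put the MFX engine
 * into AVC encoding mode, enable whichever of the pre-/post-deblocking
 * output buffers was set up for this frame, and keep stream-out disabled
 * (the in-line comment below notes that advanced QP/bitrate control would
 * require enabling it).
 */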
71 static void
72 gen6_mfc_pipe_mode_select(VADriverContextP ctx,
73                           int standard_select,
74                           struct intel_encoder_context *encoder_context)
75 {
76     struct intel_batchbuffer *batch = encoder_context->base.batch;
77     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
78
79     assert(standard_select == MFX_FORMAT_AVC);
80
81     BEGIN_BCS_BATCH(batch, 4);
82
83     OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (4 - 2));
84     OUT_BCS_BATCH(batch,
85                   (1 << 10) | /* disable Stream-Out; advanced QP/bitrate control needs it enabled */
86                   ((!!mfc_context->post_deblocking_output.bo) << 9)  | /* Post Deblocking Output */
87                   ((!!mfc_context->pre_deblocking_output.bo) << 8)  | /* Pre Deblocking Output */
88                   (0 << 7)  | /* disable TLB prefetch */
89                   (0 << 5)  | /* not in stitch mode */
90                   (1 << 4)  | /* encoding mode */
91                   (2 << 0));  /* Standard Select: AVC */
92     OUT_BCS_BATCH(batch,
93                   (0 << 20) | /* round flag in PB slice */
94                   (0 << 19) | /* round flag in Intra8x8 */
95                   (0 << 7)  | /* expand NOA bus flag */
96                   (1 << 6)  | /* must be 1 */
97                   (0 << 5)  | /* disable clock gating for NOA */
98                   (0 << 4)  | /* terminate if AVC motion and POC table error occurs */
99                   (0 << 3)  | /* terminate if AVC mbdata error occurs */
100                   (0 << 2)  | /* terminate if AVC CABAC/CAVLC decode error occurs */
101                   (0 << 1)  | /* AVC long field motion vector */
102                   (0 << 0));  /* always calculate AVC ILDB boundary strength */
103     OUT_BCS_BATCH(batch, 0);
104
105     ADVANCE_BCS_BATCH(batch);
106 }
107
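/*
 * Program MFX_SURFACE_STATE for the reconstructed picture: a planar 4:2:0
 * surface with interleaved U/V (NV12), Y-tiled, using the width/height and
 * pitch cached in mfc_context->surface_state, with h_pitch giving the Y
 * offset of the chroma (Cb) plane.
 */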
108 static void
109 gen6_mfc_surface_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
110 {
111     struct intel_batchbuffer *batch = encoder_context->base.batch;
112     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
113
114     BEGIN_BCS_BATCH(batch, 6);
115
116     OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
117     OUT_BCS_BATCH(batch, 0);
118     OUT_BCS_BATCH(batch,
119                   ((mfc_context->surface_state.height - 1) << 19) |
120                   ((mfc_context->surface_state.width - 1) << 6));
121     OUT_BCS_BATCH(batch,
122                   (MFX_SURFACE_PLANAR_420_8 << 28) | /* 420 planar YUV surface */
123                   (1 << 27) | /* must be 1 for interleave U/V, hardware requirement */
124                   (0 << 22) | /* surface object control state, FIXME??? */
125                   ((mfc_context->surface_state.w_pitch - 1) << 3) | /* pitch */
126                   (0 << 2)  | /* must be 0 for interleave U/V */
127                   (1 << 1)  | /* must be y-tiled */
128                   (I965_TILEWALK_YMAJOR << 0));                         /* tile walk, TILEWALK_YMAJOR */
129     OUT_BCS_BATCH(batch,
130                   (0 << 16) |                                                           /* must be 0 for interleave U/V */
131                   (mfc_context->surface_state.h_pitch));                /* y offset for U(cb) */
132     OUT_BCS_BATCH(batch, 0);
133     ADVANCE_BCS_BATCH(batch);
134 }
135
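/*
 * Program MFX_PIPE_BUF_ADDR_STATE (24 dwords): pre- and post-deblocking
 * outputs, the uncompressed source picture, the macroblock status /
 * stream-out buffer, the intra and deblocking-filter row-store scratch
 * buffers, and the 16 reference picture slots (zero for unused slots).
 */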
136 static void
137 gen6_mfc_pipe_buf_addr_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
138 {
139     struct intel_batchbuffer *batch = encoder_context->base.batch;
140     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
141     int i;
142
143     BEGIN_BCS_BATCH(batch, 24);
144
145     OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (24 - 2));
146
147     if (mfc_context->pre_deblocking_output.bo)
148         OUT_BCS_RELOC(batch, mfc_context->pre_deblocking_output.bo,
149                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
150                       0);
151     else
152         OUT_BCS_BATCH(batch, 0);                                                                                        /* pre output addr   */
153
154     if (mfc_context->post_deblocking_output.bo)
155         OUT_BCS_RELOC(batch, mfc_context->post_deblocking_output.bo,
156                       I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
157                       0);                                                                                       /* post output addr  */ 
158     else
159         OUT_BCS_BATCH(batch, 0);
160
161     OUT_BCS_RELOC(batch, mfc_context->uncompressed_picture_source.bo,
162                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
163                   0);                                                                                   /* uncompressed data */
164     OUT_BCS_RELOC(batch, mfc_context->macroblock_status_buffer.bo,
165                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
166                   0);                                                                                   /* StreamOut data*/
167     OUT_BCS_RELOC(batch, mfc_context->intra_row_store_scratch_buffer.bo,
168                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
169                   0);   
170     OUT_BCS_RELOC(batch, mfc_context->deblocking_filter_row_store_scratch_buffer.bo,
171                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
172                   0);
173     /* 7..22 Reference pictures*/
174     for (i = 0; i < ARRAY_ELEMS(mfc_context->reference_surfaces); i++) {
175         if ( mfc_context->reference_surfaces[i].bo != NULL) {
176             OUT_BCS_RELOC(batch, mfc_context->reference_surfaces[i].bo,
177                           I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
178                           0);                   
179         } else {
180             OUT_BCS_BATCH(batch, 0);
181         }
182     }
183     OUT_BCS_RELOC(batch, mfc_context->macroblock_status_buffer.bo,
184                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
185                   0);                                                                                   /* Macroblock status buffer*/
186
187     ADVANCE_BCS_BATCH(batch);
188 }
189
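/*
 * Program MFX_IND_OBJ_BASE_ADDR_STATE: point the indirect MV object at the
 * VME output buffer (PAK reads motion vectors from there) and point the
 * PAK-BSE (bitstream output) object at the coded buffer, using
 * mfc_indirect_pak_bse_object.end_offset to bound it.
 */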
190 static void
191 gen6_mfc_ind_obj_base_addr_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
192 {
193     struct intel_batchbuffer *batch = encoder_context->base.batch;
194     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
195     struct gen6_vme_context *vme_context = encoder_context->vme_context;
196
197     BEGIN_BCS_BATCH(batch, 11);
198
199     OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
200     OUT_BCS_BATCH(batch, 0);
201     OUT_BCS_BATCH(batch, 0);
202     /* MFX Indirect MV Object Base Address */
203     OUT_BCS_RELOC(batch, vme_context->vme_output.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
204     OUT_BCS_BATCH(batch, 0);    
205     OUT_BCS_BATCH(batch, 0);
206     OUT_BCS_BATCH(batch, 0);
207     OUT_BCS_BATCH(batch, 0);
208     OUT_BCS_BATCH(batch, 0);
209     /*MFC Indirect PAK-BSE Object Base Address for Encoder*/    
210     OUT_BCS_RELOC(batch,
211                   mfc_context->mfc_indirect_pak_bse_object.bo,
212                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
213                   0);
214     OUT_BCS_RELOC(batch,
215                   mfc_context->mfc_indirect_pak_bse_object.bo,
216                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
217                   mfc_context->mfc_indirect_pak_bse_object.end_offset);
218
219     ADVANCE_BCS_BATCH(batch);
220 }
221
222 static void
223 gen6_mfc_bsp_buf_base_addr_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
224 {
225     struct intel_batchbuffer *batch = encoder_context->base.batch;
226     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
227
228     BEGIN_BCS_BATCH(batch, 4);
229
230     OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
231     OUT_BCS_RELOC(batch, mfc_context->bsd_mpc_row_store_scratch_buffer.bo,
232                   I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
233                   0);
234     OUT_BCS_BATCH(batch, 0);
235     OUT_BCS_BATCH(batch, 0);
236
237     ADVANCE_BCS_BATCH(batch);
238 }
239
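/*
 * Program MFX_AVC_IMG_STATE from the sequence/picture parameters: picture
 * size in macroblocks, entropy coding mode (CAVLC/CABAC), the
 * direct_8x8_inference and transform_8x8_mode flags, plus fixed
 * rate-control masks and the inter/intra macroblock size limits.
 */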
240 static void
241 gen6_mfc_avc_img_state(VADriverContextP ctx,struct encode_state *encode_state,
242                        struct intel_encoder_context *encoder_context)
243 {
244     struct intel_batchbuffer *batch = encoder_context->base.batch;
245     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
246     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
247     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
248     int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
249     int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
250
251     BEGIN_BCS_BATCH(batch, 13);
252     OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (13 - 2));
253     OUT_BCS_BATCH(batch, 
254                   ((width_in_mbs * height_in_mbs) & 0xFFFF));
255     OUT_BCS_BATCH(batch, 
256                   (height_in_mbs << 16) | 
257                   (width_in_mbs << 0));
258     OUT_BCS_BATCH(batch, 
259                   (0 << 24) |     /*Second Chroma QP Offset*/
260                   (0 << 16) |     /*Chroma QP Offset*/
261                   (0 << 14) |   /*Max-bit conformance Intra flag*/
262                   (0 << 13) |   /*Max Macroblock size conformance Inter flag*/
263                   (1 << 12) |   /*Should always be written as "1" */
264                   (0 << 10) |   /*QM Preset Flag */
265                   (0 << 8)  |   /*Image Structure*/
266                   (0 << 0) );   /*Current Decoded Image Frame Store ID, reserved in Encode mode*/
267     OUT_BCS_BATCH(batch,
268                   (400 << 16) |   /*Minimum Frame size*/
269                   (0 << 15) |   /*Disable reading of Macroblock Status Buffer*/
270                   (0 << 14) |   /*Load BitStream Pointer only once, one slice per frame*/
271                   (0 << 13) |   /*CABAC 0 word insertion test enable*/
272                   (1 << 12) |   /*MVUnpackedEnable,compliant to DXVA*/
273                   (1 << 10) |   /*Chroma Format IDC, 4:2:0*/
274                   (pPicParameter->pic_fields.bits.entropy_coding_mode_flag << 7)  |   /*0:CAVLC encoding mode,1:CABAC*/
275                   (0 << 6)  |   /*Only valid for VLD decoding mode*/
276                   (0 << 5)  |   /*Constrained Intra Prediction Flag, from PPS*/
277                   (pSequenceParameter->seq_fields.bits.direct_8x8_inference_flag << 4)  |   /*Direct 8x8 inference flag*/
278                   (pPicParameter->pic_fields.bits.transform_8x8_mode_flag << 3)  |   /*8x8 or 4x4 IDCT Transform Mode Flag*/
279                   (1 << 2)  |   /*Frame MB only flag*/
280                   (0 << 1)  |   /*MBAFF mode is not active*/
281                   (0 << 0) );   /*Field picture flag*/
282     OUT_BCS_BATCH(batch, 
283                   (1<<16)   |   /*Frame Size Rate Control Flag*/  
284                   (1<<12)   |   
285                   (1<<9)    |   /*MB level Rate Control Enabling Flag*/
286                   (1 << 3)  |   /*FrameBitRateMinReportMask*/
287                   (1 << 2)  |   /*FrameBitRateMaxReportMask*/
288                   (1 << 1)  |   /*InterMBMaxSizeReportMask*/
289                   (1 << 0) );   /*IntraMBMaxSizeReportMask*/
290     OUT_BCS_BATCH(batch,                        /*Inter and Intra Conformance Max size limit*/
291                   (0x0600 << 16) |              /*InterMbMaxSz 192 Byte*/
292                   (0x0800) );                   /*IntraMbMaxSz 256 Byte*/
293     OUT_BCS_BATCH(batch, 0x00000000);   /*Reserved : MBZReserved*/
294     OUT_BCS_BATCH(batch, 0x01020304);   /*Slice QP Delta for bitrate control*/                  
295     OUT_BCS_BATCH(batch, 0xFEFDFCFB);           
296     OUT_BCS_BATCH(batch, 0x80601004);   /*MAX = 128KB, MIN = 64KB*/
297     OUT_BCS_BATCH(batch, 0x00800001);   
298     OUT_BCS_BATCH(batch, 0);
299
300     ADVANCE_BCS_BATCH(batch);
301 }
302
303 static void
304 gen6_mfc_avc_directmode_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
305 {
306     struct intel_batchbuffer *batch = encoder_context->base.batch;
307     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
308
309     int i;
310
311     BEGIN_BCS_BATCH(batch, 69);
312
313     OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));
314
315     /* Reference frames and Current frames */
316     for(i = 0; i < NUM_MFC_DMV_BUFFERS; i++) {
317         if ( mfc_context->direct_mv_buffers[i].bo != NULL) { 
318             OUT_BCS_RELOC(batch, mfc_context->direct_mv_buffers[i].bo,
319                           I915_GEM_DOMAIN_INSTRUCTION, 0,
320                           0);
321         } else {
322             OUT_BCS_BATCH(batch, 0);
323         }
324     }
325
326     /* POC list */
327     for(i = 0; i < 32; i++) {
328         OUT_BCS_BATCH(batch, i/2);
329     }
330     OUT_BCS_BATCH(batch, 0);
331     OUT_BCS_BATCH(batch, 0);
332
333     ADVANCE_BCS_BATCH(batch);
334 }
335
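/*
 * Program MFX_AVC_SLICE_STATE for one slice: slice type, reference count
 * and weight denominators, QP and deblocking controls, the first MB of this
 * slice and of the next one, and the rate-control grow/shrink/correct
 * values looked up from the per-slice-type bit_rate_control_context.
 */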
336 static void
337 gen6_mfc_avc_slice_state(VADriverContextP ctx,
338                          VAEncPictureParameterBufferH264 *pic_param,
339                          VAEncSliceParameterBufferH264 *slice_param,
340                          struct encode_state *encode_state,
341                          struct intel_encoder_context *encoder_context,
342                          int rate_control_enable,
343                          int qp,
344                          struct intel_batchbuffer *batch)
345 {
346     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
347     int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
348     int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
349     int beginmb = slice_param->macroblock_address;
350     int endmb = beginmb + slice_param->num_macroblocks;
351     int beginx = beginmb % width_in_mbs;
352     int beginy = beginmb / width_in_mbs;
353     int nextx =  endmb % width_in_mbs;
354     int nexty = endmb / width_in_mbs;
355     int slice_type = slice_param->slice_type;
356     int last_slice = (endmb == (width_in_mbs * height_in_mbs));
357     int bit_rate_control_target, maxQpN, maxQpP;
358     unsigned char correct[6], grow, shrink;
359     int i;
360     int weighted_pred_idc = 0;
361     unsigned int luma_log2_weight_denom = slice_param->luma_log2_weight_denom;
362     unsigned int chroma_log2_weight_denom = slice_param->chroma_log2_weight_denom;
363
364     if (batch == NULL)
365         batch = encoder_context->base.batch;
366
367     bit_rate_control_target = slice_type;
368     if (slice_type == SLICE_TYPE_SP)
369         bit_rate_control_target = SLICE_TYPE_P;
370     else if (slice_type == SLICE_TYPE_SI)
371         bit_rate_control_target = SLICE_TYPE_I;
372
373     if (slice_type == SLICE_TYPE_P) {
374         weighted_pred_idc = pic_param->pic_fields.bits.weighted_pred_flag;
375     } else if (slice_type == SLICE_TYPE_B) {
376         weighted_pred_idc = pic_param->pic_fields.bits.weighted_bipred_idc;
377
378         if (weighted_pred_idc == 2) {
379             /* 8.4.3 - Derivation process for prediction weights (8-279) */
380             luma_log2_weight_denom = 5;
381             chroma_log2_weight_denom = 5;
382         }
383     }
384
385     maxQpN = mfc_context->bit_rate_control_context[bit_rate_control_target].MaxQpNegModifier;
386     maxQpP = mfc_context->bit_rate_control_context[bit_rate_control_target].MaxQpPosModifier;
387
388     for (i = 0; i < 6; i++)
389         correct[i] = mfc_context->bit_rate_control_context[bit_rate_control_target].Correct[i];
390
391     grow = mfc_context->bit_rate_control_context[bit_rate_control_target].GrowInit + 
392         (mfc_context->bit_rate_control_context[bit_rate_control_target].GrowResistance << 4);
393     shrink = mfc_context->bit_rate_control_context[bit_rate_control_target].ShrinkInit + 
394         (mfc_context->bit_rate_control_context[bit_rate_control_target].ShrinkResistance << 4);
395
396     BEGIN_BCS_BATCH(batch, 11);
397
398     OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2) );
399     OUT_BCS_BATCH(batch, slice_type);                   /*Slice Type: I:P:B Slice*/
400
401     if (slice_type == SLICE_TYPE_I) {
402         OUT_BCS_BATCH(batch, 0);                        /*no reference frames and pred_weight_table*/
403     } else {
404         OUT_BCS_BATCH(batch,
405                       (1 << 16) |                       /*1 reference frame*/
406                       (chroma_log2_weight_denom << 8) |
407                       (luma_log2_weight_denom << 0));
408     }
409
410     OUT_BCS_BATCH(batch, 
411                   (weighted_pred_idc << 30) |
412                   (slice_param->direct_spatial_mv_pred_flag<<29) |             /*Direct Prediction Type*/
413                   (slice_param->disable_deblocking_filter_idc << 27) |
414                   (slice_param->cabac_init_idc << 24) |
415                   (qp<<16) |                    /*Slice Quantization Parameter*/
416                   ((slice_param->slice_beta_offset_div2 & 0xf) << 8) |
417                   ((slice_param->slice_alpha_c0_offset_div2 & 0xf) << 0));
418     OUT_BCS_BATCH(batch,
419                   (beginy << 24) |                      /*First MB X&Y, the beginning position of the current slice*/
420                   (beginx << 16) |
421                   slice_param->macroblock_address );
422     OUT_BCS_BATCH(batch, (nexty << 16) | nextx);                       /*Next slice first MB X&Y*/
423     OUT_BCS_BATCH(batch, 
424                   (0/*rate_control_enable*/ << 31) |            /*in CBR mode RateControlCounterEnable = enable*/
425                   (1 << 30) |           /*ResetRateControlCounter*/
426                   (0 << 28) |           /*RC Trigger Mode = Always Rate Control*/
427                   (4 << 24) |     /*RC Stable Tolerance, middle level*/
428                   (0/*rate_control_enable*/ << 23) |     /*RC Panic Enable*/                 
429                   (0 << 22) |     /*QP mode, don't modify CBP*/
430                   (0 << 21) |     /*MB Type Direct Conversion Enabled*/ 
431                   (0 << 20) |     /*MB Type Skip Conversion Enabled*/ 
432                   (last_slice << 19) |     /*IsLastSlice*/
433                   (0 << 18) |   /*BitstreamOutputFlag Compressed BitStream Output Disable Flag 0:enable 1:disable*/
434                   (1 << 17) |       /*HeaderPresentFlag*/       
435                   (1 << 16) |       /*SliceData PresentFlag*/
436                   (1 << 15) |       /*TailPresentFlag*/
437                   (1 << 13) |       /*RBSP NAL TYPE*/   
438                   (0 << 12) );    /*CabacZeroWordInsertionEnable*/
439     OUT_BCS_BATCH(batch, mfc_context->mfc_indirect_pak_bse_object.offset);
440     OUT_BCS_BATCH(batch,
441                   (maxQpN << 24) |     /*Target QP - 24 is lowest QP*/ 
442                   (maxQpP << 16) |     /*Target QP + 20 is highest QP*/
443                   (shrink << 8)  |
444                   (grow << 0));   
445     OUT_BCS_BATCH(batch,
446                   (correct[5] << 20) |
447                   (correct[4] << 16) |
448                   (correct[3] << 12) |
449                   (correct[2] << 8) |
450                   (correct[1] << 4) |
451                   (correct[0] << 0));
452     OUT_BCS_BATCH(batch, 0);
453
454     ADVANCE_BCS_BATCH(batch);
455 }
456
457 static void gen6_mfc_avc_qm_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
458 {
459     struct intel_batchbuffer *batch = encoder_context->base.batch;
460     int i;
461
462     BEGIN_BCS_BATCH(batch, 58);
463
464     OUT_BCS_BATCH(batch, MFX_AVC_QM_STATE | 56);
465     OUT_BCS_BATCH(batch, 0xFF ) ; 
466     for( i = 0; i < 56; i++) {
467         OUT_BCS_BATCH(batch, 0x10101010); 
468     }   
469
470     ADVANCE_BCS_BATCH(batch);
471 }
472
473 static void gen6_mfc_avc_fqm_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
474 {
475     struct intel_batchbuffer *batch = encoder_context->base.batch;
476     int i;
477
478     BEGIN_BCS_BATCH(batch, 113);
479     OUT_BCS_BATCH(batch, MFC_AVC_FQM_STATE | (113 - 2));
480
481     for(i = 0; i < 112;i++) {
482         OUT_BCS_BATCH(batch, 0x10001000);
483     }   
484
485     ADVANCE_BCS_BATCH(batch);   
486 }
487
488 static void
489 gen6_mfc_avc_ref_idx_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
490 {
491     struct intel_batchbuffer *batch = encoder_context->base.batch;
492     int i;
493
494     BEGIN_BCS_BATCH(batch, 10);
495     OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | 8); 
496     OUT_BCS_BATCH(batch, 0);                  //Select L0
497     OUT_BCS_BATCH(batch, 0x80808020);         //Only 1 reference
498     for(i = 0; i < 7; i++) {
499         OUT_BCS_BATCH(batch, 0x80808080);
500     }   
501     ADVANCE_BCS_BATCH(batch);
502
503     BEGIN_BCS_BATCH(batch, 10);
504     OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | 8); 
505     OUT_BCS_BATCH(batch, 1);                  //Select L1
506     OUT_BCS_BATCH(batch, 0x80808022);         //Only 1 reference
507     for(i = 0; i < 7; i++) {
508         OUT_BCS_BATCH(batch, 0x80808080);
509     }   
510     ADVANCE_BCS_BATCH(batch);
511 }
512         
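/*
 * Emit an MFC_AVC_INSERT_OBJECT command that copies raw bits (SPS, PPS, SEI
 * or a slice header produced by the common packing helpers) into the output
 * bitstream. The flags select emulation-prevention byte handling, whether
 * this is the last header, and whether it ends the slice.
 */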
513 static void
514 gen6_mfc_avc_insert_object(VADriverContextP ctx, struct intel_encoder_context *encoder_context,
515                            unsigned int *insert_data, int length_in_dws, int data_bits_in_last_dw,
516                            int skip_emul_byte_count, int is_last_header, int is_end_of_slice, int emulation_flag,
517                            struct intel_batchbuffer *batch)
518 {
519     if (batch == NULL)
520         batch = encoder_context->base.batch;
521
522     BEGIN_BCS_BATCH(batch, length_in_dws + 2);
523
524     OUT_BCS_BATCH(batch, MFC_AVC_INSERT_OBJECT | (length_in_dws + 2 - 2));
525
526     OUT_BCS_BATCH(batch,
527                   (0 << 16) |   /* always start at offset 0 */
528                   (data_bits_in_last_dw << 8) |
529                   (skip_emul_byte_count << 4) |
530                   (!!emulation_flag << 3) |
531                   ((!!is_last_header) << 2) |
532                   ((!!is_end_of_slice) << 1) |
533                   (0 << 0));    /* FIXME: ??? */
534
535     intel_batchbuffer_data(batch, insert_data, length_in_dws * 4);
536     ADVANCE_BCS_BATCH(batch);
537 }
538
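/*
 * (Re)initialize the per-context MFC buffers before encoding: drop any
 * buffers still referenced from a previous frame, allocate the intra and
 * deblocking row-store scratch buffers, the macroblock status buffer and
 * the BSD/MPC row-store buffer, recreate the auxiliary BSD batchbuffer used
 * for slice headers, and initialize the GPE context.
 */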
539 static void gen6_mfc_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
540 {
541     struct i965_driver_data *i965 = i965_driver_data(ctx);
542     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
543     dri_bo *bo;
544     int i;
545
546     /*Encode common setup for MFC*/
547     dri_bo_unreference(mfc_context->post_deblocking_output.bo);
548     mfc_context->post_deblocking_output.bo = NULL;
549
550     dri_bo_unreference(mfc_context->pre_deblocking_output.bo);
551     mfc_context->pre_deblocking_output.bo = NULL;
552
553     dri_bo_unreference(mfc_context->uncompressed_picture_source.bo);
554     mfc_context->uncompressed_picture_source.bo = NULL;
555
556     dri_bo_unreference(mfc_context->mfc_indirect_pak_bse_object.bo); 
557     mfc_context->mfc_indirect_pak_bse_object.bo = NULL;
558
559     for (i = 0; i < NUM_MFC_DMV_BUFFERS; i++){
560         if (mfc_context->direct_mv_buffers[i].bo != NULL)
561             dri_bo_unreference(mfc_context->direct_mv_buffers[i].bo);
562         mfc_context->direct_mv_buffers[i].bo = NULL;
563     }
564
565     for (i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++){
566         if (mfc_context->reference_surfaces[i].bo != NULL)
567             dri_bo_unreference(mfc_context->reference_surfaces[i].bo);
568         mfc_context->reference_surfaces[i].bo = NULL;  
569     }
570
571     dri_bo_unreference(mfc_context->intra_row_store_scratch_buffer.bo);
572     bo = dri_bo_alloc(i965->intel.bufmgr,
573                       "Buffer",
574                       128 * 64,
575                       64);
576     assert(bo);
577     mfc_context->intra_row_store_scratch_buffer.bo = bo;
578
579     dri_bo_unreference(mfc_context->macroblock_status_buffer.bo);
580     bo = dri_bo_alloc(i965->intel.bufmgr,
581                       "Buffer",
582                       128*128*16,
583                       64);
584     assert(bo);
585     mfc_context->macroblock_status_buffer.bo = bo;
586
587     dri_bo_unreference(mfc_context->deblocking_filter_row_store_scratch_buffer.bo);
588     bo = dri_bo_alloc(i965->intel.bufmgr,
589                       "Buffer",
590                       49152,  /* 6 * 128 * 64 */
591                       64);
592     assert(bo);
593     mfc_context->deblocking_filter_row_store_scratch_buffer.bo = bo;
594
595     dri_bo_unreference(mfc_context->bsd_mpc_row_store_scratch_buffer.bo);
596     bo = dri_bo_alloc(i965->intel.bufmgr,
597                       "Buffer",
598                       12288, /* 1.5 * 128 * 64 */
599                       0x1000);
600     assert(bo);
601     mfc_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
602
603     dri_bo_unreference(mfc_context->mfc_batchbuffer_surface.bo);
604     mfc_context->mfc_batchbuffer_surface.bo = NULL;
605
606     dri_bo_unreference(mfc_context->aux_batchbuffer_surface.bo);
607     mfc_context->aux_batchbuffer_surface.bo = NULL;
608
609     if (mfc_context->aux_batchbuffer)
610         intel_batchbuffer_free(mfc_context->aux_batchbuffer);
611
612     mfc_context->aux_batchbuffer = intel_batchbuffer_new(&i965->intel, I915_EXEC_BSD);
613     mfc_context->aux_batchbuffer_surface.bo = mfc_context->aux_batchbuffer->buffer;
614     dri_bo_reference(mfc_context->aux_batchbuffer_surface.bo);
615     mfc_context->aux_batchbuffer_surface.pitch = 16;
616     mfc_context->aux_batchbuffer_surface.num_blocks = mfc_context->aux_batchbuffer->size / 16;
617     mfc_context->aux_batchbuffer_surface.size_block = 16;
618
619     i965_gpe_context_init(ctx, &mfc_context->gpe_context);
620 }
621
622 static void gen6_mfc_avc_pipeline_picture_programing( VADriverContextP ctx,
623                                       struct encode_state *encode_state,
624                                       struct intel_encoder_context *encoder_context)
625 {
626     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
627
628     mfc_context->pipe_mode_select(ctx, MFX_FORMAT_AVC, encoder_context);
629     mfc_context->set_surface_state(ctx, encoder_context);
630     mfc_context->ind_obj_base_addr_state(ctx, encoder_context);
631     gen6_mfc_pipe_buf_addr_state(ctx, encoder_context);
632     gen6_mfc_bsp_buf_base_addr_state(ctx, encoder_context);
633     mfc_context->avc_img_state(ctx, encode_state, encoder_context);
634     mfc_context->avc_qm_state(ctx, encoder_context);
635     mfc_context->avc_fqm_state(ctx, encoder_context);
636     gen6_mfc_avc_directmode_state(ctx, encoder_context); 
637     gen6_mfc_avc_ref_idx_state(ctx, encoder_context);
638 }
639
640 static void 
641 gen6_mfc_free_avc_surface(void **data)
642 {
643     struct gen6_mfc_avc_surface_aux *avc_surface = *data;
644
645     if (!avc_surface)
646         return;
647
648     dri_bo_unreference(avc_surface->dmv_top);
649     avc_surface->dmv_top = NULL;
650     dri_bo_unreference(avc_surface->dmv_bottom);
651     avc_surface->dmv_bottom = NULL;
652
653     free(avc_surface);
654     *data = NULL;
655 }
656
657
658
659
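/*
 * Per-frame preparation: scan the slice parameters to see whether the
 * in-loop deblocking filter is enabled anywhere (which decides whether the
 * reconstructed picture goes to the post- or pre-deblocking output), attach
 * direct-MV buffers to the current and reference surfaces, record the
 * surface geometry, and bind the application's coded buffer as the PAK-BSE
 * output object.
 */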
660 static VAStatus gen6_mfc_avc_prepare(VADriverContextP ctx, 
661                                      struct encode_state *encode_state,
662                                      struct intel_encoder_context *encoder_context)
663 {
664     struct i965_driver_data *i965 = i965_driver_data(ctx);
665     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
666     struct object_surface *obj_surface; 
667     struct object_buffer *obj_buffer;
668     struct gen6_mfc_avc_surface_aux* gen6_avc_surface;
669     dri_bo *bo;
670     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
671     VAStatus vaStatus = VA_STATUS_SUCCESS;
672     int i, j, enable_avc_ildb = 0;
673     VAEncSliceParameterBufferH264 *slice_param;
674     VACodedBufferSegment *coded_buffer_segment;
675     unsigned char *flag = NULL;
676
677     for (j = 0; j < encode_state->num_slice_params_ext && enable_avc_ildb == 0; j++) {
678         assert(encode_state->slice_params_ext && encode_state->slice_params_ext[j]->buffer);
679         slice_param = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[j]->buffer;
680
681         for (i = 0; i < encode_state->slice_params_ext[j]->num_elements; i++) {
682             assert((slice_param->slice_type == SLICE_TYPE_I) ||
683                    (slice_param->slice_type == SLICE_TYPE_SI) ||
684                    (slice_param->slice_type == SLICE_TYPE_P) ||
685                    (slice_param->slice_type == SLICE_TYPE_SP) ||
686                    (slice_param->slice_type == SLICE_TYPE_B));
687
688             if (slice_param->disable_deblocking_filter_idc != 1) {
689                 enable_avc_ildb = 1;
690                 break;
691             }
692
693             slice_param++;
694         }
695     }
696
697     /*Setup all the input & output objects*/
698
699     /* Setup current frame and current direct mv buffer*/
700     obj_surface = SURFACE(pPicParameter->CurrPic.picture_id);
701     assert(obj_surface);
702     i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);
703
704     if ( obj_surface->private_data == NULL) {
705         gen6_avc_surface = calloc(sizeof(struct gen6_mfc_avc_surface_aux), 1);
706         gen6_avc_surface->dmv_top = 
707             dri_bo_alloc(i965->intel.bufmgr,
708                          "Buffer",
709                          68*8192, 
710                          64);
711         gen6_avc_surface->dmv_bottom = 
712             dri_bo_alloc(i965->intel.bufmgr,
713                          "Buffer",
714                          68*8192, 
715                          64);
716         assert(gen6_avc_surface->dmv_top);
717         assert(gen6_avc_surface->dmv_bottom);
718         obj_surface->private_data = (void *)gen6_avc_surface;
719         obj_surface->free_private_data = (void *)gen6_mfc_free_avc_surface; 
720     }
721     gen6_avc_surface = (struct gen6_mfc_avc_surface_aux*) obj_surface->private_data;
722     mfc_context->direct_mv_buffers[NUM_MFC_DMV_BUFFERS - 2].bo = gen6_avc_surface->dmv_top;
723     mfc_context->direct_mv_buffers[NUM_MFC_DMV_BUFFERS - 1].bo = gen6_avc_surface->dmv_bottom;
724     dri_bo_reference(gen6_avc_surface->dmv_top);
725     dri_bo_reference(gen6_avc_surface->dmv_bottom);
726
727     if (enable_avc_ildb) {
728         mfc_context->post_deblocking_output.bo = obj_surface->bo;
729         dri_bo_reference(mfc_context->post_deblocking_output.bo);
730     } else {
731         mfc_context->pre_deblocking_output.bo = obj_surface->bo;
732         dri_bo_reference(mfc_context->pre_deblocking_output.bo);
733     }
734
735     mfc_context->surface_state.width = obj_surface->orig_width;
736     mfc_context->surface_state.height = obj_surface->orig_height;
737     mfc_context->surface_state.w_pitch = obj_surface->width;
738     mfc_context->surface_state.h_pitch = obj_surface->height;
739     
740     /* Setup reference frames and direct mv buffers*/
741     for(i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++) {
742         if ( pPicParameter->ReferenceFrames[i].picture_id != VA_INVALID_ID ) { 
743             obj_surface = SURFACE(pPicParameter->ReferenceFrames[i].picture_id);
744             assert(obj_surface);
745             if (obj_surface->bo != NULL) {
746                 mfc_context->reference_surfaces[i].bo = obj_surface->bo;
747                 dri_bo_reference(obj_surface->bo);
748             }
749             /* Check DMV buffer */
750             if ( obj_surface->private_data == NULL) {
751                 
752                 gen6_avc_surface = calloc(sizeof(struct gen6_mfc_avc_surface_aux), 1);
753                 gen6_avc_surface->dmv_top = 
754                     dri_bo_alloc(i965->intel.bufmgr,
755                                  "Buffer",
756                                  68*8192, 
757                                  64);
758                 gen6_avc_surface->dmv_bottom = 
759                     dri_bo_alloc(i965->intel.bufmgr,
760                                  "Buffer",
761                                  68*8192, 
762                                  64);
763                 assert(gen6_avc_surface->dmv_top);
764                 assert(gen6_avc_surface->dmv_bottom);
765                 obj_surface->private_data = gen6_avc_surface;
766                 obj_surface->free_private_data = gen6_mfc_free_avc_surface; 
767             }
768     
769             gen6_avc_surface = (struct gen6_mfc_avc_surface_aux*) obj_surface->private_data;
770             /* Setup DMV buffer */
771             mfc_context->direct_mv_buffers[i*2].bo = gen6_avc_surface->dmv_top;
772             mfc_context->direct_mv_buffers[i*2+1].bo = gen6_avc_surface->dmv_bottom; 
773             dri_bo_reference(gen6_avc_surface->dmv_top);
774             dri_bo_reference(gen6_avc_surface->dmv_bottom);
775         } else {
776             break;
777         }
778     }
779         
780     obj_surface = SURFACE(encoder_context->input_yuv_surface);
781     assert(obj_surface && obj_surface->bo);
782     mfc_context->uncompressed_picture_source.bo = obj_surface->bo;
783     dri_bo_reference(mfc_context->uncompressed_picture_source.bo);
784
785     obj_buffer = BUFFER (pPicParameter->coded_buf); /* FIXME: fix this later */
786     bo = obj_buffer->buffer_store->bo;
787     assert(bo);
788     mfc_context->mfc_indirect_pak_bse_object.bo = bo;
789     mfc_context->mfc_indirect_pak_bse_object.offset = I965_CODEDBUFFER_SIZE;
790     mfc_context->mfc_indirect_pak_bse_object.end_offset = ALIGN(obj_buffer->size_element - 0x1000, 0x1000);
791     dri_bo_reference(mfc_context->mfc_indirect_pak_bse_object.bo);
792     
793     dri_bo_map(bo, 1);
794     coded_buffer_segment = (VACodedBufferSegment *)bo->virtual;
795     flag = (unsigned char *)(coded_buffer_segment + 1);
796     *flag = 0;
797     dri_bo_unmap(bo);
798
799     return vaStatus;
800 }
801
802 static VAStatus gen6_mfc_run(VADriverContextP ctx, 
803                              struct encode_state *encode_state,
804                              struct intel_encoder_context *encoder_context)
805 {
806     struct intel_batchbuffer *batch = encoder_context->base.batch;
807
808     intel_batchbuffer_flush(batch);             //run the pipeline
809
810     return VA_STATUS_SUCCESS;
811 }
812
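/* Report the encoded size back to the caller by mapping the coded buffer
 * and reading the size of its first VACodedBufferSegment. */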
813 static VAStatus
814 gen6_mfc_stop(VADriverContextP ctx, 
815               struct encode_state *encode_state,
816               struct intel_encoder_context *encoder_context,
817               int *encoded_bits_size)
818 {
819     VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
820     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
821     VACodedBufferSegment *coded_buffer_segment;
822     
823     vaStatus = i965_MapBuffer(ctx, pPicParameter->coded_buf, (void **)&coded_buffer_segment);
824     assert(vaStatus == VA_STATUS_SUCCESS);
825     *encoded_bits_size = coded_buffer_segment->size * 8;
826     i965_UnmapBuffer(ctx, pPicParameter->coded_buf);
827
828     return VA_STATUS_SUCCESS;
829 }
830
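/*
 * Two ways of building the per-macroblock PAK command buffer follow: a
 * software path (inside #if __SOFTWARE__) that writes every
 * MFC_AVC_PAK_OBJECT from the CPU using the mapped VME output, and a
 * hardware-assisted path (#else) that runs the MFC batchbuffer media
 * kernels to generate the same commands on the GPU.
 */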
831 #if __SOFTWARE__
832
833 static int
834 gen6_mfc_avc_pak_object_intra(VADriverContextP ctx, int x, int y, int end_mb, int qp,unsigned int *msg,
835                               struct intel_encoder_context *encoder_context,
836                               unsigned char target_mb_size, unsigned char max_mb_size,
837                               struct intel_batchbuffer *batch)
838 {
839     int len_in_dwords = 11;
840
841     if (batch == NULL)
842         batch = encoder_context->base.batch;
843
844     BEGIN_BCS_BATCH(batch, len_in_dwords);
845
846     OUT_BCS_BATCH(batch, MFC_AVC_PAK_OBJECT | (len_in_dwords - 2));
847     OUT_BCS_BATCH(batch, 0);
848     OUT_BCS_BATCH(batch, 0);
849     OUT_BCS_BATCH(batch, 
850                   (0 << 24) |           /* PackedMvNum, Debug*/
851                   (0 << 20) |           /* No motion vector */
852                   (1 << 19) |           /* CbpDcY */
853                   (1 << 18) |           /* CbpDcU */
854                   (1 << 17) |           /* CbpDcV */
855                   (msg[0] & 0xFFFF) );
856
857     OUT_BCS_BATCH(batch, (0xFFFF << 16) | (y << 8) | x);                /* Code Block Pattern for Y*/
858     OUT_BCS_BATCH(batch, 0x000F000F);                                                   /* Code Block Pattern */                
859     OUT_BCS_BATCH(batch, (0 << 27) | (end_mb << 26) | qp);      /* Last MB */
860
861     /*Stuff for Intra MB*/
862     OUT_BCS_BATCH(batch, msg[1]);                       /* We use Intra16x16, not 4x4 pred mode */
863     OUT_BCS_BATCH(batch, msg[2]);       
864     OUT_BCS_BATCH(batch, msg[3]&0xFC);          
865     
866     /*MaxSizeInWord and TargetSizeInWord*/
867     OUT_BCS_BATCH(batch, (max_mb_size << 24) |
868                   (target_mb_size << 16) );
869
870     ADVANCE_BCS_BATCH(batch);
871
872     return len_in_dwords;
873 }
874
875 static int
876 gen6_mfc_avc_pak_object_inter(VADriverContextP ctx, int x, int y, int end_mb, int qp,
877                               unsigned int *msg, unsigned int offset,
878                               struct intel_encoder_context *encoder_context,
879                               unsigned char target_mb_size,unsigned char max_mb_size, int slice_type,
880                               struct intel_batchbuffer *batch)
881 {
882     int len_in_dwords = 11;
883
884     if (batch == NULL)
885         batch = encoder_context->base.batch;
886
887     BEGIN_BCS_BATCH(batch, len_in_dwords);
888
889     OUT_BCS_BATCH(batch, MFC_AVC_PAK_OBJECT | (len_in_dwords - 2));
890
891     OUT_BCS_BATCH(batch, msg[2]);         /* 32 MV*/
892     OUT_BCS_BATCH(batch, offset);
893
894     OUT_BCS_BATCH(batch, msg[0]);
895
896     OUT_BCS_BATCH(batch, (0xFFFF<<16) | (y << 8) | x);        /* Code Block Pattern for Y*/
897     OUT_BCS_BATCH(batch, 0x000F000F);                         /* Code Block Pattern */  
898 #if 0 
899     if ( slice_type == SLICE_TYPE_B) {
900         OUT_BCS_BATCH(batch, (0xF<<28) | (end_mb << 26) | qp);  /* Last MB */
901     } else {
902         OUT_BCS_BATCH(batch, (end_mb << 26) | qp);      /* Last MB */
903     }
904 #else
905     OUT_BCS_BATCH(batch, (end_mb << 26) | qp);  /* Last MB */
906 #endif
907
908
909     /*Stuff for Inter MB*/
910     OUT_BCS_BATCH(batch, msg[1]);        
911     OUT_BCS_BATCH(batch, 0x0);    
912     OUT_BCS_BATCH(batch, 0x0);        
913
914     /*MaxSizeInWord and TargetSizeInWord*/
915     OUT_BCS_BATCH(batch, (max_mb_size << 24) |
916                   (target_mb_size << 16) );
917
918     ADVANCE_BCS_BATCH(batch);
919
920     return len_in_dwords;
921 }
922
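/*
 * Software path for one slice: emit the slice state, insert the stream
 * headers (via intel_mfc_avc_pipeline_header_programing) for the first
 * slice, insert the slice header, then walk the slice's macroblocks and
 * emit one PAK object per MB from the VME output (falling back to an intra
 * object when the VME marked an MB as intra), and finally append the tail
 * data.
 */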
923 static void 
924 gen6_mfc_avc_pipeline_slice_programing(VADriverContextP ctx,
925                                        struct encode_state *encode_state,
926                                        struct intel_encoder_context *encoder_context,
927                                        int slice_index,
928                                        struct intel_batchbuffer *slice_batch)
929 {
930     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
931     struct gen6_vme_context *vme_context = encoder_context->vme_context;
932     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
933     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
934     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[slice_index]->buffer; 
935     unsigned int *msg = NULL, offset = 0;
936     int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
937     int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
938     int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
939     int last_slice = (pSliceParameter->macroblock_address + pSliceParameter->num_macroblocks) == (width_in_mbs * height_in_mbs);
940     int i,x,y;
941     int qp = pPicParameter->pic_init_qp + pSliceParameter->slice_qp_delta;
942     unsigned int rate_control_mode = encoder_context->rate_control_mode;
943     unsigned char *slice_header = NULL;
944     int slice_header_length_in_bits = 0;
945     unsigned int tail_data[] = { 0x0, 0x0 };
946     int slice_type = pSliceParameter->slice_type;
947
948
949     if (rate_control_mode == VA_RC_CBR) {
950         qp = mfc_context->bit_rate_control_context[slice_type].QpPrimeY;
951         pSliceParameter->slice_qp_delta = qp - pPicParameter->pic_init_qp;
952     }
953
954     /* only support for 8-bit pixel bit-depth */
955     assert(pSequenceParameter->bit_depth_luma_minus8 == 0);
956     assert(pSequenceParameter->bit_depth_chroma_minus8 == 0);
957     assert(pPicParameter->pic_init_qp >= 0 && pPicParameter->pic_init_qp < 52);
958     assert(qp >= 0 && qp < 52);
959
960     gen6_mfc_avc_slice_state(ctx, 
961                              pPicParameter,
962                              pSliceParameter,
963                              encode_state, encoder_context,
964                              (rate_control_mode == VA_RC_CBR), qp, slice_batch);
965
966     if ( slice_index == 0) 
967         intel_mfc_avc_pipeline_header_programing(ctx, encode_state, encoder_context, slice_batch);
968
969     slice_header_length_in_bits = build_avc_slice_header(pSequenceParameter, pPicParameter, pSliceParameter, &slice_header);
970
971     // slice header
972     mfc_context->insert_object(ctx, encoder_context,
973                                (unsigned int *)slice_header, ALIGN(slice_header_length_in_bits, 32) >> 5, slice_header_length_in_bits & 0x1f,
974                                5,  /* first 5 bytes are start code + nal unit type */
975                                1, 0, 1, slice_batch);
976
977     dri_bo_map(vme_context->vme_output.bo , 1);
978     msg = (unsigned int *)vme_context->vme_output.bo->virtual;
979
980     if (is_intra) {
981         msg += pSliceParameter->macroblock_address * INTRA_VME_OUTPUT_IN_DWS;
982     } else {
983         msg += pSliceParameter->macroblock_address * INTER_VME_OUTPUT_IN_DWS;
984         msg += 32; /* the first 32 DWs are MVs */
985         offset = pSliceParameter->macroblock_address * INTER_VME_OUTPUT_IN_BYTES;
986     }
987    
988     for (i = pSliceParameter->macroblock_address; 
989          i < pSliceParameter->macroblock_address + pSliceParameter->num_macroblocks; i++) {
990         int last_mb = (i == (pSliceParameter->macroblock_address + pSliceParameter->num_macroblocks - 1) );
991         x = i % width_in_mbs;
992         y = i / width_in_mbs;
993
994         if (is_intra) {
995             assert(msg);
996             gen6_mfc_avc_pak_object_intra(ctx, x, y, last_mb, qp, msg, encoder_context, 0, 0, slice_batch);
997             msg += INTRA_VME_OUTPUT_IN_DWS;
998         } else {
999             if (msg[0] & INTRA_MB_FLAG_MASK) {
1000                 gen6_mfc_avc_pak_object_intra(ctx, x, y, last_mb, qp, msg, encoder_context, 0, 0, slice_batch);
1001             } else {
1002                 gen6_mfc_avc_pak_object_inter(ctx, x, y, last_mb, qp, msg, offset, encoder_context, 0, 0, pSliceParameter->slice_type, slice_batch);
1003             }
1004
1005             msg += INTER_VME_OUTPUT_IN_DWS;
1006             offset += INTER_VME_OUTPUT_IN_BYTES;
1007         }
1008     }
1009    
1010     dri_bo_unmap(vme_context->vme_output.bo);
1011
1012     if ( last_slice ) {    
1013         mfc_context->insert_object(ctx, encoder_context,
1014                                    tail_data, 2, 8,
1015                                    2, 1, 1, 0, slice_batch);
1016     } else {
1017         mfc_context->insert_object(ctx, encoder_context,
1018                                    tail_data, 1, 8,
1019                                    1, 1, 1, 0, slice_batch);
1020     }
1021
1022     free(slice_header);
1023
1024 }
1025
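/*
 * Build the whole slice-level batch buffer on the CPU: program every slice
 * into a freshly allocated BSD batch, align it and terminate it with
 * MI_BATCH_BUFFER_END, then hand the underlying bo back to the caller.
 */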
1026 static dri_bo *
1027 gen6_mfc_avc_software_batchbuffer(VADriverContextP ctx,
1028                                   struct encode_state *encode_state,
1029                                   struct intel_encoder_context *encoder_context)
1030 {
1031     struct i965_driver_data *i965 = i965_driver_data(ctx);
1032     struct intel_batchbuffer *batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_BSD);
1033     dri_bo *batch_bo = batch->buffer;
1034     int i;
1035
1036     for (i = 0; i < encode_state->num_slice_params_ext; i++) {
1037         gen6_mfc_avc_pipeline_slice_programing(ctx, encode_state, encoder_context, i, batch);
1038     }
1039
1040     intel_batchbuffer_align(batch, 8);
1041     
1042     BEGIN_BCS_BATCH(batch, 2);
1043     OUT_BCS_BATCH(batch, 0);
1044     OUT_BCS_BATCH(batch, MI_BATCH_BUFFER_END);
1045     ADVANCE_BCS_BATCH(batch);
1046
1047     dri_bo_reference(batch_bo);
1048     intel_batchbuffer_free(batch);
1049
1050     return batch_bo;
1051 }
1052
1053 #else
1054
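/*
 * Hardware-assisted path: bind the VME output and the auxiliary
 * batchbuffer (slice headers) as input surfaces for the MFC batchbuffer
 * kernels, and allocate the output surface that will receive the generated
 * PAK commands.
 */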
1055 static void
1056 gen6_mfc_batchbuffer_surfaces_input(VADriverContextP ctx,
1057                                     struct encode_state *encode_state,
1058                                     struct intel_encoder_context *encoder_context)
1059
1060 {
1061     struct gen6_vme_context *vme_context = encoder_context->vme_context;
1062     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1063
1064     assert(vme_context->vme_output.bo);
1065     mfc_context->buffer_suface_setup(ctx,
1066                                      &mfc_context->gpe_context,
1067                                      &vme_context->vme_output,
1068                                      BINDING_TABLE_OFFSET(BIND_IDX_VME_OUTPUT),
1069                                      SURFACE_STATE_OFFSET(BIND_IDX_VME_OUTPUT));
1070     assert(mfc_context->aux_batchbuffer_surface.bo);
1071     mfc_context->buffer_suface_setup(ctx,
1072                                      &mfc_context->gpe_context,
1073                                      &mfc_context->aux_batchbuffer_surface,
1074                                      BINDING_TABLE_OFFSET(BIND_IDX_MFC_SLICE_HEADER),
1075                                      SURFACE_STATE_OFFSET(BIND_IDX_MFC_SLICE_HEADER));
1076 }
1077
1078 static void
1079 gen6_mfc_batchbuffer_surfaces_output(VADriverContextP ctx,
1080                                      struct encode_state *encode_state,
1081                                      struct intel_encoder_context *encoder_context)
1082
1083 {
1084     struct i965_driver_data *i965 = i965_driver_data(ctx);
1085     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1086     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
1087     int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
1088     int height_in_mbs = pSequenceParameter->picture_height_in_mbs;
1089     mfc_context->mfc_batchbuffer_surface.num_blocks = width_in_mbs * height_in_mbs + encode_state->num_slice_params_ext * 8 + 1;
1090     mfc_context->mfc_batchbuffer_surface.size_block = 16 * CMD_LEN_IN_OWORD; /* 3 OWORDs */
1091     mfc_context->mfc_batchbuffer_surface.pitch = 16;
1092     mfc_context->mfc_batchbuffer_surface.bo = dri_bo_alloc(i965->intel.bufmgr, 
1093                                                            "MFC batchbuffer",
1094                                                            mfc_context->mfc_batchbuffer_surface.num_blocks * mfc_context->mfc_batchbuffer_surface.size_block,
1095                                                            0x1000);
1096     mfc_context->buffer_suface_setup(ctx,
1097                                      &mfc_context->gpe_context,
1098                                      &mfc_context->mfc_batchbuffer_surface,
1099                                      BINDING_TABLE_OFFSET(BIND_IDX_MFC_BATCHBUFFER),
1100                                      SURFACE_STATE_OFFSET(BIND_IDX_MFC_BATCHBUFFER));
1101 }
1102
1103 static void
1104 gen6_mfc_batchbuffer_surfaces_setup(VADriverContextP ctx, 
1105                                     struct encode_state *encode_state,
1106                                     struct intel_encoder_context *encoder_context)
1107 {
1108     gen6_mfc_batchbuffer_surfaces_input(ctx, encode_state, encoder_context);
1109     gen6_mfc_batchbuffer_surfaces_output(ctx, encode_state, encoder_context);
1110 }
1111
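/*
 * Fill the interface descriptor remap table: one descriptor per MFC
 * batchbuffer kernel, pointing at the kernel binary and the shared binding
 * table, with a constant URB read length of 4.
 */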
1112 static void
1113 gen6_mfc_batchbuffer_idrt_setup(VADriverContextP ctx, 
1114                                 struct encode_state *encode_state,
1115                                 struct intel_encoder_context *encoder_context)
1116 {
1117     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1118     struct gen6_interface_descriptor_data *desc;   
1119     int i;
1120     dri_bo *bo;
1121
1122     bo = mfc_context->gpe_context.idrt.bo;
1123     dri_bo_map(bo, 1);
1124     assert(bo->virtual);
1125     desc = bo->virtual;
1126
1127     for (i = 0; i < mfc_context->gpe_context.num_kernels; i++) {
1128         struct i965_kernel *kernel;
1129
1130         kernel = &mfc_context->gpe_context.kernels[i];
1131         assert(sizeof(*desc) == 32);
1132
1133         /*Setup the descriptor table*/
1134         memset(desc, 0, sizeof(*desc));
1135         desc->desc0.kernel_start_pointer = (kernel->bo->offset >> 6);
1136         desc->desc2.sampler_count = 0;
1137         desc->desc2.sampler_state_pointer = 0;
1138         desc->desc3.binding_table_entry_count = 2;
1139         desc->desc3.binding_table_pointer = (BINDING_TABLE_OFFSET(0) >> 5);
1140         desc->desc4.constant_urb_entry_read_offset = 0;
1141         desc->desc4.constant_urb_entry_read_length = 4;
1142                 
1143         /*kernel start*/
1144         dri_bo_emit_reloc(bo,   
1145                           I915_GEM_DOMAIN_INSTRUCTION, 0,
1146                           0,
1147                           i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc0),
1148                           kernel->bo);
1149         desc++;
1150     }
1151
1152     dri_bo_unmap(bo);
1153 }
1154
1155 static void
1156 gen6_mfc_batchbuffer_constant_setup(VADriverContextP ctx, 
1157                                     struct encode_state *encode_state,
1158                                     struct intel_encoder_context *encoder_context)
1159 {
1160     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1161     
1162     (void)mfc_context;
1163 }
1164
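/*
 * Emit a single MEDIA_OBJECT that asks the selected MFC batchbuffer kernel
 * to generate PAK commands for number_mb_cmds macroblocks, passing the
 * head/tail sizes, batchbuffer offsets, starting MB position, slice flags
 * and QP as inline data.
 */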
1165 static void
1166 gen6_mfc_batchbuffer_emit_object_command(struct intel_batchbuffer *batch,
1167                                          int index,
1168                                          int head_offset,
1169                                          int batchbuffer_offset,
1170                                          int head_size,
1171                                          int tail_size,
1172                                          int number_mb_cmds,
1173                                          int first_object,
1174                                          int last_object,
1175                                          int last_slice,
1176                                          int mb_x,
1177                                          int mb_y,
1178                                          int width_in_mbs,
1179                                          int qp)
1180 {
1181     BEGIN_BATCH(batch, 12);
1182     
1183     OUT_BATCH(batch, CMD_MEDIA_OBJECT | (12 - 2));
1184     OUT_BATCH(batch, index);
1185     OUT_BATCH(batch, 0);
1186     OUT_BATCH(batch, 0);
1187     OUT_BATCH(batch, 0);
1188     OUT_BATCH(batch, 0);
1189    
1190     /*inline data */
1191     OUT_BATCH(batch, head_offset);
1192     OUT_BATCH(batch, batchbuffer_offset);
1193     OUT_BATCH(batch, 
1194               head_size << 16 |
1195               tail_size);
1196     OUT_BATCH(batch,
1197               number_mb_cmds << 16 |
1198               first_object << 2 |
1199               last_object << 1 |
1200               last_slice);
1201     OUT_BATCH(batch,
1202               mb_y << 8 |
1203               mb_x);
1204     OUT_BATCH(batch,
1205               qp << 16 |
1206               width_in_mbs);
1207
1208     ADVANCE_BATCH(batch);
1209 }
1210
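/*
 * Split one slice into chunks of up to 128 macroblocks and emit a
 * MEDIA_OBJECT per chunk, tracking the offsets of the slice header ("head")
 * and tail data so the kernel writes its output at the right place in the
 * batchbuffer surface.
 */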
1211 static void
1212 gen6_mfc_avc_batchbuffer_slice_command(VADriverContextP ctx,
1213                                        struct intel_encoder_context *encoder_context,
1214                                        VAEncSliceParameterBufferH264 *slice_param,
1215                                        int head_offset,
1216                                        unsigned short head_size,
1217                                        unsigned short tail_size,
1218                                        int batchbuffer_offset,
1219                                        int qp,
1220                                        int last_slice)
1221 {
1222     struct intel_batchbuffer *batch = encoder_context->base.batch;
1223     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1224     int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
1225     int total_mbs = slice_param->num_macroblocks;
1226     int number_mb_cmds = 128;
1227     int starting_mb = 0;
1228     int last_object = 0;
1229     int first_object = 1;
1230     int i;
1231     int mb_x, mb_y;
1232     int index = (slice_param->slice_type == SLICE_TYPE_I) ? MFC_BATCHBUFFER_AVC_INTRA : MFC_BATCHBUFFER_AVC_INTER;
1233
1234     for (i = 0; i < total_mbs / number_mb_cmds; i++) {
1235         last_object = (total_mbs - starting_mb) == number_mb_cmds;
1236         mb_x = (slice_param->macroblock_address + starting_mb) % width_in_mbs;
1237         mb_y = (slice_param->macroblock_address + starting_mb) / width_in_mbs;
1238         assert(mb_x <= 255 && mb_y <= 255);
1239
1240         starting_mb += number_mb_cmds;
1241
1242         gen6_mfc_batchbuffer_emit_object_command(batch,
1243                                                  index,
1244                                                  head_offset,
1245                                                  batchbuffer_offset,
1246                                                  head_size,
1247                                                  tail_size,
1248                                                  number_mb_cmds,
1249                                                  first_object,
1250                                                  last_object,
1251                                                  last_slice,
1252                                                  mb_x,
1253                                                  mb_y,
1254                                                  width_in_mbs,
1255                                                  qp);
1256
1257         if (first_object) {
1258             head_offset += head_size;
1259             batchbuffer_offset += head_size;
1260         }
1261
1262         if (last_object) {
1263             head_offset += tail_size;
1264             batchbuffer_offset += tail_size;
1265         }
1266
1267         batchbuffer_offset += number_mb_cmds * CMD_LEN_IN_OWORD;
1268
1269         first_object = 0;
1270     }
1271
1272     if (!last_object) {
1273         last_object = 1;
1274         number_mb_cmds = total_mbs % number_mb_cmds;
1275         mb_x = (slice_param->macroblock_address + starting_mb) % width_in_mbs;
1276         mb_y = (slice_param->macroblock_address + starting_mb) / width_in_mbs;
1277         assert(mb_x <= 255 && mb_y <= 255);
1278         starting_mb += number_mb_cmds;
1279
1280         gen6_mfc_batchbuffer_emit_object_command(batch,
1281                                                  index,
1282                                                  head_offset,
1283                                                  batchbuffer_offset,
1284                                                  head_size,
1285                                                  tail_size,
1286                                                  number_mb_cmds,
1287                                                  first_object,
1288                                                  last_object,
1289                                                  last_slice,
1290                                                  mb_x,
1291                                                  mb_y,
1292                                                  width_in_mbs,
1293                                                  qp);
1294     }
1295 }
1296                           
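     /*
      * Pack the slice header (and, for the first slice, the SPS/PPS/SEI
      * headers) into the aux batchbuffer and emit the MEDIA_OBJECT commands
      * for the kernel that fills the MFC batchbuffer for this slice.
      */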
1297 /*
1298  * return size in OWords (16 bytes)
1299  */
1300 static int
1301 gen6_mfc_avc_batchbuffer_slice(VADriverContextP ctx,
1302                                struct encode_state *encode_state,
1303                                struct intel_encoder_context *encoder_context,
1304                                int slice_index,
1305                                int batchbuffer_offset)
1306 {
1307     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1308     struct intel_batchbuffer *slice_batch = mfc_context->aux_batchbuffer;
1309     VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
1310     VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
1311     VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[slice_index]->buffer; 
1312     int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
1313     int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
1314     int last_slice = (pSliceParameter->macroblock_address + pSliceParameter->num_macroblocks) == (width_in_mbs * height_in_mbs);
1315     int qp = pPicParameter->pic_init_qp + pSliceParameter->slice_qp_delta;
1316     unsigned int rate_control_mode = encoder_context->rate_control_mode;
1317     unsigned char *slice_header = NULL;
1318     int slice_header_length_in_bits = 0;
1319     unsigned int tail_data[] = { 0x0, 0x0 };
1320     long head_offset;
1321     int old_used = intel_batchbuffer_used_size(slice_batch), used;
1322     unsigned short head_size, tail_size;
1323     int slice_type = pSliceParameter->slice_type;
1324
1325     if (rate_control_mode == VA_RC_CBR) {
1326         qp = mfc_context->bit_rate_control_context[slice_type].QpPrimeY;
1327         pSliceParameter->slice_qp_delta = qp - pPicParameter->pic_init_qp;
1328     }
1329
1330     /* only 8-bit pixel bit-depth is supported */
1331     assert(pSequenceParameter->bit_depth_luma_minus8 == 0);
1332     assert(pSequenceParameter->bit_depth_chroma_minus8 == 0);
1333     assert(pPicParameter->pic_init_qp >= 0 && pPicParameter->pic_init_qp < 52);
1334     assert(qp >= 0 && qp < 52);
1335
1336     head_offset = old_used / 16;
1337     gen6_mfc_avc_slice_state(ctx,
1338                              pPicParameter,
1339                              pSliceParameter,
1340                              encode_state,
1341                              encoder_context,
1342                              (rate_control_mode == VA_RC_CBR),
1343                              qp,
1344                              slice_batch);
1345
1346     if (slice_index == 0)
1347         intel_mfc_avc_pipeline_header_programing(ctx, encode_state, encoder_context, slice_batch);
1348
1349     slice_header_length_in_bits = build_avc_slice_header(pSequenceParameter, pPicParameter, pSliceParameter, &slice_header);
1350
1351     // slice header
1352     mfc_context->insert_object(ctx,
1353                                encoder_context,
1354                                (unsigned int *)slice_header,
1355                                ALIGN(slice_header_length_in_bits, 32) >> 5,
1356                                slice_header_length_in_bits & 0x1f,
1357                                5,  /* first 5 bytes are start code + nal unit type */
1358                                1,
1359                                0,
1360                                1,
1361                                slice_batch);
1362     free(slice_header);
1363
1364     intel_batchbuffer_align(slice_batch, 16); /* align to an OWord boundary */
1365     used = intel_batchbuffer_used_size(slice_batch);
1366     head_size = (used - old_used) / 16;
1367     old_used = used;
1368
1369     /* tail */
1370     if (last_slice) {    
1371         mfc_context->insert_object(ctx,
1372                                    encoder_context,
1373                                    tail_data,
1374                                    2,
1375                                    8,
1376                                    2,
1377                                    1,
1378                                    1,
1379                                    0,
1380                                    slice_batch);
1381     } else {
1382         mfc_context->insert_object(ctx,
1383                                    encoder_context,
1384                                    tail_data,
1385                                    1,
1386                                    8,
1387                                    1,
1388                                    1,
1389                                    1,
1390                                    0,
1391                                    slice_batch);
1392     }
1393
1394     intel_batchbuffer_align(slice_batch, 16); /* align to an OWord boundary */
1395     used = intel_batchbuffer_used_size(slice_batch);
1396     tail_size = (used - old_used) / 16;
1397
1398    
1399     gen6_mfc_avc_batchbuffer_slice_command(ctx,
1400                                            encoder_context,
1401                                            pSliceParameter,
1402                                            head_offset,
1403                                            head_size,
1404                                            tail_size,
1405                                            batchbuffer_offset,
1406                                            qp,
1407                                            last_slice);
1408
1409     return head_size + tail_size + pSliceParameter->num_macroblocks * CMD_LEN_IN_OWORD;
1410 }
1411
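     /*
      * Run the media (GPE) pipeline that generates the MFC batchbuffer: one
      * set of MEDIA_OBJECT commands per slice, with the destination offset
      * accumulated in OWords across the slices.
      */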
1412 static void
1413 gen6_mfc_avc_batchbuffer_pipeline(VADriverContextP ctx,
1414                                   struct encode_state *encode_state,
1415                                   struct intel_encoder_context *encoder_context)
1416 {
1417     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1418     struct intel_batchbuffer *batch = encoder_context->base.batch;
1419     int i, size, offset = 0;
1420     intel_batchbuffer_start_atomic(batch, 0x4000); 
1421     gen6_gpe_pipeline_setup(ctx, &mfc_context->gpe_context, batch);
1422
1423     for (i = 0; i < encode_state->num_slice_params_ext; i++) {
1424         size = gen6_mfc_avc_batchbuffer_slice(ctx, encode_state, encoder_context, i, offset);
1425         offset += size;
1426     }
1427
1428     intel_batchbuffer_end_atomic(batch);
1429     intel_batchbuffer_flush(batch);
1430 }
1431
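     /*
      * Bind the surfaces, set up the interface descriptors and (empty)
      * constants, then kick off the media pipeline that writes the MFC
      * batchbuffer.
      */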
1432 static void
1433 gen6_mfc_build_avc_batchbuffer(VADriverContextP ctx, 
1434                                struct encode_state *encode_state,
1435                                struct intel_encoder_context *encoder_context)
1436 {
1437     gen6_mfc_batchbuffer_surfaces_setup(ctx, encode_state, encoder_context);
1438     gen6_mfc_batchbuffer_idrt_setup(ctx, encode_state, encoder_context);
1439     gen6_mfc_batchbuffer_constant_setup(ctx, encode_state, encoder_context);
1440     gen6_mfc_avc_batchbuffer_pipeline(ctx, encode_state, encoder_context);
1441 }
1442
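     /*
      * Returns the hardware-generated MFC batchbuffer bo with an extra
      * reference; the caller is expected to drop it with dri_bo_unreference()
      * once the BCS batch referencing it has been emitted.
      */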
1443 static dri_bo *
1444 gen6_mfc_avc_hardware_batchbuffer(VADriverContextP ctx,
1445                                   struct encode_state *encode_state,
1446                                   struct intel_encoder_context *encoder_context)
1447 {
1448     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1449
1450     gen6_mfc_build_avc_batchbuffer(ctx, encode_state, encoder_context);
1451     dri_bo_reference(mfc_context->mfc_batchbuffer_surface.bo);
1452
1453     return mfc_context->mfc_batchbuffer_surface.bo;
1454 }
1455
1456 #endif
1457
1458
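     /*
      * Top-level BCS programming for one frame: the per-slice commands are
      * built into a separate batch buffer (on the CPU or by the hardware
      * kernels, depending on __SOFTWARE__) and chained from the main BCS
      * batch with MI_BATCH_BUFFER_START after the picture-level state has
      * been emitted.
      */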
1459 static void
1460 gen6_mfc_avc_pipeline_programing(VADriverContextP ctx,
1461                                  struct encode_state *encode_state,
1462                                  struct intel_encoder_context *encoder_context)
1463 {
1464     struct intel_batchbuffer *batch = encoder_context->base.batch;
1465     dri_bo *slice_batch_bo;
1466
1467     if (intel_mfc_interlace_check(ctx, encode_state, encoder_context)) {
1468         fprintf(stderr, "Current VA driver doesn't support interlaced mode!\n");
1469         assert(0);
1470         return; 
1471     }
1472
1473 #if __SOFTWARE__
1474     slice_batch_bo = gen6_mfc_avc_software_batchbuffer(ctx, encode_state, encoder_context);
1475 #else
1476     slice_batch_bo = gen6_mfc_avc_hardware_batchbuffer(ctx, encode_state, encoder_context);
1477 #endif
1478
1479     // begin programming
1480     intel_batchbuffer_start_atomic_bcs(batch, 0x4000); 
1481     intel_batchbuffer_emit_mi_flush(batch);
1482     
1483     // picture-level programming
1484     gen6_mfc_avc_pipeline_picture_programing(ctx, encode_state, encoder_context);
1485
1486     BEGIN_BCS_BATCH(batch, 2);
1487     OUT_BCS_BATCH(batch, MI_BATCH_BUFFER_START | (1 << 8));
1488     OUT_BCS_RELOC(batch,
1489                   slice_batch_bo,
1490                   I915_GEM_DOMAIN_COMMAND, 0, 
1491                   0);
1492     ADVANCE_BCS_BATCH(batch);
1493
1494     // end programming
1495     intel_batchbuffer_end_atomic(batch);
1496
1497     dri_bo_unreference(slice_batch_bo);
1498 }
1499
1500 static VAStatus
1501 gen6_mfc_avc_encode_picture(VADriverContextP ctx, 
1502                             struct encode_state *encode_state,
1503                             struct intel_encoder_context *encoder_context)
1504 {
1505     struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
1506     unsigned int rate_control_mode = encoder_context->rate_control_mode;
1507     int current_frame_bits_size;
1508     int sts;
1509  
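         /*
          * Under CBR the frame may be encoded more than once: after each pass
          * the BRC compares the produced frame size against the HRD model and
          * presumably adjusts QpPrimeY; encoding is retried until no HRD
          * violation remains or the QP limits have been reached.
          */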
1510     for (;;) {
1511         gen6_mfc_init(ctx, encoder_context);
1512         gen6_mfc_avc_prepare(ctx, encode_state, encoder_context);
1513         /* Program the BCS pipeline */
1514         gen6_mfc_avc_pipeline_programing(ctx, encode_state, encoder_context);   // fill the pipeline
1515         gen6_mfc_run(ctx, encode_state, encoder_context);
1516         if (rate_control_mode == VA_RC_CBR /*|| rate_control_mode == VA_RC_VBR*/) {
1517             gen6_mfc_stop(ctx, encode_state, encoder_context, &current_frame_bits_size);
1518             sts = intel_mfc_brc_postpack(encode_state, mfc_context, current_frame_bits_size);
1519             if (sts == BRC_NO_HRD_VIOLATION) {
1520                 intel_mfc_hrd_context_update(encode_state, mfc_context);
1521                 break;
1522             }
1523             else if (sts == BRC_OVERFLOW_WITH_MIN_QP || sts == BRC_UNDERFLOW_WITH_MAX_QP) {
1524                 if (!mfc_context->hrd.violation_noted) {
1525                     fprintf(stderr, "Unrepairable %s!\n", (sts == BRC_OVERFLOW_WITH_MIN_QP)? "overflow": "underflow");
1526                     mfc_context->hrd.violation_noted = 1;
1527                 }
1528                 return VA_STATUS_SUCCESS;
1529             }
1530         } else {
1531             break;
1532         }
1533     }
1534
1535     return VA_STATUS_SUCCESS;
1536 }
1537
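     /*
      * MFC (PAK) pipeline entry point registered on the encoder context;
      * only the H.264 profiles are handled so far.
      */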
1538 VAStatus
1539 gen6_mfc_pipeline(VADriverContextP ctx,
1540                   VAProfile profile,
1541                   struct encode_state *encode_state,
1542                   struct intel_encoder_context *encoder_context)
1543 {
1544     VAStatus vaStatus;
1545
1546     switch (profile) {
1547     case VAProfileH264Baseline:
1548     case VAProfileH264Main:
1549     case VAProfileH264High:
1550         vaStatus = gen6_mfc_avc_encode_picture(ctx, encode_state, encoder_context);
1551         break;
1552
1553         /* FIXME: add support for other profiles */
1554     default:
1555         vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
1556         break;
1557     }
1558
1559     return vaStatus;
1560 }
1561
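     /* Release every GEM buffer and the GPE context owned by the MFC context. */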
1562 void
1563 gen6_mfc_context_destroy(void *context)
1564 {
1565     struct gen6_mfc_context *mfc_context = context;
1566     int i;
1567
1568     dri_bo_unreference(mfc_context->post_deblocking_output.bo);
1569     mfc_context->post_deblocking_output.bo = NULL;
1570
1571     dri_bo_unreference(mfc_context->pre_deblocking_output.bo);
1572     mfc_context->pre_deblocking_output.bo = NULL;
1573
1574     dri_bo_unreference(mfc_context->uncompressed_picture_source.bo);
1575     mfc_context->uncompressed_picture_source.bo = NULL;
1576
1577     dri_bo_unreference(mfc_context->mfc_indirect_pak_bse_object.bo); 
1578     mfc_context->mfc_indirect_pak_bse_object.bo = NULL;
1579
1580     for (i = 0; i < NUM_MFC_DMV_BUFFERS; i++){
1581         dri_bo_unreference(mfc_context->direct_mv_buffers[i].bo);
1582         mfc_context->direct_mv_buffers[i].bo = NULL;
1583     }
1584
1585     dri_bo_unreference(mfc_context->intra_row_store_scratch_buffer.bo);
1586     mfc_context->intra_row_store_scratch_buffer.bo = NULL;
1587
1588     dri_bo_unreference(mfc_context->macroblock_status_buffer.bo);
1589     mfc_context->macroblock_status_buffer.bo = NULL;
1590
1591     dri_bo_unreference(mfc_context->deblocking_filter_row_store_scratch_buffer.bo);
1592     mfc_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;
1593
1594     dri_bo_unreference(mfc_context->bsd_mpc_row_store_scratch_buffer.bo);
1595     mfc_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;
1596
1597
1598     for (i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++){
1599         dri_bo_unreference(mfc_context->reference_surfaces[i].bo);
1600         mfc_context->reference_surfaces[i].bo = NULL;  
1601     }
1602
1603     i965_gpe_context_destroy(&mfc_context->gpe_context);
1604
1605     dri_bo_unreference(mfc_context->mfc_batchbuffer_surface.bo);
1606     mfc_context->mfc_batchbuffer_surface.bo = NULL;
1607
1608     dri_bo_unreference(mfc_context->aux_batchbuffer_surface.bo);
1609     mfc_context->aux_batchbuffer_surface.bo = NULL;
1610
1611     if (mfc_context->aux_batchbuffer)
1612         intel_batchbuffer_free(mfc_context->aux_batchbuffer);
1613
1614     mfc_context->aux_batchbuffer = NULL;
1615
1616     free(mfc_context);
1617 }
1618
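     /*
      * Allocate the gen6 MFC context, configure its GPE context for the
      * batchbuffer kernels and hook the gen6 implementations into the
      * encoder context.
      */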
1619 Bool gen6_mfc_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
1620 {
1621     struct gen6_mfc_context *mfc_context = calloc(1, sizeof(struct gen6_mfc_context));
1622
1623     mfc_context->gpe_context.surface_state_binding_table.length = (SURFACE_STATE_PADDED_SIZE + sizeof(unsigned int)) * MAX_MEDIA_SURFACES_GEN6;
1624
1625     mfc_context->gpe_context.idrt.max_entries = MAX_GPE_KERNELS;
1626     mfc_context->gpe_context.idrt.entry_size = sizeof(struct gen6_interface_descriptor_data);
1627
1628     mfc_context->gpe_context.curbe.length = 32 * 4;
1629
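         /*
          * Several of the VFE fields seem to use the hardware's "minus one"
          * encoding, hence the explicit "- 1" expressions below.
          */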
1630     mfc_context->gpe_context.vfe_state.max_num_threads = 60 - 1;
1631     mfc_context->gpe_context.vfe_state.num_urb_entries = 16;
1632     mfc_context->gpe_context.vfe_state.gpgpu_mode = 0;
1633     mfc_context->gpe_context.vfe_state.urb_entry_size = 59 - 1;
1634     mfc_context->gpe_context.vfe_state.curbe_allocation_size = 37 - 1;
1635
1636     i965_gpe_load_kernels(ctx,
1637                           &mfc_context->gpe_context,
1638                           gen6_mfc_kernels,
1639                           NUM_MFC_KERNEL);
1640
1641     mfc_context->pipe_mode_select = gen6_mfc_pipe_mode_select;
1642     mfc_context->set_surface_state = gen6_mfc_surface_state;
1643     mfc_context->ind_obj_base_addr_state = gen6_mfc_ind_obj_base_addr_state;
1644     mfc_context->avc_img_state = gen6_mfc_avc_img_state;
1645     mfc_context->avc_qm_state = gen6_mfc_avc_qm_state;
1646     mfc_context->avc_fqm_state = gen6_mfc_avc_fqm_state;
1647     mfc_context->insert_object = gen6_mfc_avc_insert_object;
1648     mfc_context->buffer_suface_setup = i965_gpe_buffer_suface_setup;
1649
1650     encoder_context->mfc_context = mfc_context;
1651     encoder_context->mfc_context_destroy = gen6_mfc_context_destroy;
1652     encoder_context->mfc_pipeline = gen6_mfc_pipeline;
1653     encoder_context->mfc_brc_prepare = intel_mfc_brc_prepare;
1654
1655     return True;
1656 }