 * Copyright © 2010-2011 Intel Corporation
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 * Zhou Chang <chang.zhou@intel.com>

#include "intel_batchbuffer.h"
#include "i965_defines.h"
#include "i965_structs.h"
#include "i965_drv_video.h"
#include "i965_encoder.h"
#include "i965_encoder_utils.h"
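
/*
 * The .g6b files included below are precompiled GEN6 media (EU) kernel
 * binaries. They are only needed by the hardware batchbuffer path, where
 * the GPU expands the per-MB MFC_AVC_PAK_OBJECT commands itself; the
 * software path later in this file builds the same command stream on the
 * CPU instead.
 */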
static const uint32_t gen6_mfc_batchbuffer_avc_intra[][4] = {
#include "shaders/utils/mfc_batchbuffer_avc_intra.g6b"

static const uint32_t gen6_mfc_batchbuffer_avc_inter[][4] = {
#include "shaders/utils/mfc_batchbuffer_avc_inter.g6b"

static struct i965_kernel gen6_mfc_kernels[] = {
        "MFC AVC INTRA BATCHBUFFER ",
        MFC_BATCHBUFFER_AVC_INTRA,
        gen6_mfc_batchbuffer_avc_intra,
        sizeof(gen6_mfc_batchbuffer_avc_intra),

        "MFC AVC INTER BATCHBUFFER ",
        MFC_BATCHBUFFER_AVC_INTER,
        gen6_mfc_batchbuffer_avc_inter,
        sizeof(gen6_mfc_batchbuffer_avc_inter),
gen6_mfc_pipe_mode_select(VADriverContextP ctx,
                          struct intel_encoder_context *encoder_context)
    struct intel_batchbuffer *batch = encoder_context->base.batch;

    assert(standard_select == MFX_FORMAT_AVC);

    BEGIN_BCS_BATCH(batch, 4);

    OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (4 - 2));
                  (1 << 10) | /* disable Stream-Out; advanced QP/bitrate control needs it enabled */
                  (1 << 9)  | /* Post Deblocking Output */
                  (0 << 8)  | /* Pre Deblocking Output */
                  (0 << 7)  | /* disable TLB prefetch */
                  (0 << 5)  | /* not in stitch mode */
                  (1 << 4)  | /* encoding mode */
                  (2 << 0));  /* Standard Select: AVC */
                  (0 << 20) | /* round flag in PB slice */
                  (0 << 19) | /* round flag in Intra8x8 */
                  (0 << 7)  | /* expand NOA bus flag */
                  (1 << 6)  | /* must be 1 */
                  (0 << 5)  | /* disable clock gating for NOA */
                  (0 << 4)  | /* terminate if AVC motion and POC table error occurs */
                  (0 << 3)  | /* terminate if AVC mbdata error occurs */
                  (0 << 2)  | /* terminate if AVC CABAC/CAVLC decode error occurs */
                  (0 << 1)  | /* AVC long field motion vector */
                  (0 << 0));  /* always calculate AVC ILDB boundary strength */
    OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
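
/*
 * Net effect of the DWs above: the MFX engine is selected for AVC in
 * encoding mode, with post-deblocking output enabled and pre-deblocking
 * output and stream-out disabled; stream-out would only be required for
 * more advanced QP/bitrate control.
 */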
gen6_mfc_surface_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
    struct intel_batchbuffer *batch = encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;

    BEGIN_BCS_BATCH(batch, 6);

    OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
    OUT_BCS_BATCH(batch, 0);
                  ((mfc_context->surface_state.height - 1) << 19) |
                  ((mfc_context->surface_state.width - 1) << 6));
                  (MFX_SURFACE_PLANAR_420_8 << 28) | /* 420 planar YUV surface */
                  (1 << 27) | /* must be 1 for interleave U/V, hardware requirement */
                  (0 << 22) | /* surface object control state, FIXME??? */
                  ((mfc_context->surface_state.w_pitch - 1) << 3) | /* pitch */
                  (0 << 2)  | /* must be 0 for interleave U/V */
                  (1 << 1)  | /* must be y-tiled */
                  (I965_TILEWALK_YMAJOR << 0)); /* tile walk, TILEWALK_YMAJOR */
                  (0 << 16) | /* must be 0 for interleave U/V */
                  (mfc_context->surface_state.h_pitch)); /* y offset for U(cb) */
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
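
/*
 * The reconstructed surface is NV12: a Y-tiled luma plane followed by an
 * interleaved U/V plane that starts h_pitch rows below it, which is why
 * only the Cb Y-offset is programmed here.
 */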
gen6_mfc_pipe_buf_addr_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
    struct intel_batchbuffer *batch = encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;

    BEGIN_BCS_BATCH(batch, 24);

    OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (24 - 2));

    OUT_BCS_BATCH(batch, 0); /* pre output addr */

    OUT_BCS_RELOC(batch, mfc_context->post_deblocking_output.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0); /* post output addr */

    OUT_BCS_RELOC(batch, mfc_context->uncompressed_picture_source.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0); /* uncompressed data */
    OUT_BCS_RELOC(batch, mfc_context->macroblock_status_buffer.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0); /* StreamOut data */
    OUT_BCS_RELOC(batch, mfc_context->intra_row_store_scratch_buffer.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
    OUT_BCS_RELOC(batch, mfc_context->deblocking_filter_row_store_scratch_buffer.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
    /* 7..22 Reference pictures */
    for (i = 0; i < ARRAY_ELEMS(mfc_context->reference_surfaces); i++) {
        if (mfc_context->reference_surfaces[i].bo != NULL) {
            OUT_BCS_RELOC(batch, mfc_context->reference_surfaces[i].bo,
                          I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
            OUT_BCS_BATCH(batch, 0);

    OUT_BCS_RELOC(batch, mfc_context->macroblock_status_buffer.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0); /* Macroblock status buffer */

    ADVANCE_BCS_BATCH(batch);
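
/*
 * DW layout of MFX_PIPE_BUF_ADDR_STATE as emitted above: pre- and
 * post-deblocking destinations, the uncompressed source, the stream-out/MB
 * status buffer, the intra and deblocking row-store scratch buffers,
 * sixteen reference picture slots (unused ones written as 0), and the MB
 * status buffer once more.
 */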
gen6_mfc_ind_obj_base_addr_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
    struct intel_batchbuffer *batch = encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    struct gen6_vme_context *vme_context = encoder_context->vme_context;

    BEGIN_BCS_BATCH(batch, 11);

    OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    /* MFX Indirect MV Object Base Address */
    OUT_BCS_RELOC(batch, vme_context->vme_output.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    /* MFC Indirect PAK-BSE Object Base Address for Encoder */
                  mfc_context->mfc_indirect_pak_bse_object.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  mfc_context->mfc_indirect_pak_bse_object.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  mfc_context->mfc_indirect_pak_bse_object.end_offset);

    ADVANCE_BCS_BATCH(batch);
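
/*
 * Indirect objects: motion-vector data is fetched from the VME output
 * buffer produced by the ENC stage, while the PAK-BSE output (the final
 * bitstream) is written into the coded buffer between
 * mfc_indirect_pak_bse_object.offset and .end_offset.
 */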
gen6_mfc_bsp_buf_base_addr_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
    struct intel_batchbuffer *batch = encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;

    BEGIN_BCS_BATCH(batch, 4);

    OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
    OUT_BCS_RELOC(batch, mfc_context->bsd_mpc_row_store_scratch_buffer.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
gen6_mfc_avc_img_state(VADriverContextP ctx, struct encode_state *encode_state,
                       struct intel_encoder_context *encoder_context)
    struct intel_batchbuffer *batch = encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
    VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
    int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
    int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;

    BEGIN_BCS_BATCH(batch, 13);
    OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (13 - 2));
                  ((width_in_mbs * height_in_mbs) & 0xFFFF));
                  (height_in_mbs << 16) |
                  (width_in_mbs << 0));
                  (0 << 24) | /* Second Chroma QP Offset */
                  (0 << 16) | /* Chroma QP Offset */
                  (0 << 14) | /* Max-bit conformance Intra flag */
                  (0 << 13) | /* Max Macroblock size conformance Inter flag */
                  (1 << 12) | /* Should always be written as "1" */
                  (0 << 10) | /* QM Preset Flag */
                  (0 << 8)  | /* Image Structure */
                  (0 << 0));  /* Current Decoded Image Frame Store ID, reserved in Encode mode */
                  (400 << 16) | /* Minimum frame size */
                  (0 << 15) | /* Disable reading of Macroblock Status Buffer */
                  (0 << 14) | /* Load BitStream Pointer only once, 1 slice per frame */
                  (0 << 13) | /* CABAC 0 word insertion test enable */
                  (1 << 12) | /* MVUnpackedEnable, compliant with DXVA */
                  (1 << 10) | /* Chroma Format IDC, 4:2:0 */
                  (pPicParameter->pic_fields.bits.entropy_coding_mode_flag << 7) | /* 0: CAVLC encoding mode, 1: CABAC */
                  (0 << 6)  | /* Only valid for VLD decoding mode */
                  (0 << 5)  | /* Constrained Intra Prediction Flag, from PPS */
                  (pSequenceParameter->seq_fields.bits.direct_8x8_inference_flag << 4) | /* Direct 8x8 inference flag */
                  (pPicParameter->pic_fields.bits.transform_8x8_mode_flag << 3) | /* 8x8 or 4x4 IDCT Transform Mode Flag */
                  (1 << 2)  | /* Frame MB only flag */
                  (0 << 1)  | /* MBAFF mode is not active */
                  (0 << 0));  /* Field picture flag */
                  (1 << 16) | /* Frame Size Rate Control Flag */
                  (1 << 9)  | /* MB level Rate Control Enabling Flag */
                  (1 << 3)  | /* FrameBitRateMinReportMask */
                  (1 << 2)  | /* FrameBitRateMaxReportMask */
                  (1 << 1)  | /* InterMBMaxSizeReportMask */
                  (1 << 0));  /* IntraMBMaxSizeReportMask */
    OUT_BCS_BATCH(batch, /* Inter and Intra Conformance Max size limit */
                  (0x0600 << 16) | /* InterMbMaxSz 192 Byte */
                  (0x0800));       /* IntraMbMaxSz 256 Byte */
    OUT_BCS_BATCH(batch, 0x00000000); /* Reserved: MBZ */
    OUT_BCS_BATCH(batch, 0x01020304); /* Slice QP Delta for bitrate control */
    OUT_BCS_BATCH(batch, 0xFEFDFCFB);
    OUT_BCS_BATCH(batch, 0x80601004); /* MAX = 128KB, MIN = 64KB */
    OUT_BCS_BATCH(batch, 0x00800001);
    OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
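
/*
 * The trailing constants are fixed rate-control knobs rather than values
 * derived from the sequence parameters: positive and negative slice QP
 * deltas for the four report segments (0x01020304 / 0xFEFDFCFB) and
 * frame-size min/max thresholds.
 */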
gen6_mfc_avc_directmode_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
    struct intel_batchbuffer *batch = encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;

    BEGIN_BCS_BATCH(batch, 69);

    OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));

    /* Reference frames and Current frames */
    for (i = 0; i < NUM_MFC_DMV_BUFFERS; i++) {
        if (mfc_context->direct_mv_buffers[i].bo != NULL) {
            OUT_BCS_RELOC(batch, mfc_context->direct_mv_buffers[i].bo,
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
            OUT_BCS_BATCH(batch, 0);

    for (i = 0; i < 32; i++) {
        OUT_BCS_BATCH(batch, i / 2);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
gen6_mfc_avc_slice_state(VADriverContextP ctx,
                         VAEncSliceParameterBufferH264 *slice_param,
                         struct encode_state *encode_state,
                         struct intel_encoder_context *encoder_context,
                         int rate_control_enable,
                         struct intel_batchbuffer *batch)
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
    int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
    int beginmb = slice_param->macroblock_address;
    int endmb = beginmb + slice_param->num_macroblocks;
    int beginx = beginmb % width_in_mbs;
    int beginy = beginmb / width_in_mbs;
    int nextx = endmb % width_in_mbs;
    int nexty = endmb / width_in_mbs;
    int slice_type = slice_param->slice_type;
    int last_slice = (endmb == (width_in_mbs * height_in_mbs));
    int bit_rate_control_target, maxQpN, maxQpP;
    unsigned char correct[6], grow, shrink;

        batch = encoder_context->base.batch;

    if (slice_type == SLICE_TYPE_I)
        bit_rate_control_target = 0;
        bit_rate_control_target = 1;

    maxQpN = mfc_context->bit_rate_control_context[bit_rate_control_target].MaxQpNegModifier;
    maxQpP = mfc_context->bit_rate_control_context[bit_rate_control_target].MaxQpPosModifier;

    for (i = 0; i < 6; i++)
        correct[i] = mfc_context->bit_rate_control_context[bit_rate_control_target].Correct[i];

    grow = mfc_context->bit_rate_control_context[bit_rate_control_target].GrowInit +
        (mfc_context->bit_rate_control_context[bit_rate_control_target].GrowResistance << 4);
    shrink = mfc_context->bit_rate_control_context[bit_rate_control_target].ShrinkInit +
        (mfc_context->bit_rate_control_context[bit_rate_control_target].ShrinkResistance << 4);

    BEGIN_BCS_BATCH(batch, 11);

    OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));
    OUT_BCS_BATCH(batch, slice_type); /* Slice Type: I/P/B slice */

    if (slice_type == SLICE_TYPE_I) {
        OUT_BCS_BATCH(batch, 0); /* no reference frames and pred_weight_table */
        OUT_BCS_BATCH(batch, 0x00010000); /* 1 reference frame */

                  (slice_param->direct_spatial_mv_pred_flag << 29) | /* Direct Prediction Type */
                  (0 << 24) | /* Enable deblocking operation */
                  (qp << 16) | /* Slice Quantization Parameter */
                  (beginy << 24) | /* First MB X&Y, the beginning position of the current slice */
                  slice_param->macroblock_address);
    OUT_BCS_BATCH(batch, (nexty << 16) | nextx); /* Next slice first MB X&Y */
                  (rate_control_enable << 31) | /* in CBR mode RateControlCounterEnable = enable */
                  (1 << 30) | /* ResetRateControlCounter */
                  (0 << 28) | /* RC Trigger Mode = Always Rate Control */
                  (4 << 24) | /* RC Stable Tolerance, middle level */
                  (rate_control_enable << 23) | /* RC Panic Enable */
                  (0 << 22) | /* QP mode, don't modify CBP */
                  (0 << 21) | /* MB Type Direct Conversion Enabled */
                  (0 << 20) | /* MB Type Skip Conversion Enabled */
                  (last_slice << 19) | /* IsLastSlice */
                  (0 << 18) | /* BitstreamOutputFlag: compressed bitstream output disable flag, 0: enable, 1: disable */
                  (1 << 17) | /* HeaderPresentFlag */
                  (1 << 16) | /* SliceData PresentFlag */
                  (1 << 15) | /* TailPresentFlag */
                  (1 << 13) | /* RBSP NAL TYPE */
                  (0 << 12)); /* CabacZeroWordInsertionEnable */
    OUT_BCS_BATCH(batch, mfc_context->mfc_indirect_pak_bse_object.offset);
                  (maxQpN << 24) | /* Target QP - 24 is lowest QP */
                  (maxQpP << 16) | /* Target QP + 20 is highest QP */
    OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
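
/*
 * grow and shrink pack the rate-control Init value in the low nibble and
 * the corresponding Resistance in the high nibble; together with correct[]
 * they presumably fill the remaining MFX_AVC_SLICE_STATE DWs that drive the
 * hardware MB-level rate control.
 */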
static void gen6_mfc_avc_qm_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
    struct intel_batchbuffer *batch = encoder_context->base.batch;

    BEGIN_BCS_BATCH(batch, 58);

    OUT_BCS_BATCH(batch, MFX_AVC_QM_STATE | 56);
    OUT_BCS_BATCH(batch, 0xFF);
    for (i = 0; i < 56; i++) {
        OUT_BCS_BATCH(batch, 0x10101010);

    ADVANCE_BCS_BATCH(batch);

static void gen6_mfc_avc_fqm_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
    struct intel_batchbuffer *batch = encoder_context->base.batch;

    BEGIN_BCS_BATCH(batch, 113);
    OUT_BCS_BATCH(batch, MFC_AVC_FQM_STATE | (113 - 2));

    for (i = 0; i < 112; i++) {
        OUT_BCS_BATCH(batch, 0x10001000);

    ADVANCE_BCS_BATCH(batch);
gen6_mfc_avc_ref_idx_state(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
    struct intel_batchbuffer *batch = encoder_context->base.batch;

    BEGIN_BCS_BATCH(batch, 10);
    OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | 8);
    OUT_BCS_BATCH(batch, 0);          // Select L0
    OUT_BCS_BATCH(batch, 0x80808020); // Only 1 reference
    for (i = 0; i < 7; i++) {
        OUT_BCS_BATCH(batch, 0x80808080);
    ADVANCE_BCS_BATCH(batch);

    BEGIN_BCS_BATCH(batch, 10);
    OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | 8);
    OUT_BCS_BATCH(batch, 1);          // Select L1
    OUT_BCS_BATCH(batch, 0x80808022); // Only 1 reference
    for (i = 0; i < 7; i++) {
        OUT_BCS_BATCH(batch, 0x80808080);
    ADVANCE_BCS_BATCH(batch);
gen6_mfc_avc_insert_object(VADriverContextP ctx, struct intel_encoder_context *encoder_context,
                           unsigned int *insert_data, int lenght_in_dws, int data_bits_in_last_dw,
                           int skip_emul_byte_count, int is_last_header, int is_end_of_slice, int emulation_flag,
                           struct intel_batchbuffer *batch)
        batch = encoder_context->base.batch;

    BEGIN_BCS_BATCH(batch, lenght_in_dws + 2);

    OUT_BCS_BATCH(batch, MFC_AVC_INSERT_OBJECT | (lenght_in_dws + 2 - 2));
                  (0 << 16) | /* always start at offset 0 */
                  (data_bits_in_last_dw << 8) |
                  (skip_emul_byte_count << 4) |
                  (!!emulation_flag << 3) |
                  ((!!is_last_header) << 2) |
                  ((!!is_end_of_slice) << 1) |
                  (0 << 0)); /* FIXME: ??? */

    intel_batchbuffer_data(batch, insert_data, lenght_in_dws * 4);
    ADVANCE_BCS_BATCH(batch);
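
/*
 * MFC_AVC_INSERT_OBJECT copies raw header/tail bits into the bitstream: the
 * payload DWs follow the two command DWs, data_bits_in_last_dw marks how
 * many bits of the final DW are valid, and skip_emul_byte_count tells the
 * hardware how many leading bytes (start code + NAL unit type) to exclude
 * from emulation prevention when emulation_flag is set.
 */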
static void gen6_mfc_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;

    /* Encode common setup for MFC */
    dri_bo_unreference(mfc_context->post_deblocking_output.bo);
    mfc_context->post_deblocking_output.bo = NULL;

    dri_bo_unreference(mfc_context->pre_deblocking_output.bo);
    mfc_context->pre_deblocking_output.bo = NULL;

    dri_bo_unreference(mfc_context->uncompressed_picture_source.bo);
    mfc_context->uncompressed_picture_source.bo = NULL;

    dri_bo_unreference(mfc_context->mfc_indirect_pak_bse_object.bo);
    mfc_context->mfc_indirect_pak_bse_object.bo = NULL;

    for (i = 0; i < NUM_MFC_DMV_BUFFERS; i++) {
        if (mfc_context->direct_mv_buffers[i].bo != NULL)
            dri_bo_unreference(mfc_context->direct_mv_buffers[i].bo);
        mfc_context->direct_mv_buffers[i].bo = NULL;

    for (i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++) {
        if (mfc_context->reference_surfaces[i].bo != NULL)
            dri_bo_unreference(mfc_context->reference_surfaces[i].bo);
        mfc_context->reference_surfaces[i].bo = NULL;

    dri_bo_unreference(mfc_context->intra_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
    mfc_context->intra_row_store_scratch_buffer.bo = bo;

    dri_bo_unreference(mfc_context->macroblock_status_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
    mfc_context->macroblock_status_buffer.bo = bo;

    dri_bo_unreference(mfc_context->deblocking_filter_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      49152, /* 6 * 128 * 64 */
    mfc_context->deblocking_filter_row_store_scratch_buffer.bo = bo;

    dri_bo_unreference(mfc_context->bsd_mpc_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      12288, /* 1.5 * 128 * 64 */
    mfc_context->bsd_mpc_row_store_scratch_buffer.bo = bo;

    dri_bo_unreference(mfc_context->mfc_batchbuffer_surface.bo);
    mfc_context->mfc_batchbuffer_surface.bo = NULL;

    dri_bo_unreference(mfc_context->aux_batchbuffer_surface.bo);
    mfc_context->aux_batchbuffer_surface.bo = NULL;

    if (mfc_context->aux_batchbuffer)
        intel_batchbuffer_free(mfc_context->aux_batchbuffer);

    mfc_context->aux_batchbuffer = intel_batchbuffer_new(&i965->intel, I915_EXEC_BSD);
    mfc_context->aux_batchbuffer_surface.bo = mfc_context->aux_batchbuffer->buffer;
    dri_bo_reference(mfc_context->aux_batchbuffer_surface.bo);
    mfc_context->aux_batchbuffer_surface.pitch = 16;
    mfc_context->aux_batchbuffer_surface.num_blocks = mfc_context->aux_batchbuffer->size / 16;
    mfc_context->aux_batchbuffer_surface.size_block = 16;
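
    /*
     * The aux batchbuffer doubles as a GPE surface of 16-byte blocks
     * (one OWord per block): the hardware batchbuffer path reads slice
     * headers and per-slice setup from it via the MFC_SLICE_HEADER binding
     * set up later in this file.
     */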
    i965_gpe_context_init(ctx, &mfc_context->gpe_context);

static void gen6_mfc_avc_pipeline_header_programing(VADriverContextP ctx,
                                                    struct encode_state *encode_state,
                                                    struct intel_encoder_context *encoder_context,
                                                    struct intel_batchbuffer *slice_batch)
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    static int count = 0;
    unsigned int rate_control_mode = encoder_context->rate_control_mode;

    if (encode_state->packed_header_data[VAEncPackedHeaderH264_SPS]) {
        VAEncPackedHeaderParameterBuffer *param = NULL;
        unsigned int *header_data = (unsigned int *)encode_state->packed_header_data[VAEncPackedHeaderH264_SPS]->buffer;
        unsigned int length_in_bits;

        assert(encode_state->packed_header_param[VAEncPackedHeaderH264_SPS]);
        param = (VAEncPackedHeaderParameterBuffer *)encode_state->packed_header_param[VAEncPackedHeaderH264_SPS]->buffer;
        length_in_bits = param->bit_length;

        mfc_context->insert_object(ctx,
                                   ALIGN(length_in_bits, 32) >> 5,
                                   length_in_bits & 0x1f,
                                   5, /* FIXME: check it */
                                   !param->has_emulation_bytes,

    if (encode_state->packed_header_data[VAEncPackedHeaderH264_PPS]) {
        VAEncPackedHeaderParameterBuffer *param = NULL;
        unsigned int *header_data = (unsigned int *)encode_state->packed_header_data[VAEncPackedHeaderH264_PPS]->buffer;
        unsigned int length_in_bits;

        assert(encode_state->packed_header_param[VAEncPackedHeaderH264_PPS]);
        param = (VAEncPackedHeaderParameterBuffer *)encode_state->packed_header_param[VAEncPackedHeaderH264_PPS]->buffer;
        length_in_bits = param->bit_length;

        mfc_context->insert_object(ctx,
                                   ALIGN(length_in_bits, 32) >> 5,
                                   length_in_bits & 0x1f,
                                   5, /* FIXME: check it */
                                   !param->has_emulation_bytes,

    if ((rate_control_mode == VA_RC_CBR) && encode_state->packed_header_data[VAEncPackedHeaderH264_SPS]) { // this is the first AU
        struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;

        unsigned char *sei_data = NULL;
        int length_in_bits = build_avc_sei_buffering_period(mfc_context->vui_hrd.i_initial_cpb_removal_delay_length,
                                                            mfc_context->vui_hrd.i_initial_cpb_removal_delay, 0, &sei_data);
        mfc_context->insert_object(ctx,
                                   (unsigned int *)sei_data,
                                   ALIGN(length_in_bits, 32) >> 5,
                                   length_in_bits & 0x1f,

    // SEI pic_timing header
    if (rate_control_mode == VA_RC_CBR) {
        struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
        unsigned char *sei_data = NULL;
        int length_in_bits = build_avc_sei_pic_timing(mfc_context->vui_hrd.i_cpb_removal_delay_length,
                                                      mfc_context->vui_hrd.i_cpb_removal_delay * mfc_context->vui_hrd.i_frame_number,
                                                      mfc_context->vui_hrd.i_dpb_output_delay_length,
        mfc_context->insert_object(ctx,
                                   (unsigned int *)sei_data,
                                   ALIGN(length_in_bits, 32) >> 5,
                                   length_in_bits & 0x1f,
static void gen6_mfc_avc_pipeline_picture_programing(VADriverContextP ctx,
                                                     struct encode_state *encode_state,
                                                     struct intel_encoder_context *encoder_context)
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;

    mfc_context->pipe_mode_select(ctx, MFX_FORMAT_AVC, encoder_context);
    mfc_context->set_surface_state(ctx, encoder_context);
    mfc_context->ind_obj_base_addr_state(ctx, encoder_context);
    gen6_mfc_pipe_buf_addr_state(ctx, encoder_context);
    gen6_mfc_bsp_buf_base_addr_state(ctx, encoder_context);
    mfc_context->avc_img_state(ctx, encode_state, encoder_context);
    mfc_context->avc_qm_state(ctx, encoder_context);
    mfc_context->avc_fqm_state(ctx, encoder_context);
    gen6_mfc_avc_directmode_state(ctx, encoder_context);
    gen6_mfc_avc_ref_idx_state(ctx, encoder_context);

gen6_mfc_free_avc_surface(void **data)
    struct gen6_mfc_avc_surface_aux *avc_surface = *data;

    dri_bo_unreference(avc_surface->dmv_top);
    avc_surface->dmv_top = NULL;
    dri_bo_unreference(avc_surface->dmv_bottom);
    avc_surface->dmv_bottom = NULL;

gen6_mfc_bit_rate_control_context_init(struct encode_state *encode_state,
                                       struct gen6_mfc_context *mfc_context)
    VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;

    int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
    int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
    float fps = pSequenceParameter->time_scale * 0.5 / pSequenceParameter->num_units_in_tick;
    int inter_mb_size = pSequenceParameter->bits_per_second * 1.0 / (fps + 4.0) / width_in_mbs / height_in_mbs;
    int intra_mb_size = inter_mb_size * 5.0;

    mfc_context->bit_rate_control_context[0].target_mb_size = intra_mb_size;
    mfc_context->bit_rate_control_context[0].target_frame_size = intra_mb_size * width_in_mbs * height_in_mbs;
    mfc_context->bit_rate_control_context[1].target_mb_size = inter_mb_size;
    mfc_context->bit_rate_control_context[1].target_frame_size = inter_mb_size * width_in_mbs * height_in_mbs;

    for (i = 0; i < 2; i++) {
        mfc_context->bit_rate_control_context[i].QpPrimeY = 26;
        mfc_context->bit_rate_control_context[i].MaxQpNegModifier = 6;
        mfc_context->bit_rate_control_context[i].MaxQpPosModifier = 6;
        mfc_context->bit_rate_control_context[i].GrowInit = 6;
        mfc_context->bit_rate_control_context[i].GrowResistance = 4;
        mfc_context->bit_rate_control_context[i].ShrinkInit = 6;
        mfc_context->bit_rate_control_context[i].ShrinkResistance = 4;

        mfc_context->bit_rate_control_context[i].Correct[0] = 8;
        mfc_context->bit_rate_control_context[i].Correct[1] = 4;
        mfc_context->bit_rate_control_context[i].Correct[2] = 2;
        mfc_context->bit_rate_control_context[i].Correct[3] = 2;
        mfc_context->bit_rate_control_context[i].Correct[4] = 4;
        mfc_context->bit_rate_control_context[i].Correct[5] = 8;

    mfc_context->bit_rate_control_context[0].TargetSizeInWord = (intra_mb_size + 16) / 16;
    mfc_context->bit_rate_control_context[1].TargetSizeInWord = (inter_mb_size + 16) / 16;

    mfc_context->bit_rate_control_context[0].MaxSizeInWord = mfc_context->bit_rate_control_context[0].TargetSizeInWord * 1.5;
    mfc_context->bit_rate_control_context[1].MaxSizeInWord = mfc_context->bit_rate_control_context[1].TargetSizeInWord * 1.5;
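
    /*
     * Illustrative numbers (hypothetical, not taken from the code): for a
     * 1280x720 stream (80 x 45 = 3600 MBs) at 4 Mbit/s and 30 fps,
     * inter_mb_size is about 4000000 / 34 / 3600 = 32 bits per MB and
     * intra_mb_size 160 bits, giving TargetSizeInWord of roughly 3 and 11
     * words respectively.
     */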
static int gen6_mfc_bit_rate_control_context_update(struct encode_state *encode_state,
                                                    struct gen6_mfc_context *mfc_context,
                                                    int current_frame_size)
    VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[0]->buffer;
    int control_index = 1 - (pSliceParameter->slice_type == SLICE_TYPE_I);
    int oldQp = mfc_context->bit_rate_control_context[control_index].QpPrimeY;

    printf("control_index = %d, start_qp = %d, result = %d, target = %d\n", control_index,
           mfc_context->bit_rate_control_context[control_index].QpPrimeY, current_frame_size,
           mfc_context->bit_rate_control_context[control_index].target_frame_size);

    if (current_frame_size > mfc_context->bit_rate_control_context[control_index].target_frame_size * 4.0) {
        mfc_context->bit_rate_control_context[control_index].QpPrimeY += 4;
    } else if (current_frame_size > mfc_context->bit_rate_control_context[control_index].target_frame_size * 2.0) {
        mfc_context->bit_rate_control_context[control_index].QpPrimeY += 3;
    } else if (current_frame_size > mfc_context->bit_rate_control_context[control_index].target_frame_size * 1.50) {
        mfc_context->bit_rate_control_context[control_index].QpPrimeY += 2;
    } else if (current_frame_size > mfc_context->bit_rate_control_context[control_index].target_frame_size * 1.20) {
        mfc_context->bit_rate_control_context[control_index].QpPrimeY++;
    } else if (current_frame_size < mfc_context->bit_rate_control_context[control_index].target_frame_size * 0.30) {
        mfc_context->bit_rate_control_context[control_index].QpPrimeY -= 3;
    } else if (current_frame_size < mfc_context->bit_rate_control_context[control_index].target_frame_size * 0.50) {
        mfc_context->bit_rate_control_context[control_index].QpPrimeY -= 2;
    } else if (current_frame_size < mfc_context->bit_rate_control_context[control_index].target_frame_size * 0.80) {
        mfc_context->bit_rate_control_context[control_index].QpPrimeY--;

    if (mfc_context->bit_rate_control_context[control_index].QpPrimeY > 51)
        mfc_context->bit_rate_control_context[control_index].QpPrimeY = 51;
    if (mfc_context->bit_rate_control_context[control_index].QpPrimeY < 1)
        mfc_context->bit_rate_control_context[control_index].QpPrimeY = 1;

    if (mfc_context->bit_rate_control_context[control_index].QpPrimeY != oldQp)
gen6_mfc_hrd_context_init(struct encode_state *encode_state,
                          struct intel_encoder_context *encoder_context)
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
    unsigned int rate_control_mode = encoder_context->rate_control_mode;
    int target_bit_rate = pSequenceParameter->bits_per_second;

    // currently we only support CBR mode.
    if (rate_control_mode == VA_RC_CBR) {
        mfc_context->vui_hrd.i_bit_rate_value = target_bit_rate >> 10;
        mfc_context->vui_hrd.i_cpb_size_value = (target_bit_rate * 8) >> 10;
        mfc_context->vui_hrd.i_initial_cpb_removal_delay = mfc_context->vui_hrd.i_cpb_size_value * 0.5 * 1024 / target_bit_rate * 90000;
        mfc_context->vui_hrd.i_cpb_removal_delay = 2;
        mfc_context->vui_hrd.i_frame_number = 0;

        mfc_context->vui_hrd.i_initial_cpb_removal_delay_length = 24;
        mfc_context->vui_hrd.i_cpb_removal_delay_length = 24;
        mfc_context->vui_hrd.i_dpb_output_delay_length = 24;
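
        /*
         * HRD bookkeeping in 90 kHz ticks: the CPB is sized at 8x the
         * per-second bit budget (both expressed in 1024-unit steps) and the
         * initial removal delay corresponds to a half-full CPB; these are
         * the values the buffering period and pic_timing SEI messages above
         * are built from.
         */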
gen6_mfc_hrd_context_update(struct encode_state *encode_state,
                            struct gen6_mfc_context *mfc_context)
    mfc_context->vui_hrd.i_frame_number++;

static VAStatus gen6_mfc_avc_prepare(VADriverContextP ctx,
                                     struct encode_state *encode_state,
                                     struct intel_encoder_context *encoder_context)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    struct object_surface *obj_surface;
    struct object_buffer *obj_buffer;
    struct gen6_mfc_avc_surface_aux *gen6_avc_surface;

    VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
    unsigned int rate_control_mode = encoder_context->rate_control_mode;
    VAStatus vaStatus = VA_STATUS_SUCCESS;

    /* Setup all the input & output objects */

    /* Setup current frame and current direct mv buffer */
    obj_surface = SURFACE(pPicParameter->CurrPic.picture_id);

    i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N','V','1','2'), SUBSAMPLE_YUV420);

    if (obj_surface->private_data == NULL) {
        gen6_avc_surface = calloc(sizeof(struct gen6_mfc_avc_surface_aux), 1);
        gen6_avc_surface->dmv_top =
            dri_bo_alloc(i965->intel.bufmgr,
        gen6_avc_surface->dmv_bottom =
            dri_bo_alloc(i965->intel.bufmgr,
        assert(gen6_avc_surface->dmv_top);
        assert(gen6_avc_surface->dmv_bottom);
        obj_surface->private_data = (void *)gen6_avc_surface;
        obj_surface->free_private_data = (void *)gen6_mfc_free_avc_surface;

    gen6_avc_surface = (struct gen6_mfc_avc_surface_aux *)obj_surface->private_data;
    mfc_context->direct_mv_buffers[NUM_MFC_DMV_BUFFERS - 2].bo = gen6_avc_surface->dmv_top;
    mfc_context->direct_mv_buffers[NUM_MFC_DMV_BUFFERS - 1].bo = gen6_avc_surface->dmv_bottom;
    dri_bo_reference(gen6_avc_surface->dmv_top);
    dri_bo_reference(gen6_avc_surface->dmv_bottom);

    mfc_context->post_deblocking_output.bo = obj_surface->bo;
    dri_bo_reference(mfc_context->post_deblocking_output.bo);

    mfc_context->surface_state.width = obj_surface->orig_width;
    mfc_context->surface_state.height = obj_surface->orig_height;
    mfc_context->surface_state.w_pitch = obj_surface->width;
    mfc_context->surface_state.h_pitch = obj_surface->height;
    /* Setup reference frames and direct mv buffers */
    for (i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++) {
        if (pPicParameter->ReferenceFrames[i].picture_id != VA_INVALID_ID) {
            obj_surface = SURFACE(pPicParameter->ReferenceFrames[i].picture_id);

            if (obj_surface->bo != NULL) {
                mfc_context->reference_surfaces[i].bo = obj_surface->bo;
                dri_bo_reference(obj_surface->bo);

                /* Check DMV buffer */
                if (obj_surface->private_data == NULL) {

                    gen6_avc_surface = calloc(sizeof(struct gen6_mfc_avc_surface_aux), 1);
                    gen6_avc_surface->dmv_top =
                        dri_bo_alloc(i965->intel.bufmgr,
                    gen6_avc_surface->dmv_bottom =
                        dri_bo_alloc(i965->intel.bufmgr,
                    assert(gen6_avc_surface->dmv_top);
                    assert(gen6_avc_surface->dmv_bottom);
                    obj_surface->private_data = gen6_avc_surface;
                    obj_surface->free_private_data = gen6_mfc_free_avc_surface;

                gen6_avc_surface = (struct gen6_mfc_avc_surface_aux *)obj_surface->private_data;
                /* Setup DMV buffer */
                mfc_context->direct_mv_buffers[i * 2].bo = gen6_avc_surface->dmv_top;
                mfc_context->direct_mv_buffers[i * 2 + 1].bo = gen6_avc_surface->dmv_bottom;
                dri_bo_reference(gen6_avc_surface->dmv_top);
                dri_bo_reference(gen6_avc_surface->dmv_bottom);

    obj_surface = SURFACE(encoder_context->input_yuv_surface);
    assert(obj_surface && obj_surface->bo);
    mfc_context->uncompressed_picture_source.bo = obj_surface->bo;
    dri_bo_reference(mfc_context->uncompressed_picture_source.bo);

    obj_buffer = BUFFER(pPicParameter->coded_buf); /* FIXME: fix this later */
    bo = obj_buffer->buffer_store->bo;

    mfc_context->mfc_indirect_pak_bse_object.bo = bo;
    mfc_context->mfc_indirect_pak_bse_object.offset = ALIGN(sizeof(VACodedBufferSegment), 64);
    mfc_context->mfc_indirect_pak_bse_object.end_offset = ALIGN(obj_buffer->size_element - 0x1000, 0x1000);
    dri_bo_reference(mfc_context->mfc_indirect_pak_bse_object.bo);

    /* Programming bit rate control */
    if (mfc_context->bit_rate_control_context[0].MaxSizeInWord == 0)
        gen6_mfc_bit_rate_control_context_init(encode_state, mfc_context);

    /* Programming HRD control */
    if ((rate_control_mode == VA_RC_CBR) && (mfc_context->vui_hrd.i_cpb_size_value == 0))
        gen6_mfc_hrd_context_init(encode_state, encoder_context);
static VAStatus gen6_mfc_run(VADriverContextP ctx,
                             struct encode_state *encode_state,
                             struct intel_encoder_context *encoder_context)
    struct intel_batchbuffer *batch = encoder_context->base.batch;

    intel_batchbuffer_flush(batch); // run the pipeline

    return VA_STATUS_SUCCESS;

gen6_mfc_stop(VADriverContextP ctx,
              struct encode_state *encode_state,
              struct intel_encoder_context *encoder_context,
              int *encoded_bits_size)
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    unsigned int *status_mem;
    unsigned int buffer_size_bits = 0;
    int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
    int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;

    dri_bo_map(mfc_context->macroblock_status_buffer.bo, 1);
    status_mem = (unsigned int *)mfc_context->macroblock_status_buffer.bo->virtual;
    // Detecting encoder buffer size and bit rate control result
    for (i = 0; i < width_in_mbs * height_in_mbs; i++) {
        unsigned short current_mb = status_mem[1] >> 16;
        buffer_size_bits += current_mb;

    dri_bo_unmap(mfc_context->macroblock_status_buffer.bo);

    *encoded_bits_size = buffer_size_bits;

    return VA_STATUS_SUCCESS;
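
/*
 * Note on the loop above: each MB status record appears to carry the
 * encoded size of that macroblock (in bits) in the upper 16 bits of one of
 * its DWs; summing them yields the frame size that the CBR loop in
 * gen6_mfc_avc_encode_picture() compares against its target.
 */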
gen6_mfc_avc_pak_object_intra(VADriverContextP ctx, int x, int y, int end_mb, int qp, unsigned int *msg,
                              struct intel_encoder_context *encoder_context,
                              unsigned char target_mb_size, unsigned char max_mb_size,
                              struct intel_batchbuffer *batch)
    int len_in_dwords = 11;

        batch = encoder_context->base.batch;

    BEGIN_BCS_BATCH(batch, len_in_dwords);

    OUT_BCS_BATCH(batch, MFC_AVC_PAK_OBJECT | (len_in_dwords - 2));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch,
                  (0 << 24) | /* PackedMvNum, Debug */
                  (0 << 20) | /* No motion vector */
                  (1 << 19) | /* CbpDcY */
                  (1 << 18) | /* CbpDcU */
                  (1 << 17) | /* CbpDcV */
                  (msg[0] & 0xFFFF));

    OUT_BCS_BATCH(batch, (0xFFFF << 16) | (y << 8) | x); /* Code Block Pattern for Y */
    OUT_BCS_BATCH(batch, 0x000F000F); /* Code Block Pattern */
    OUT_BCS_BATCH(batch, (0 << 27) | (end_mb << 26) | qp); /* Last MB */

    /* Stuff for Intra MB */
    OUT_BCS_BATCH(batch, msg[1]); /* We use Intra16x16, no 4x4 pred modes */
    OUT_BCS_BATCH(batch, msg[2]);
    OUT_BCS_BATCH(batch, msg[3] & 0xFC);

    /* MaxSizeInWord and TargetSizeInWord */
    OUT_BCS_BATCH(batch, (max_mb_size << 24) |
                  (target_mb_size << 16));

    ADVANCE_BCS_BATCH(batch);

    return len_in_dwords;
gen6_mfc_avc_pak_object_inter(VADriverContextP ctx, int x, int y, int end_mb, int qp, unsigned int offset,
                              struct intel_encoder_context *encoder_context,
                              unsigned char target_mb_size, unsigned char max_mb_size, int slice_type,
                              struct intel_batchbuffer *batch)
    int len_in_dwords = 11;

        batch = encoder_context->base.batch;

    BEGIN_BCS_BATCH(batch, len_in_dwords);

    OUT_BCS_BATCH(batch, MFC_AVC_PAK_OBJECT | (len_in_dwords - 2));

    OUT_BCS_BATCH(batch, 32); /* 32 MVs */
    OUT_BCS_BATCH(batch, offset);

    OUT_BCS_BATCH(batch,
                  (1 << 24) | /* PackedMvNum, Debug */
                  (4 << 20) | /* 8 MVs, SNB doesn't use it */
                  (1 << 19) | /* CbpDcY */
                  (1 << 18) | /* CbpDcU */
                  (1 << 17) | /* CbpDcV */
                  (0 << 15) | /* Transform8x8Flag = 0 */
                  (0 << 14) | /* Frame based */
                  (0 << 13) | /* Inter MB */
                  (1 << 8)  | /* MbType = P_L0_16x16 */
                  (0 << 7)  | /* MBZ for frame */
                  (0 << 6)  | /* MBZ */
                  (2 << 4)  | /* MBZ for inter */
                  (0 << 3)  | /* MBZ */
                  (0 << 2)  | /* SkipMbFlag */
                  (0 << 0));  /* InterMbMode */

    OUT_BCS_BATCH(batch, (0xFFFF << 16) | (y << 8) | x); /* Code Block Pattern for Y */
    OUT_BCS_BATCH(batch, 0x000F000F); /* Code Block Pattern */

    if (slice_type == SLICE_TYPE_B) {
        OUT_BCS_BATCH(batch, (0xF << 28) | (end_mb << 26) | qp); /* Last MB */
        OUT_BCS_BATCH(batch, (end_mb << 26) | qp); /* Last MB */
        OUT_BCS_BATCH(batch, (end_mb << 26) | qp); /* Last MB */

    /* Stuff for Inter MB */
    OUT_BCS_BATCH(batch, 0x0);
    OUT_BCS_BATCH(batch, 0x0);
    OUT_BCS_BATCH(batch, 0x0);

    /* MaxSizeInWord and TargetSizeInWord */
    OUT_BCS_BATCH(batch, (max_mb_size << 24) |
                  (target_mb_size << 16));

    ADVANCE_BCS_BATCH(batch);

    return len_in_dwords;
gen6_mfc_avc_pipeline_slice_programing(VADriverContextP ctx,
                                       struct encode_state *encode_state,
                                       struct intel_encoder_context *encoder_context,
                                       struct intel_batchbuffer *slice_batch)
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
    VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
    VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[slice_index]->buffer;
    unsigned int *msg = NULL, offset = 0;
    int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
    int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
    int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
    int last_slice = (pSliceParameter->macroblock_address + pSliceParameter->num_macroblocks) == (width_in_mbs * height_in_mbs);
    int qp = pPicParameter->pic_init_qp + pSliceParameter->slice_qp_delta;
    unsigned int rate_control_mode = encoder_context->rate_control_mode;
    unsigned char *slice_header = NULL;
    int slice_header_length_in_bits = 0;
    unsigned int tail_data[] = { 0x0, 0x0 };

    gen6_mfc_avc_slice_state(ctx, pSliceParameter,
                             encode_state, encoder_context,
                             (rate_control_mode == VA_RC_CBR), qp, slice_batch);

    if (slice_index == 0)
        gen6_mfc_avc_pipeline_header_programing(ctx, encode_state, encoder_context, slice_batch);

    slice_header_length_in_bits = build_avc_slice_header(pSequenceParameter, pPicParameter, pSliceParameter, &slice_header);

    mfc_context->insert_object(ctx, encoder_context,
                               (unsigned int *)slice_header, ALIGN(slice_header_length_in_bits, 32) >> 5, slice_header_length_in_bits & 0x1f,
                               5, /* first 5 bytes are start code + nal unit type */
                               1, 0, 1, slice_batch);

    if (rate_control_mode == VA_RC_CBR) {
        qp = mfc_context->bit_rate_control_context[1 - is_intra].QpPrimeY;

    dri_bo_map(vme_context->vme_output.bo, 1);
    msg = (unsigned int *)vme_context->vme_output.bo->virtual;

    for (i = pSliceParameter->macroblock_address;
         i < pSliceParameter->macroblock_address + pSliceParameter->num_macroblocks; i++) {
        int last_mb = (i == (pSliceParameter->macroblock_address + pSliceParameter->num_macroblocks - 1));
        x = i % width_in_mbs;
        y = i / width_in_mbs;

            gen6_mfc_avc_pak_object_intra(ctx, x, y, last_mb, qp, msg, encoder_context, 0, 0, slice_batch);
            gen6_mfc_avc_pak_object_inter(ctx, x, y, last_mb, qp, offset, encoder_context, 0, 0, pSliceParameter->slice_type, slice_batch);

    dri_bo_unmap(vme_context->vme_output.bo);

    mfc_context->insert_object(ctx, encoder_context,
                               2, 1, 1, 0, slice_batch);

    mfc_context->insert_object(ctx, encoder_context,
                               1, 1, 1, 0, slice_batch);
gen6_mfc_avc_software_batchbuffer(VADriverContextP ctx,
                                  struct encode_state *encode_state,
                                  struct intel_encoder_context *encoder_context)
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct intel_batchbuffer *batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_BSD);
    dri_bo *batch_bo = batch->buffer;

    for (i = 0; i < encode_state->num_slice_params_ext; i++) {
        gen6_mfc_avc_pipeline_slice_programing(ctx, encode_state, encoder_context, i, batch);

    used = intel_batchbuffer_used_size(batch);

    if ((used & 4) == 0) {
        BEGIN_BCS_BATCH(batch, 2);
        OUT_BCS_BATCH(batch, 0);
        OUT_BCS_BATCH(batch, MI_BATCH_BUFFER_END);

    dri_bo_reference(batch_bo);
    intel_batchbuffer_free(batch);
gen6_mfc_batchbuffer_surfaces_input(VADriverContextP ctx,
                                    struct encode_state *encode_state,
                                    struct intel_encoder_context *encoder_context)

    struct gen6_vme_context *vme_context = encoder_context->vme_context;
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;

    assert(vme_context->vme_output.bo);
    mfc_context->buffer_suface_setup(ctx,
                                     &mfc_context->gpe_context,
                                     &vme_context->vme_output,
                                     BINDING_TABLE_OFFSET(BIND_IDX_VME_OUTPUT),
                                     SURFACE_STATE_OFFSET(BIND_IDX_VME_OUTPUT));
    assert(mfc_context->aux_batchbuffer_surface.bo);
    mfc_context->buffer_suface_setup(ctx,
                                     &mfc_context->gpe_context,
                                     &mfc_context->aux_batchbuffer_surface,
                                     BINDING_TABLE_OFFSET(BIND_IDX_MFC_SLICE_HEADER),
                                     SURFACE_STATE_OFFSET(BIND_IDX_MFC_SLICE_HEADER));

gen6_mfc_batchbuffer_surfaces_output(VADriverContextP ctx,
                                     struct encode_state *encode_state,
                                     struct intel_encoder_context *encoder_context)

    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
    int width_in_mbs = pSequenceParameter->picture_width_in_mbs;
    int height_in_mbs = pSequenceParameter->picture_height_in_mbs;

    mfc_context->mfc_batchbuffer_surface.num_blocks = width_in_mbs * height_in_mbs + encode_state->num_slice_params_ext * 2 + 1;
    mfc_context->mfc_batchbuffer_surface.size_block = 48; /* 3 OWORDs */
    mfc_context->mfc_batchbuffer_surface.pitch = 16;
    mfc_context->mfc_batchbuffer_surface.bo = dri_bo_alloc(i965->intel.bufmgr,
                                                           mfc_context->mfc_batchbuffer_surface.num_blocks * mfc_context->mfc_batchbuffer_surface.size_block,
    mfc_context->buffer_suface_setup(ctx,
                                     &mfc_context->gpe_context,
                                     &mfc_context->mfc_batchbuffer_surface,
                                     BINDING_TABLE_OFFSET(BIND_IDX_MFC_BATCHBUFFER),
                                     SURFACE_STATE_OFFSET(BIND_IDX_MFC_BATCHBUFFER));

gen6_mfc_batchbuffer_surfaces_setup(VADriverContextP ctx,
                                    struct encode_state *encode_state,
                                    struct intel_encoder_context *encoder_context)
    gen6_mfc_batchbuffer_surfaces_input(ctx, encode_state, encoder_context);
    gen6_mfc_batchbuffer_surfaces_output(ctx, encode_state, encoder_context);
gen6_mfc_batchbuffer_idrt_setup(VADriverContextP ctx,
                                struct encode_state *encode_state,
                                struct intel_encoder_context *encoder_context)
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    struct gen6_interface_descriptor_data *desc;

    bo = mfc_context->gpe_context.idrt.bo;

    assert(bo->virtual);

    for (i = 0; i < mfc_context->gpe_context.num_kernels; i++) {
        struct i965_kernel *kernel;

        kernel = &mfc_context->gpe_context.kernels[i];
        assert(sizeof(*desc) == 32);

        /* Setup the descriptor table */
        memset(desc, 0, sizeof(*desc));
        desc->desc0.kernel_start_pointer = (kernel->bo->offset >> 6);
        desc->desc2.sampler_count = 0;
        desc->desc2.sampler_state_pointer = 0;
        desc->desc3.binding_table_entry_count = 2;
        desc->desc3.binding_table_pointer = (BINDING_TABLE_OFFSET(0) >> 5);
        desc->desc4.constant_urb_entry_read_offset = 0;
        desc->desc4.constant_urb_entry_read_length = 4;

        dri_bo_emit_reloc(bo,
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
                          i * sizeof(*desc) + offsetof(struct gen6_interface_descriptor_data, desc0),
gen6_mfc_batchbuffer_constant_setup(VADriverContextP ctx,
                                    struct encode_state *encode_state,
                                    struct intel_encoder_context *encoder_context)
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;

gen6_mfc_batchbuffer_emit_object_command(struct intel_batchbuffer *batch,
                                         int batchbuffer_offset,

    BEGIN_BATCH(batch, 12);

    OUT_BATCH(batch, CMD_MEDIA_OBJECT | (12 - 2));
    OUT_BATCH(batch, index);
    OUT_BATCH(batch, 0);
    OUT_BATCH(batch, 0);
    OUT_BATCH(batch, 0);
    OUT_BATCH(batch, 0);

    OUT_BATCH(batch, head_offset);
    OUT_BATCH(batch, batchbuffer_offset);

              number_mb_cmds << 16 |

    ADVANCE_BATCH(batch);
gen6_mfc_avc_batchbuffer_slice_command(VADriverContextP ctx,
                                       struct intel_encoder_context *encoder_context,
                                       VAEncSliceParameterBufferH264 *slice_param,
                                       unsigned short head_size,
                                       unsigned short tail_size,
                                       int batchbuffer_offset,

    struct intel_batchbuffer *batch = encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
    int total_mbs = slice_param->num_macroblocks;
    int number_mb_cmds = 512;
    int starting_mb = 0;
    int last_object = 0;
    int first_object = 1;

    int index = (slice_param->slice_type == SLICE_TYPE_I) ? MFC_BATCHBUFFER_AVC_INTRA : MFC_BATCHBUFFER_AVC_INTER;

    for (i = 0; i < total_mbs / number_mb_cmds; i++) {
        last_object = (total_mbs - starting_mb) == number_mb_cmds;
        mb_x = (slice_param->macroblock_address + starting_mb) % width_in_mbs;
        mb_y = (slice_param->macroblock_address + starting_mb) / width_in_mbs;
        assert(mb_x <= 255 && mb_y <= 255);

        starting_mb += number_mb_cmds;

        gen6_mfc_batchbuffer_emit_object_command(batch,

            head_offset += head_size;
            batchbuffer_offset += head_size;

            head_offset += tail_size;
            batchbuffer_offset += tail_size;

        batchbuffer_offset += number_mb_cmds * 3;

        number_mb_cmds = total_mbs % number_mb_cmds;
        mb_x = (slice_param->macroblock_address + starting_mb) % width_in_mbs;
        mb_y = (slice_param->macroblock_address + starting_mb) / width_in_mbs;
        assert(mb_x <= 255 && mb_y <= 255);
        starting_mb += number_mb_cmds;

        gen6_mfc_batchbuffer_emit_object_command(batch,
 * return size in OWords (16 bytes)

gen6_mfc_avc_batchbuffer_slice(VADriverContextP ctx,
                               struct encode_state *encode_state,
                               struct intel_encoder_context *encoder_context,
                               int batchbuffer_offset)
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    struct intel_batchbuffer *slice_batch = mfc_context->aux_batchbuffer;
    VAEncSequenceParameterBufferH264 *pSequenceParameter = (VAEncSequenceParameterBufferH264 *)encode_state->seq_param_ext->buffer;
    VAEncPictureParameterBufferH264 *pPicParameter = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
    VAEncSliceParameterBufferH264 *pSliceParameter = (VAEncSliceParameterBufferH264 *)encode_state->slice_params_ext[slice_index]->buffer;
    int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
    int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
    int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
    int last_slice = (pSliceParameter->macroblock_address + pSliceParameter->num_macroblocks) == (width_in_mbs * height_in_mbs);
    int qp = pPicParameter->pic_init_qp + pSliceParameter->slice_qp_delta;
    unsigned int rate_control_mode = encoder_context->rate_control_mode;
    unsigned char *slice_header = NULL;
    int slice_header_length_in_bits = 0;
    unsigned int tail_data[] = { 0x0, 0x0 };

    int old_used = intel_batchbuffer_used_size(slice_batch), used;
    unsigned short head_size, tail_size;

    head_offset = old_used / 16;
    gen6_mfc_avc_slice_state(ctx,
                             (rate_control_mode == VA_RC_CBR),

    if (slice_index == 0)
        gen6_mfc_avc_pipeline_header_programing(ctx, encode_state, encoder_context, slice_batch);

    slice_header_length_in_bits = build_avc_slice_header(pSequenceParameter, pPicParameter, pSliceParameter, &slice_header);

    mfc_context->insert_object(ctx,
                               (unsigned int *)slice_header,
                               ALIGN(slice_header_length_in_bits, 32) >> 5,
                               slice_header_length_in_bits & 0x1f,
                               5, /* first 5 bytes are start code + nal unit type */

    intel_batchbuffer_align(slice_batch, 16); /* aligned by an Oword */
    used = intel_batchbuffer_used_size(slice_batch);
    head_size = (used - old_used) / 16;

    if (rate_control_mode == VA_RC_CBR) {
        qp = mfc_context->bit_rate_control_context[1 - is_intra].QpPrimeY;

    mfc_context->insert_object(ctx,

    mfc_context->insert_object(ctx,

    intel_batchbuffer_align(slice_batch, 16); /* aligned by an Oword */
    used = intel_batchbuffer_used_size(slice_batch);
    tail_size = (used - old_used) / 16;

    gen6_mfc_avc_batchbuffer_slice_command(ctx,

    return head_size + tail_size + pSliceParameter->num_macroblocks * 3;
gen6_mfc_avc_batchbuffer_pipeline(VADriverContextP ctx,
                                  struct encode_state *encode_state,
                                  struct intel_encoder_context *encoder_context)
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    struct intel_batchbuffer *batch = encoder_context->base.batch;
    int i, size, offset = 0;

    intel_batchbuffer_start_atomic(batch, 0x4000);
    gen6_gpe_pipeline_setup(ctx, &mfc_context->gpe_context, batch);

    for (i = 0; i < encode_state->num_slice_params_ext; i++) {
        size = gen6_mfc_avc_batchbuffer_slice(ctx, encode_state, encoder_context, i, offset);

    intel_batchbuffer_end_atomic(batch);
    intel_batchbuffer_flush(batch);

gen6_mfc_build_avc_batchbuffer(VADriverContextP ctx,
                               struct encode_state *encode_state,
                               struct intel_encoder_context *encoder_context)
    gen6_mfc_batchbuffer_surfaces_setup(ctx, encode_state, encoder_context);
    gen6_mfc_batchbuffer_idrt_setup(ctx, encode_state, encoder_context);
    gen6_mfc_batchbuffer_constant_setup(ctx, encode_state, encoder_context);
    gen6_mfc_avc_batchbuffer_pipeline(ctx, encode_state, encoder_context);

gen6_mfc_avc_hardware_batchbuffer(VADriverContextP ctx,
                                  struct encode_state *encode_state,
                                  struct intel_encoder_context *encoder_context)
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;

    gen6_mfc_build_avc_batchbuffer(ctx, encode_state, encoder_context);
    dri_bo_reference(mfc_context->mfc_batchbuffer_surface.bo);

    return mfc_context->mfc_batchbuffer_surface.bo;
gen6_mfc_avc_pipeline_programing(VADriverContextP ctx,
                                 struct encode_state *encode_state,
                                 struct intel_encoder_context *encoder_context)
    struct intel_batchbuffer *batch = encoder_context->base.batch;

    dri_bo *slice_batch_bo = gen6_mfc_avc_software_batchbuffer(ctx, encode_state, encoder_context);
    dri_bo *slice_batch_bo = gen6_mfc_avc_hardware_batchbuffer(ctx, encode_state, encoder_context);

    intel_batchbuffer_start_atomic_bcs(batch, 0x4000);
    intel_batchbuffer_emit_mi_flush(batch);

    // picture level programming
    gen6_mfc_avc_pipeline_picture_programing(ctx, encode_state, encoder_context);

    BEGIN_BCS_BATCH(batch, 2);
    OUT_BCS_BATCH(batch, MI_BATCH_BUFFER_START | (1 << 8));
    OUT_BCS_RELOC(batch,
                  I915_GEM_DOMAIN_COMMAND, 0,
    ADVANCE_BCS_BATCH(batch);

    intel_batchbuffer_end_atomic(batch);

    dri_bo_unreference(slice_batch_bo);
gen6_mfc_avc_encode_picture(VADriverContextP ctx,
                            struct encode_state *encode_state,
                            struct intel_encoder_context *encoder_context)
    struct gen6_mfc_context *mfc_context = encoder_context->mfc_context;
    unsigned int rate_control_mode = encoder_context->rate_control_mode;
    int MAX_CBR_INTERATE = 4;
    int current_frame_bits_size;

    for (i = 0; i < MAX_CBR_INTERATE; i++) {
        gen6_mfc_init(ctx, encoder_context);
        gen6_mfc_avc_prepare(ctx, encode_state, encoder_context);
        /* Programming the BCS pipeline */
        gen6_mfc_avc_pipeline_programing(ctx, encode_state, encoder_context); // filling the pipeline
        gen6_mfc_run(ctx, encode_state, encoder_context);
        gen6_mfc_stop(ctx, encode_state, encoder_context, &current_frame_bits_size);
        if (rate_control_mode == VA_RC_CBR) {
            //gen6_mfc_hrd_context_check(encode_state, mfc_context);
            if (gen6_mfc_bit_rate_control_context_update(encode_state, mfc_context, current_frame_bits_size)) {
                gen6_mfc_hrd_context_update(encode_state, mfc_context);

    return VA_STATUS_SUCCESS;
gen6_mfc_pipeline(VADriverContextP ctx,
                  struct encode_state *encode_state,
                  struct intel_encoder_context *encoder_context)

    case VAProfileH264Baseline:
    case VAProfileH264Main:
    case VAProfileH264High:
        vaStatus = gen6_mfc_avc_encode_picture(ctx, encode_state, encoder_context);

        /* FIXME: add for other profile */
        vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
gen6_mfc_context_destroy(void *context)
    struct gen6_mfc_context *mfc_context = context;

    dri_bo_unreference(mfc_context->post_deblocking_output.bo);
    mfc_context->post_deblocking_output.bo = NULL;

    dri_bo_unreference(mfc_context->pre_deblocking_output.bo);
    mfc_context->pre_deblocking_output.bo = NULL;

    dri_bo_unreference(mfc_context->uncompressed_picture_source.bo);
    mfc_context->uncompressed_picture_source.bo = NULL;

    dri_bo_unreference(mfc_context->mfc_indirect_pak_bse_object.bo);
    mfc_context->mfc_indirect_pak_bse_object.bo = NULL;

    for (i = 0; i < NUM_MFC_DMV_BUFFERS; i++) {
        dri_bo_unreference(mfc_context->direct_mv_buffers[i].bo);
        mfc_context->direct_mv_buffers[i].bo = NULL;

    dri_bo_unreference(mfc_context->intra_row_store_scratch_buffer.bo);
    mfc_context->intra_row_store_scratch_buffer.bo = NULL;

    dri_bo_unreference(mfc_context->macroblock_status_buffer.bo);
    mfc_context->macroblock_status_buffer.bo = NULL;

    dri_bo_unreference(mfc_context->deblocking_filter_row_store_scratch_buffer.bo);
    mfc_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;

    dri_bo_unreference(mfc_context->bsd_mpc_row_store_scratch_buffer.bo);
    mfc_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;

    for (i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++) {
        dri_bo_unreference(mfc_context->reference_surfaces[i].bo);
        mfc_context->reference_surfaces[i].bo = NULL;

    i965_gpe_context_destroy(&mfc_context->gpe_context);

    dri_bo_unreference(mfc_context->mfc_batchbuffer_surface.bo);
    mfc_context->mfc_batchbuffer_surface.bo = NULL;

    dri_bo_unreference(mfc_context->aux_batchbuffer_surface.bo);
    mfc_context->aux_batchbuffer_surface.bo = NULL;

    if (mfc_context->aux_batchbuffer)
        intel_batchbuffer_free(mfc_context->aux_batchbuffer);

    mfc_context->aux_batchbuffer = NULL;
Bool gen6_mfc_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context)
    struct gen6_mfc_context *mfc_context = calloc(1, sizeof(struct gen6_mfc_context));

    mfc_context->gpe_context.surface_state_binding_table.length = (SURFACE_STATE_PADDED_SIZE + sizeof(unsigned int)) * MAX_MEDIA_SURFACES_GEN6;

    mfc_context->gpe_context.idrt.max_entries = MAX_GPE_KERNELS;
    mfc_context->gpe_context.idrt.entry_size = sizeof(struct gen6_interface_descriptor_data);

    mfc_context->gpe_context.curbe.length = 32 * 4;

    mfc_context->gpe_context.vfe_state.max_num_threads = 60 - 1;
    mfc_context->gpe_context.vfe_state.num_urb_entries = 16;
    mfc_context->gpe_context.vfe_state.gpgpu_mode = 0;
    mfc_context->gpe_context.vfe_state.urb_entry_size = 59 - 1;
    mfc_context->gpe_context.vfe_state.curbe_allocation_size = 37 - 1;

    i965_gpe_load_kernels(ctx,
                          &mfc_context->gpe_context,

    mfc_context->pipe_mode_select = gen6_mfc_pipe_mode_select;
    mfc_context->set_surface_state = gen6_mfc_surface_state;
    mfc_context->ind_obj_base_addr_state = gen6_mfc_ind_obj_base_addr_state;
    mfc_context->avc_img_state = gen6_mfc_avc_img_state;
    mfc_context->avc_qm_state = gen6_mfc_avc_qm_state;
    mfc_context->avc_fqm_state = gen6_mfc_avc_fqm_state;
    mfc_context->insert_object = gen6_mfc_avc_insert_object;
    mfc_context->buffer_suface_setup = i965_gpe_buffer_suface_setup;

    encoder_context->mfc_context = mfc_context;
    encoder_context->mfc_context_destroy = gen6_mfc_context_destroy;
    encoder_context->mfc_pipeline = gen6_mfc_pipeline;