/*
 * Copyright © 2010-2011 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Zhou Chang <chang.zhou@intel.com>
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>

#include "intel_batchbuffer.h"
#include "i965_defines.h"
#include "i965_structs.h"
#include "i965_drv_video.h"
#include "i965_encoder.h"
#include "i965_encoder_utils.h"

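/*
 * MFX_PIPE_MODE_SELECT puts the fixed-function MFX engine into AVC encode
 * mode.  This gen6 variant hard-codes the standard select to AVC, uses the
 * post-deblocking output path and leaves stream-out disabled (stream-out
 * would be needed for more advanced QP/bitrate control).
 */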
static void
gen6_mfc_pipe_mode_select(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;

    BEGIN_BCS_BATCH(batch, 4);

    OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (4 - 2));
    OUT_BCS_BATCH(batch,
                  (0 << 10) |   /* disable Stream-Out; advanced QP/bitrate control needs it enabled */
                  (1 << 9)  |   /* Post Deblocking Output */
                  (0 << 8)  |   /* Pre Deblocking Output */
                  (0 << 7)  |   /* disable TLB prefetch */
                  (0 << 5)  |   /* not in stitch mode */
                  (1 << 4)  |   /* encoding mode */
                  (2 << 0));    /* Standard Select: AVC */
    OUT_BCS_BATCH(batch,
                  (0 << 20) |   /* round flag in PB slice */
                  (0 << 19) |   /* round flag in Intra8x8 */
                  (0 << 7)  |   /* expand NOA bus flag */
                  (1 << 6)  |   /* must be 1 */
                  (0 << 5)  |   /* disable clock gating for NOA */
                  (0 << 4)  |   /* terminate if AVC motion and POC table error occurs */
                  (0 << 3)  |   /* terminate if AVC mbdata error occurs */
                  (0 << 2)  |   /* terminate if AVC CABAC/CAVLC decode error occurs */
                  (0 << 1)  |   /* AVC long field motion vector */
                  (0 << 0));    /* always calculate AVC ILDB boundary strength */
    OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
}

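/*
 * The gen7 variant of the same command: one DWord longer, must use the long
 * command format, and takes the codec standard (AVC or MPEG-2) as an explicit
 * parameter instead of hard-coding AVC.
 */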
static void
gen7_mfc_pipe_mode_select(VADriverContextP ctx,
                          int standard_select,
                          struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;

    assert(standard_select == MFX_FORMAT_MPEG2 ||
           standard_select == MFX_FORMAT_AVC);

    BEGIN_BCS_BATCH(batch, 5);
    OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (5 - 2));
    OUT_BCS_BATCH(batch,
                  (MFX_LONG_MODE << 17) |   /* must be the long format for the encoder */
                  (MFD_MODE_VLD << 15) |    /* VLD mode */
                  (0 << 10) |               /* disable Stream-Out */
                  (1 << 9)  |               /* Post Deblocking Output */
                  (0 << 8)  |               /* Pre Deblocking Output */
                  (0 << 5)  |               /* not in stitch mode */
                  (1 << 4)  |               /* encoding mode */
                  (standard_select << 0));  /* standard select: AVC or MPEG-2 */
    OUT_BCS_BATCH(batch,
                  (0 << 7) |    /* expand NOA bus flag */
                  (0 << 6) |    /* disable slice-level clock gating */
                  (0 << 5) |    /* disable clock gating for NOA */
                  (0 << 4) |    /* terminate if AVC motion and POC table error occurs */
                  (0 << 3) |    /* terminate if AVC mbdata error occurs */
                  (0 << 2) |    /* terminate if AVC CABAC/CAVLC decode error occurs */
                  (0 << 1) |
                  (0 << 0));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
}

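/*
 * MFX_SURFACE_STATE describes the reconstructed frame surface: an NV12
 * (planar 4:2:0, interleaved U/V) Y-tiled buffer.  Width, height and pitch
 * come from the surface_state fields filled in by gen6_mfc_avc_prepare(),
 * and the U/V plane starts h_pitch rows below the Y plane.
 */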
static void
gen6_mfc_surface_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;

    BEGIN_BCS_BATCH(batch, 6);

    OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch,
                  ((mfc_context->surface_state.height - 1) << 19) |
                  ((mfc_context->surface_state.width - 1) << 6));
    OUT_BCS_BATCH(batch,
                  (MFX_SURFACE_PLANAR_420_8 << 28) |    /* 420 planar YUV surface */
                  (1 << 27) |                           /* must be 1 for interleaved U/V, hardware requirement */
                  (0 << 22) |                           /* surface object control state, FIXME??? */
                  ((mfc_context->surface_state.w_pitch - 1) << 3) |     /* pitch */
                  (0 << 2)  |                           /* must be 0 for interleaved U/V */
                  (1 << 1)  |                           /* must be Y-tiled */
                  (I965_TILEWALK_YMAJOR << 0));         /* tile walk, TILEWALK_YMAJOR */
    OUT_BCS_BATCH(batch,
                  (0 << 16) |                                   /* must be 0 for interleaved U/V */
                  (mfc_context->surface_state.h_pitch));        /* y offset for U(Cb) */
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfc_surface_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;

    BEGIN_BCS_BATCH(batch, 6);

    OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch,
                  ((mfc_context->surface_state.height - 1) << 18) |
                  ((mfc_context->surface_state.width - 1) << 4));
    OUT_BCS_BATCH(batch,
                  (MFX_SURFACE_PLANAR_420_8 << 28) |    /* 420 planar YUV surface */
                  (1 << 27) |                           /* must be 1 for interleaved U/V, hardware requirement */
                  (0 << 22) |                           /* surface object control state, FIXME??? */
                  ((mfc_context->surface_state.w_pitch - 1) << 3) |     /* pitch */
                  (0 << 2)  |                           /* must be 0 for interleaved U/V */
                  (1 << 1)  |                           /* must be tiled */
                  (I965_TILEWALK_YMAJOR << 0));         /* tile walk, TILEWALK_YMAJOR */
    OUT_BCS_BATCH(batch,
                  (0 << 16) |                                   /* must be 0 for interleaved U/V */
                  (mfc_context->surface_state.h_pitch));        /* y offset for U(Cb) */
    OUT_BCS_BATCH(batch, 0);
    ADVANCE_BCS_BATCH(batch);
}

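/*
 * MFX_PIPE_BUF_ADDR_STATE points the engine at its frame-level buffers: the
 * post-deblocking (reconstructed) output, the uncompressed source picture,
 * the stream-out/macroblock status buffer, the row-store scratch buffers and
 * up to 16 reference pictures (DWords 7..22).
 */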
static void
gen6_mfc_pipe_buf_addr_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
    int i;

    BEGIN_BCS_BATCH(batch, 24);

    OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (24 - 2));

    OUT_BCS_BATCH(batch, 0);                                    /* pre output addr */

    OUT_BCS_RELOC(batch, mfc_context->post_deblocking_output.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);                                           /* post output addr */

    OUT_BCS_RELOC(batch, mfc_context->uncompressed_picture_source.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);                                           /* uncompressed data */
    OUT_BCS_RELOC(batch, mfc_context->macroblock_status_buffer.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);                                           /* StreamOut data */
    OUT_BCS_RELOC(batch, mfc_context->intra_row_store_scratch_buffer.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);
    OUT_BCS_RELOC(batch, mfc_context->deblocking_filter_row_store_scratch_buffer.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);

    /* 7..22 Reference pictures */
    for (i = 0; i < ARRAY_ELEMS(mfc_context->reference_surfaces); i++) {
        if (mfc_context->reference_surfaces[i].bo != NULL) {
            OUT_BCS_RELOC(batch, mfc_context->reference_surfaces[i].bo,
                          I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                          0);
        } else {
            OUT_BCS_BATCH(batch, 0);
        }
    }

    OUT_BCS_RELOC(batch, mfc_context->macroblock_status_buffer.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);                                           /* Macroblock status buffer */

    ADVANCE_BCS_BATCH(batch);
}

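/*
 * MFX_IND_OBJ_BASE_ADDR_STATE sets the indirect object bases: the motion
 * vector data produced by the VME stage is read from vme_output, and the
 * PAK-BSE output (the compressed bitstream) is written into the coded
 * buffer between .offset and .end_offset.
 */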
static void
gen6_mfc_ind_obj_base_addr_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
    struct gen6_vme_context *vme_context = &gen6_encoder_context->vme_context;

    BEGIN_BCS_BATCH(batch, 11);

    OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    /* MFX Indirect MV Object Base Address */
    OUT_BCS_RELOC(batch, vme_context->vme_output.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    /* MFC Indirect PAK-BSE Object Base Address for Encoder */
    OUT_BCS_RELOC(batch,
                  mfc_context->mfc_indirect_pak_bse_object.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  mfc_context->mfc_indirect_pak_bse_object.offset);
    OUT_BCS_RELOC(batch,
                  mfc_context->mfc_indirect_pak_bse_object.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  mfc_context->mfc_indirect_pak_bse_object.end_offset);

    ADVANCE_BCS_BATCH(batch);
}

static void
gen7_mfc_ind_obj_base_addr_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
    struct gen6_vme_context *vme_context = &gen6_encoder_context->vme_context;

    BEGIN_BCS_BATCH(batch, 11);

    OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    /* MFX Indirect MV Object Base Address */
    OUT_BCS_RELOC(batch, vme_context->vme_output.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
    OUT_BCS_BATCH(batch, 0x80000000);   /* must be set, up to 2G */
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    /* MFC Indirect PAK-BSE Object Base Address for Encoder */
    OUT_BCS_RELOC(batch,
                  mfc_context->mfc_indirect_pak_bse_object.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  mfc_context->mfc_indirect_pak_bse_object.offset);
    OUT_BCS_RELOC(batch,
                  mfc_context->mfc_indirect_pak_bse_object.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  mfc_context->mfc_indirect_pak_bse_object.end_offset);

    ADVANCE_BCS_BATCH(batch);
}

static void
gen6_mfc_bsp_buf_base_addr_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;

    BEGIN_BCS_BATCH(batch, 4);

    OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
    OUT_BCS_RELOC(batch, mfc_context->bsd_mpc_row_store_scratch_buffer.bo,
                  I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
                  0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
}

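/*
 * MFX_AVC_IMG_STATE carries the per-picture parameters: picture size in
 * macroblocks, entropy coding mode, and the rate-control limits and report
 * masks.  For example, a 1280x720 source gives 80x45 = 3600 macroblocks in
 * DWord 1.
 */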
static void
gen6_mfc_avc_img_state(VADriverContextP ctx, struct encode_state *encode_state,
                       struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
    VAEncSequenceParameterBufferH264Ext *pSequenceParameter = (VAEncSequenceParameterBufferH264Ext *)encode_state->seq_param_ext->buffer;
    int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
    int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;

    BEGIN_BCS_BATCH(batch, 13);
    OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (13 - 2));
    OUT_BCS_BATCH(batch,
                  ((width_in_mbs * height_in_mbs) & 0xFFFF));
    OUT_BCS_BATCH(batch,
                  (height_in_mbs << 16) |
                  (width_in_mbs << 0));
    OUT_BCS_BATCH(batch,
                  (0 << 24) |   /* Second Chroma QP Offset */
                  (0 << 16) |   /* Chroma QP Offset */
                  (0 << 14) |   /* Max-bit conformance Intra flag */
                  (0 << 13) |   /* Max Macroblock size conformance Inter flag */
                  (1 << 12) |   /* should always be written as "1" */
                  (0 << 10) |   /* QM Preset Flag */
                  (0 << 8)  |   /* Image Structure */
                  (0 << 0));    /* Current Decoded Image Frame Store ID, reserved in Encode mode */
    OUT_BCS_BATCH(batch,
                  (400 << 16) | /* Minimum Frame size */
                  (0 << 15) |   /* Disable reading of Macroblock Status Buffer */
                  (0 << 14) |   /* Load BitStream Pointer only once, 1 slice 1 frame */
                  (0 << 13) |   /* CABAC 0 word insertion test enable */
                  (1 << 12) |   /* MVUnpackedEnable, compliant with DXVA */
                  (1 << 10) |   /* Chroma Format IDC, 4:2:0 */
                  (1 << 7)  |   /* 0: CAVLC encoding mode, 1: CABAC */
                  (0 << 6)  |   /* only valid for VLD decoding mode */
                  (0 << 5)  |   /* Constrained Intra Prediction Flag, from PPS */
                  (pSequenceParameter->direct_8x8_inference_flag << 4) | /* Direct 8x8 inference flag */
                  (0 << 3)  |   /* only 8x8 IDCT Transform Mode Flag */
                  (1 << 2)  |   /* Frame MB only flag */
                  (0 << 1)  |   /* MBAFF mode is not active */
                  (0 << 0));    /* Field picture flag */
    OUT_BCS_BATCH(batch,
                  (1 << 16) |   /* Frame Size Rate Control Flag */
                  (1 << 9)  |   /* MB level Rate Control Enabling Flag */
                  (1 << 3)  |   /* FrameBitRateMinReportMask */
                  (1 << 2)  |   /* FrameBitRateMaxReportMask */
                  (1 << 1)  |   /* InterMBMaxSizeReportMask */
                  (1 << 0));    /* IntraMBMaxSizeReportMask */
    OUT_BCS_BATCH(batch,            /* Inter and Intra Conformance Max size limit */
                  (0x0600 << 16) |  /* InterMbMaxSz 192 bytes */
                  (0x0800));        /* IntraMbMaxSz 256 bytes */
    OUT_BCS_BATCH(batch, 0x00000000);   /* Reserved: MBZ */
    OUT_BCS_BATCH(batch, 0x01020304);   /* Slice QP Delta for bitrate control */
    OUT_BCS_BATCH(batch, 0xFEFDFCFB);
    OUT_BCS_BATCH(batch, 0x80601004);   /* MAX = 128KB, MIN = 64KB */
    OUT_BCS_BATCH(batch, 0x00800001);
    OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
}

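/*
 * The gen7 layout of MFX_AVC_IMG_STATE is 16 DWords and stores the picture
 * width/height as "minus one" values; the MB-level rate-control DWord is
 * simply zeroed here.
 */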
static void
gen7_mfc_avc_img_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
    int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
    int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;

    BEGIN_BCS_BATCH(batch, 16);
    OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (16 - 2));
    OUT_BCS_BATCH(batch,
                  ((width_in_mbs * height_in_mbs) & 0xFFFF));
    OUT_BCS_BATCH(batch,
                  ((height_in_mbs - 1) << 16) |
                  ((width_in_mbs - 1) << 0));
    OUT_BCS_BATCH(batch,
                  (0 << 24) |   /* Second Chroma QP Offset */
                  (0 << 16) |   /* Chroma QP Offset */
                  (0 << 14) |   /* Max-bit conformance Intra flag */
                  (0 << 13) |   /* Max Macroblock size conformance Inter flag */
                  (0 << 12) |   /* FIXME: Weighted_Pred_Flag */
                  (0 << 10) |   /* FIXME: Weighted_BiPred_Idc */
                  (0 << 8)  |   /* FIXME: Image Structure */
                  (0 << 0));    /* Current Decoded Image Frame Store ID, reserved in Encode mode */
    OUT_BCS_BATCH(batch,
                  (0 << 16) |   /* Minimum Frame size */
                  (0 << 15) |   /* Disable reading of Macroblock Status Buffer */
                  (0 << 14) |   /* Load BitStream Pointer only once, 1 slice 1 frame */
                  (0 << 13) |   /* CABAC 0 word insertion test enable */
                  (1 << 12) |   /* MVUnpackedEnable, compliant with DXVA */
                  (1 << 10) |   /* Chroma Format IDC, 4:2:0 */
                  (0 << 9)  |   /* FIXME: MbMvFormatFlag */
                  (1 << 7)  |   /* 0: CAVLC encoding mode, 1: CABAC */
                  (0 << 6)  |   /* only valid for VLD decoding mode */
                  (0 << 5)  |   /* Constrained Intra Prediction Flag, from PPS */
                  (0 << 4)  |   /* Direct 8x8 inference flag */
                  (0 << 3)  |   /* only 8x8 IDCT Transform Mode Flag */
                  (1 << 2)  |   /* Frame MB only flag */
                  (0 << 1)  |   /* MBAFF mode is not active */
                  (0 << 0));    /* Field picture flag */
    OUT_BCS_BATCH(batch, 0);    /* Mainly about MB rate control and debug, just ignore it */
    OUT_BCS_BATCH(batch,        /* Inter and Intra Conformance Max size limit */
                  (0xBB8 << 16) |   /* InterMbMaxSz */
                  (0xEE8));         /* IntraMbMaxSz */
    OUT_BCS_BATCH(batch, 0);    /* Reserved */
    OUT_BCS_BATCH(batch, 0);    /* Slice QP Delta for bitrate control */
    OUT_BCS_BATCH(batch, 0);    /* Slice QP Delta for bitrate control */
    OUT_BCS_BATCH(batch, 0x8C000000);
    OUT_BCS_BATCH(batch, 0x00010000);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
}

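/*
 * MFX_AVC_DIRECTMODE_STATE programs the direct-mode motion vector buffers
 * (one top/bottom pair per reference frame plus the current frame), followed
 * by what appears to be the reference POC table (32 entries, written here as
 * i/2).
 */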
static void gen6_mfc_avc_directmode_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
    int i;

    BEGIN_BCS_BATCH(batch, 69);

    OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));

    /* Reference frames and Current frames */
    for (i = 0; i < NUM_MFC_DMV_BUFFERS; i++) {
        if (mfc_context->direct_mv_buffers[i].bo != NULL) {
            OUT_BCS_RELOC(batch, mfc_context->direct_mv_buffers[i].bo,
                          I915_GEM_DOMAIN_INSTRUCTION, 0,
                          0);
        } else {
            OUT_BCS_BATCH(batch, 0);
        }
    }

    for (i = 0; i < 32; i++) {
        OUT_BCS_BATCH(batch, i / 2);
    }
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
}

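/*
 * MFX_AVC_SLICE_STATE sets the per-slice parameters: slice type, reference
 * count, QP, deblocking control and the CBR rate-control behaviour (counter
 * reset, panic mode, QP clamping range around the target), plus the offset
 * in the coded buffer where the packed slice data starts.
 */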
static void gen6_mfc_avc_slice_state(VADriverContextP ctx,
                                     int slice_type,
                                     struct encode_state *encode_state,
                                     struct gen6_encoder_context *gen6_encoder_context,
                                     int rate_control_enable,
                                     int qp)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
    VAEncSliceParameterBufferH264Ext *pSliceParameter = (VAEncSliceParameterBufferH264Ext *)encode_state->slice_params_ext[0]->buffer; /* TODO: multi slices support */

    BEGIN_BCS_BATCH(batch, 11);

    OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2));

    OUT_BCS_BATCH(batch, slice_type);           /* Slice Type: I/P/B */

    if (slice_type == SLICE_TYPE_I)
        OUT_BCS_BATCH(batch, 0);                /* no reference frames and pred_weight_table */
    else
        OUT_BCS_BATCH(batch, 0x00010000);       /* 1 reference frame */

    OUT_BCS_BATCH(batch,
                  (pSliceParameter->direct_spatial_mv_pred_flag << 29) |    /* Direct Prediction Type */
                  (0 << 24) |   /* Enable deblocking operation */
                  (qp << 16) |  /* Slice Quantization Parameter */
                  (0 << 0));
    OUT_BCS_BATCH(batch, 0);    /* First MB X & Y, the position of the current slice */
    OUT_BCS_BATCH(batch, (((mfc_context->surface_state.height + 15) / 16) << 16));

    OUT_BCS_BATCH(batch,
                  (rate_control_enable << 31) |     /* in CBR mode RateControlCounterEnable = enable */
                  (1 << 30) |   /* ResetRateControlCounter */
                  (0 << 28) |   /* RC Trigger Mode = Always Rate Control */
                  (8 << 24) |   /* RC Stable Tolerance, middle level */
                  (rate_control_enable << 23) |     /* RC Panic Enable */
                  (0 << 22) |   /* QP mode, don't modify CBP */
                  (0 << 21) |   /* MB Type Direct Conversion Disable */
                  (0 << 20) |   /* MB Type Skip Conversion Disable */
                  (1 << 19) |   /* IsLastSlice */
                  (0 << 18) |   /* BitstreamOutputFlag: Compressed BitStream Output Disable Flag, 0: enable, 1: disable */
                  (1 << 17) |   /* HeaderPresentFlag */
                  (1 << 16) |   /* SliceData PresentFlag */
                  (1 << 15) |   /* TailPresentFlag */
                  (1 << 13) |   /* RBSP NAL TYPE */
                  (0 << 12));   /* CabacZeroWordInsertionEnable */

    OUT_BCS_BATCH(batch, mfc_context->mfc_indirect_pak_bse_object.offset);

    OUT_BCS_BATCH(batch, (24 << 24) |   /* Target QP - 24 is the lowest QP */
                         (20 << 16) |   /* Target QP + 20 is the highest QP */
                         (0 << 0));

    OUT_BCS_BATCH(batch, 0x08888888);
    OUT_BCS_BATCH(batch, 0);

    ADVANCE_BCS_BATCH(batch);
}

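/*
 * The quantizer matrix commands below upload flat scaling lists: every QM
 * entry is 16 (0x10 bytes packed four at a time) and every FQM entry is
 * 0x1000, which appears to be the 16-bit fixed-point reciprocal (65536/16).
 */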
static void gen6_mfc_avc_qm_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    int i;

    BEGIN_BCS_BATCH(batch, 58);

    OUT_BCS_BATCH(batch, MFX_AVC_QM_STATE | 56);
    OUT_BCS_BATCH(batch, 0xFF);
    for (i = 0; i < 56; i++) {
        OUT_BCS_BATCH(batch, 0x10101010);
    }

    ADVANCE_BCS_BATCH(batch);
}

static void gen6_mfc_avc_fqm_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    int i;

    BEGIN_BCS_BATCH(batch, 113);
    OUT_BCS_BATCH(batch, MFC_AVC_FQM_STATE | (113 - 2));

    for (i = 0; i < 112; i++) {
        OUT_BCS_BATCH(batch, 0x10001000);
    }

    ADVANCE_BCS_BATCH(batch);
}

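/*
 * On gen7 the same flat matrices are loaded through the generic
 * MFX_QM_STATE/MFX_FQM_STATE commands, parameterized by matrix type and
 * length, so the helpers below just wrap intel_batchbuffer_data().
 */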
static void
gen7_mfc_qm_state(VADriverContextP ctx,
                  int qm_type,
                  unsigned int *qm,
                  int qm_length,
                  struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    unsigned int qm_buffer[16];

    assert(qm_length <= 16);
    assert(sizeof(*qm) == 4);
    memcpy(qm_buffer, qm, qm_length * 4);

    BEGIN_BCS_BATCH(batch, 18);
    OUT_BCS_BATCH(batch, MFX_QM_STATE | (18 - 2));
    OUT_BCS_BATCH(batch, qm_type << 0);
    intel_batchbuffer_data(batch, qm_buffer, 16 * 4);
    ADVANCE_BCS_BATCH(batch);
}

static void gen7_mfc_avc_qm_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    unsigned int qm[16] = {
        0x10101010, 0x10101010, 0x10101010, 0x10101010,
        0x10101010, 0x10101010, 0x10101010, 0x10101010,
        0x10101010, 0x10101010, 0x10101010, 0x10101010,
        0x10101010, 0x10101010, 0x10101010, 0x10101010
    };

    gen7_mfc_qm_state(ctx, MFX_QM_AVC_4X4_INTRA_MATRIX, qm, 12, gen6_encoder_context);
    gen7_mfc_qm_state(ctx, MFX_QM_AVC_4X4_INTER_MATRIX, qm, 12, gen6_encoder_context);
    gen7_mfc_qm_state(ctx, MFX_QM_AVC_8x8_INTRA_MATRIX, qm, 16, gen6_encoder_context);
    gen7_mfc_qm_state(ctx, MFX_QM_AVC_8x8_INTER_MATRIX, qm, 16, gen6_encoder_context);
}

static void
gen7_mfc_fqm_state(VADriverContextP ctx,
                   int fqm_type,
                   unsigned int *fqm,
                   int fqm_length,
                   struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    unsigned int fqm_buffer[32];

    assert(fqm_length <= 32);
    assert(sizeof(*fqm) == 4);
    memcpy(fqm_buffer, fqm, fqm_length * 4);

    BEGIN_BCS_BATCH(batch, 34);
    OUT_BCS_BATCH(batch, MFX_FQM_STATE | (34 - 2));
    OUT_BCS_BATCH(batch, fqm_type << 0);
    intel_batchbuffer_data(batch, fqm_buffer, 32 * 4);
    ADVANCE_BCS_BATCH(batch);
}

static void gen7_mfc_avc_fqm_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    unsigned int qm[32] = {
        0x10001000, 0x10001000, 0x10001000, 0x10001000,
        0x10001000, 0x10001000, 0x10001000, 0x10001000,
        0x10001000, 0x10001000, 0x10001000, 0x10001000,
        0x10001000, 0x10001000, 0x10001000, 0x10001000,
        0x10001000, 0x10001000, 0x10001000, 0x10001000,
        0x10001000, 0x10001000, 0x10001000, 0x10001000,
        0x10001000, 0x10001000, 0x10001000, 0x10001000,
        0x10001000, 0x10001000, 0x10001000, 0x10001000
    };

    gen7_mfc_fqm_state(ctx, MFX_QM_AVC_4X4_INTRA_MATRIX, qm, 24, gen6_encoder_context);
    gen7_mfc_fqm_state(ctx, MFX_QM_AVC_4X4_INTER_MATRIX, qm, 24, gen6_encoder_context);
    gen7_mfc_fqm_state(ctx, MFX_QM_AVC_8x8_INTRA_MATRIX, qm, 32, gen6_encoder_context);
    gen7_mfc_fqm_state(ctx, MFX_QM_AVC_8x8_INTER_MATRIX, qm, 32, gen6_encoder_context);
}

static void gen6_mfc_avc_ref_idx_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    int i;

    BEGIN_BCS_BATCH(batch, 10);
    OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | 8);
    OUT_BCS_BATCH(batch, 0);                // Select L0
    OUT_BCS_BATCH(batch, 0x80808020);       // Only 1 reference
    for (i = 0; i < 7; i++) {
        OUT_BCS_BATCH(batch, 0x80808080);
    }
    ADVANCE_BCS_BATCH(batch);

    BEGIN_BCS_BATCH(batch, 10);
    OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | 8);
    OUT_BCS_BATCH(batch, 1);                // Select L1
    OUT_BCS_BATCH(batch, 0x80808022);       // Only 1 reference
    for (i = 0; i < 7; i++) {
        OUT_BCS_BATCH(batch, 0x80808080);
    }
    ADVANCE_BCS_BATCH(batch);
}

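/*
 * MFC_AVC_INSERT_OBJECT pushes raw bits (SPS/PPS/slice headers, or the
 * trailing bits at the end of the stream) straight into the output
 * bitstream; the flags select emulation-prevention byte insertion and
 * whether this chunk ends a header or a slice.
 */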
static void
gen6_mfc_avc_insert_object(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context,
                           unsigned int *insert_data, int length_in_dws, int data_bits_in_last_dw,
                           int skip_emul_byte_count, int is_last_header, int is_end_of_slice, int emulation_flag)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;

    BEGIN_BCS_BATCH(batch, length_in_dws + 2);

    OUT_BCS_BATCH(batch, MFC_AVC_INSERT_OBJECT | (length_in_dws + 2 - 2));
    OUT_BCS_BATCH(batch,
                  (0 << 16) |   /* always start at offset 0 */
                  (data_bits_in_last_dw << 8) |
                  (skip_emul_byte_count << 4) |
                  (emulation_flag << 3) |
                  ((!!is_last_header) << 2) |
                  ((!!is_end_of_slice) << 1) |
                  (0 << 0));    /* FIXME: ??? */

    intel_batchbuffer_data(batch, insert_data, length_in_dws * 4);
    ADVANCE_BCS_BATCH(batch);
}

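/*
 * One MFC_AVC_PAK_OBJECT is emitted per macroblock.  The intra variant takes
 * the mode-decision words produced by the VME kernel (msg[]) and a per-MB
 * size budget: the target is intra_mb_size_in_bits converted to 16-bit
 * words, and the hardware maximum is clamped to twice that (capped at 255
 * words).
 */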
static int
gen6_mfc_avc_pak_object_intra(VADriverContextP ctx, int x, int y, int end_mb, int qp, unsigned int *msg,
                              struct gen6_encoder_context *gen6_encoder_context,
                              int intra_mb_size_in_bits)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    int len_in_dwords = 11;
    unsigned char target_mb_size = intra_mb_size_in_bits / 16;  // in words
    unsigned char max_mb_size = target_mb_size * 2 > 255 ? 255 : target_mb_size * 2;

    BEGIN_BCS_BATCH(batch, len_in_dwords);

    OUT_BCS_BATCH(batch, MFC_AVC_PAK_OBJECT | (len_in_dwords - 2));
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch, 0);
    OUT_BCS_BATCH(batch,
                  (0 << 24) |   /* PackedMvNum, Debug */
                  (0 << 20) |   /* No motion vector */
                  (1 << 19) |   /* CbpDcY */
                  (1 << 18) |   /* CbpDcU */
                  (1 << 17) |   /* CbpDcV */
                  (0 << 0));
    OUT_BCS_BATCH(batch, (0xFFFF << 16) | (y << 8) | x);    /* Code Block Pattern for Y */
    OUT_BCS_BATCH(batch, 0x000F000F);                       /* Code Block Pattern */
    OUT_BCS_BATCH(batch, (0 << 27) | (end_mb << 26) | qp);  /* Last MB */

    /* Stuff for Intra MB */
    OUT_BCS_BATCH(batch, msg[1]);           /* using Intra16x16, not the 4x4 pred mode */
    OUT_BCS_BATCH(batch, msg[2]);
    OUT_BCS_BATCH(batch, msg[3] & 0xFC);

    /* MaxSizeInWord and TargetSizeInWord */
    OUT_BCS_BATCH(batch, (max_mb_size << 24) |
                         (target_mb_size << 16));

    ADVANCE_BCS_BATCH(batch);

    return len_in_dwords;
}

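/*
 * The inter variant reads its motion vectors indirectly from the VME output
 * (32 MVs starting at `offset` in the MV buffer) and marks every macroblock
 * as P_L0_16x16; B slices only differ by the extra 0xF << 28 field written
 * in the QP/last-MB DWord.
 */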
static int gen6_mfc_avc_pak_object_inter(VADriverContextP ctx, int x, int y, int end_mb, int qp, unsigned int offset,
                                         struct gen6_encoder_context *gen6_encoder_context,
                                         int inter_mb_size_in_bits, int slice_type)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    int len_in_dwords = 11;
    unsigned char target_mb_size = inter_mb_size_in_bits / 16;  // in words
    unsigned char max_mb_size = target_mb_size * 16 > 255 ? 255 : target_mb_size * 16;

    BEGIN_BCS_BATCH(batch, len_in_dwords);

    OUT_BCS_BATCH(batch, MFC_AVC_PAK_OBJECT | (len_in_dwords - 2));

    OUT_BCS_BATCH(batch, 32);       /* 32 MV */
    OUT_BCS_BATCH(batch, offset);

    OUT_BCS_BATCH(batch,
                  (1 << 24) |   /* PackedMvNum, Debug */
                  (4 << 20) |   /* 8 MV, SNB doesn't use it */
                  (1 << 19) |   /* CbpDcY */
                  (1 << 18) |   /* CbpDcU */
                  (1 << 17) |   /* CbpDcV */
                  (0 << 15) |   /* Transform8x8Flag = 0 */
                  (0 << 14) |   /* Frame based */
                  (0 << 13) |   /* Inter MB */
                  (1 << 8)  |   /* MbType = P_L0_16x16 */
                  (0 << 7)  |   /* MBZ for frame */
                  (2 << 4)  |   /* MBZ for inter */
                  (0 << 2)  |   /* SkipMbFlag */
                  (0 << 0));    /* InterMbMode */

    OUT_BCS_BATCH(batch, (0xFFFF << 16) | (y << 8) | x);    /* Code Block Pattern for Y */
    OUT_BCS_BATCH(batch, 0x000F000F);                       /* Code Block Pattern */

    if (slice_type == SLICE_TYPE_B) {
        OUT_BCS_BATCH(batch, (0xF << 28) | (end_mb << 26) | qp);    /* Last MB */
    } else if (slice_type == SLICE_TYPE_P) {
        OUT_BCS_BATCH(batch, (end_mb << 26) | qp);                  /* Last MB */
    } else {
        OUT_BCS_BATCH(batch, (end_mb << 26) | qp);                  /* Last MB */
    }

    /* Stuff for Inter MB */
    OUT_BCS_BATCH(batch, 0x0);
    OUT_BCS_BATCH(batch, 0x0);
    OUT_BCS_BATCH(batch, 0x0);

    /* MaxSizeInWord and TargetSizeInWord */
    OUT_BCS_BATCH(batch, (max_mb_size << 24) |
                         (target_mb_size << 16));

    ADVANCE_BCS_BATCH(batch);

    return len_in_dwords;
}

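/*
 * gen6_mfc_init() drops any state left over from the previous frame and
 * (re)allocates the scratch buffers the MFX engine needs: the intra and
 * deblocking-filter row stores, the macroblock status buffer and the
 * BSD/MPC row store.
 */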
static void gen6_mfc_init(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
    dri_bo *bo;
    int i;

    /* Encode common setup for MFC */
    dri_bo_unreference(mfc_context->post_deblocking_output.bo);
    mfc_context->post_deblocking_output.bo = NULL;

    dri_bo_unreference(mfc_context->pre_deblocking_output.bo);
    mfc_context->pre_deblocking_output.bo = NULL;

    dri_bo_unreference(mfc_context->uncompressed_picture_source.bo);
    mfc_context->uncompressed_picture_source.bo = NULL;

    dri_bo_unreference(mfc_context->mfc_indirect_pak_bse_object.bo);
    mfc_context->mfc_indirect_pak_bse_object.bo = NULL;

    for (i = 0; i < NUM_MFC_DMV_BUFFERS; i++) {
        if (mfc_context->direct_mv_buffers[i].bo != NULL)
            dri_bo_unreference(mfc_context->direct_mv_buffers[i].bo);
        mfc_context->direct_mv_buffers[i].bo = NULL;
    }

    for (i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++) {
        if (mfc_context->reference_surfaces[i].bo != NULL)
            dri_bo_unreference(mfc_context->reference_surfaces[i].bo);
        mfc_context->reference_surfaces[i].bo = NULL;
    }

    dri_bo_unreference(mfc_context->intra_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
    mfc_context->intra_row_store_scratch_buffer.bo = bo;

    dri_bo_unreference(mfc_context->macroblock_status_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
    mfc_context->macroblock_status_buffer.bo = bo;

    dri_bo_unreference(mfc_context->deblocking_filter_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      49152,  /* 6 * 128 * 64 */
    mfc_context->deblocking_filter_row_store_scratch_buffer.bo = bo;

    dri_bo_unreference(mfc_context->bsd_mpc_row_store_scratch_buffer.bo);
    bo = dri_bo_alloc(i965->intel.bufmgr,
                      12288,  /* 1.5 * 128 * 64 */
    mfc_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
}

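/*
 * Packed SPS/PPS buffers handed in through encode_state->packed_sps and
 * encode_state->packed_pps start with this small header (bit 0 of `flag`
 * requests emulation-prevention bytes); the payload follows immediately
 * after the struct, which is why the code below passes `packed_sps + 1` to
 * the insert-object helper.
 *
 * The per-MB bit budget used for PAK is derived from the requested bitrate:
 * for example, 4 Mbit/s at 30 fps over 3600 MBs gives roughly 37 bits per
 * inter MB, and intra MBs are allowed five times that before being clamped.
 */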
struct packed_data_format
{
    unsigned int length_in_bits;
    unsigned char flag;
    unsigned char num_skip_bytes;
    unsigned char pad[2];
};

void gen6_mfc_avc_pipeline_programing(VADriverContextP ctx,
                                      struct encode_state *encode_state,
                                      struct gen6_encoder_context *gen6_encoder_context)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
    struct gen6_vme_context *vme_context = &gen6_encoder_context->vme_context;
    VAEncSequenceParameterBufferH264Ext *pSequenceParameter = (VAEncSequenceParameterBufferH264Ext *)encode_state->seq_param_ext->buffer;
    VAEncPictureParameterBufferH264Ext *pPicParameter = (VAEncPictureParameterBufferH264Ext *)encode_state->pic_param_ext->buffer;
    VAEncSliceParameterBufferH264Ext *pSliceParameter = (VAEncSliceParameterBufferH264Ext *)encode_state->slice_params_ext[0]->buffer; /* FIXME: multi slices */
    VAEncH264DecRefPicMarkingBuffer *pDecRefPicMarking = NULL;
    unsigned int *msg = NULL, offset = 0;
    int emit_new_state = 1, object_len_in_bytes;
    int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
    int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
    int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
    int x, y;
    int rate_control_mode = pSequenceParameter->rate_control_method;
    float fps = pSequenceParameter->time_scale * 0.5 / pSequenceParameter->num_units_in_tick;
    int inter_mb_size = pSequenceParameter->bits_per_second * 1.0 / fps / width_in_mbs / height_in_mbs;
    int intra_mb_size = inter_mb_size * 5.0;
    int qp = pPicParameter->pic_init_qp;
    unsigned char *slice_header = NULL;
    int slice_header_length_in_bits = 0;
    unsigned int tail_data[] = { 0x0 };
    struct packed_data_format *packed_sps = NULL, *packed_pps = NULL;

    if (encode_state->dec_ref_pic_marking)
        pDecRefPicMarking = (VAEncH264DecRefPicMarkingBuffer *)encode_state->dec_ref_pic_marking->buffer;

    if (encode_state->packed_sps)
        packed_sps = (struct packed_data_format *)encode_state->packed_sps->buffer;

    if (encode_state->packed_pps)
        packed_pps = (struct packed_data_format *)encode_state->packed_pps->buffer;

    slice_header_length_in_bits = build_avc_slice_header(pSequenceParameter, pPicParameter, pSliceParameter, pDecRefPicMarking, &slice_header);

    if (rate_control_mode != 2) {
        if (intra_mb_size > 384 * 8)    // one MB of raw data is 384 bytes
            intra_mb_size = 384 * 8;
        if (inter_mb_size > 256 * 8)
            inter_mb_size = 256 * 8;
    }

    intel_batchbuffer_start_atomic_bcs(batch, 0x1000);

    dri_bo_map(vme_context->vme_output.bo, 1);
    msg = (unsigned int *)vme_context->vme_output.bo->virtual;

    for (y = 0; y < height_in_mbs; y++) {
        for (x = 0; x < width_in_mbs; x++) {
            int last_mb = (y == (height_in_mbs - 1)) && (x == (width_in_mbs - 1));

            if (emit_new_state) {
                intel_batchbuffer_emit_mi_flush(batch);

                if (IS_GEN7(i965->intel.device_id)) {
                    gen7_mfc_pipe_mode_select(ctx, MFX_FORMAT_AVC, gen6_encoder_context);
                    gen7_mfc_surface_state(ctx, gen6_encoder_context);
                    gen7_mfc_ind_obj_base_addr_state(ctx, gen6_encoder_context);
                } else {
                    gen6_mfc_pipe_mode_select(ctx, gen6_encoder_context);
                    gen6_mfc_surface_state(ctx, gen6_encoder_context);
                    gen6_mfc_ind_obj_base_addr_state(ctx, gen6_encoder_context);
                }

                gen6_mfc_pipe_buf_addr_state(ctx, gen6_encoder_context);
                gen6_mfc_bsp_buf_base_addr_state(ctx, gen6_encoder_context);

                if (IS_GEN7(i965->intel.device_id)) {
                    gen7_mfc_avc_img_state(ctx, gen6_encoder_context);
                    gen7_mfc_avc_qm_state(ctx, gen6_encoder_context);
                    gen7_mfc_avc_fqm_state(ctx, gen6_encoder_context);
                } else {
                    gen6_mfc_avc_img_state(ctx, encode_state, gen6_encoder_context);
                    gen6_mfc_avc_qm_state(ctx, gen6_encoder_context);
                    gen6_mfc_avc_fqm_state(ctx, gen6_encoder_context);
                }

                gen6_mfc_avc_directmode_state(ctx, gen6_encoder_context);
                gen6_mfc_avc_ref_idx_state(ctx, gen6_encoder_context);
                gen6_mfc_avc_slice_state(ctx, pSliceParameter->slice_type,
                                         encode_state, gen6_encoder_context,
                                         rate_control_mode == 0, qp);

                if (packed_sps)
                    gen6_mfc_avc_insert_object(ctx, gen6_encoder_context,
                                               (unsigned int *)(packed_sps + 1), ALIGN(packed_sps->length_in_bits, 32) >> 5, packed_sps->length_in_bits & 0x1f,
                                               packed_sps->num_skip_bytes, 0, 0, !!(packed_sps->flag & 0x1));

                if (packed_pps)
                    gen6_mfc_avc_insert_object(ctx, gen6_encoder_context,
                                               (unsigned int *)(packed_pps + 1), ALIGN(packed_pps->length_in_bits, 32) >> 5, packed_pps->length_in_bits & 0x1f,
                                               packed_pps->num_skip_bytes, 0, 0, !!(packed_pps->flag & 0x1));

                gen6_mfc_avc_insert_object(ctx, gen6_encoder_context,
                                           (unsigned int *)slice_header, ALIGN(slice_header_length_in_bits, 32) >> 5, slice_header_length_in_bits & 0x1f,
                                           5, /* first 5 bytes are start code + nal unit type */
                                           1, 0, 1);

                emit_new_state = 0;
            }

            if (is_intra) {
                object_len_in_bytes = gen6_mfc_avc_pak_object_intra(ctx, x, y, last_mb, qp, msg, gen6_encoder_context, intra_mb_size);
            } else {
                object_len_in_bytes = gen6_mfc_avc_pak_object_inter(ctx, x, y, last_mb, qp, offset, gen6_encoder_context, inter_mb_size, pSliceParameter->slice_type);
            }

            if (intel_batchbuffer_check_free_space(batch, object_len_in_bytes) == 0) {
                intel_batchbuffer_end_atomic(batch);
                intel_batchbuffer_flush(batch);
                intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
                emit_new_state = 1;
            }
        }
    }

    gen6_mfc_avc_insert_object(ctx, gen6_encoder_context,
                               tail_data, sizeof(tail_data) >> 2, 32,
                               sizeof(tail_data), 1, 1, 1);

    dri_bo_unmap(vme_context->vme_output.bo);

    intel_batchbuffer_end_atomic(batch);
}

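/*
 * Each VA surface used for encoding carries a gen6_mfc_avc_surface_aux in
 * its private data, holding the top/bottom direct-MV buffers; the helper
 * below releases it, and gen6_mfc_avc_prepare() further down binds the
 * current frame, the reference list, the source picture and the coded
 * buffer to the MFC context.
 */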
static void
gen6_mfc_free_avc_surface(void **data)
{
    struct gen6_mfc_avc_surface_aux *avc_surface = *data;

    if (!avc_surface)
        return;

    dri_bo_unreference(avc_surface->dmv_top);
    avc_surface->dmv_top = NULL;
    dri_bo_unreference(avc_surface->dmv_bottom);
    avc_surface->dmv_bottom = NULL;

    free(avc_surface);
    *data = NULL;
}

static VAStatus gen6_mfc_avc_prepare(VADriverContextP ctx,
                                     struct encode_state *encode_state,
                                     struct gen6_encoder_context *gen6_encoder_context)
{
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
    struct object_surface *obj_surface;
    struct object_buffer *obj_buffer;
    struct gen6_mfc_avc_surface_aux *gen6_avc_surface;
    dri_bo *bo;
    VAEncPictureParameterBufferH264Ext *pPicParameter = (VAEncPictureParameterBufferH264Ext *)encode_state->pic_param_ext->buffer;
    VAStatus vaStatus = VA_STATUS_SUCCESS;
    int i;

    /* Set up all the input & output objects */

    /* Set up the current frame and its direct mv buffers */
    obj_surface = SURFACE(pPicParameter->CurrPic.picture_id);
    assert(obj_surface);
    i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N','V','1','2'));
    if (obj_surface->private_data == NULL) {
        gen6_avc_surface = calloc(sizeof(struct gen6_mfc_avc_surface_aux), 1);
        gen6_avc_surface->dmv_top =
            dri_bo_alloc(i965->intel.bufmgr,
        gen6_avc_surface->dmv_bottom =
            dri_bo_alloc(i965->intel.bufmgr,
        assert(gen6_avc_surface->dmv_top);
        assert(gen6_avc_surface->dmv_bottom);
        obj_surface->private_data = (void *)gen6_avc_surface;
        obj_surface->free_private_data = (void *)gen6_mfc_free_avc_surface;
    }

    gen6_avc_surface = (struct gen6_mfc_avc_surface_aux *)obj_surface->private_data;
    mfc_context->direct_mv_buffers[NUM_MFC_DMV_BUFFERS - 2].bo = gen6_avc_surface->dmv_top;
    mfc_context->direct_mv_buffers[NUM_MFC_DMV_BUFFERS - 1].bo = gen6_avc_surface->dmv_bottom;
    dri_bo_reference(gen6_avc_surface->dmv_top);
    dri_bo_reference(gen6_avc_surface->dmv_bottom);

    mfc_context->post_deblocking_output.bo = obj_surface->bo;
    dri_bo_reference(mfc_context->post_deblocking_output.bo);

    mfc_context->surface_state.width = obj_surface->orig_width;
    mfc_context->surface_state.height = obj_surface->orig_height;
    mfc_context->surface_state.w_pitch = obj_surface->width;
    mfc_context->surface_state.h_pitch = obj_surface->height;

    /* Set up the reference frames and their direct mv buffers */
    for (i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++) {
        if (pPicParameter->ReferenceFrames[i].picture_id != VA_INVALID_ID) {
            obj_surface = SURFACE(pPicParameter->ReferenceFrames[i].picture_id);
            assert(obj_surface);
            if (obj_surface->bo != NULL) {
                mfc_context->reference_surfaces[i].bo = obj_surface->bo;
                dri_bo_reference(obj_surface->bo);
            }

            /* Check the DMV buffer */
            if (obj_surface->private_data == NULL) {
                gen6_avc_surface = calloc(sizeof(struct gen6_mfc_avc_surface_aux), 1);
                gen6_avc_surface->dmv_top =
                    dri_bo_alloc(i965->intel.bufmgr,
                gen6_avc_surface->dmv_bottom =
                    dri_bo_alloc(i965->intel.bufmgr,
                assert(gen6_avc_surface->dmv_top);
                assert(gen6_avc_surface->dmv_bottom);
                obj_surface->private_data = gen6_avc_surface;
                obj_surface->free_private_data = gen6_mfc_free_avc_surface;
            }

            gen6_avc_surface = (struct gen6_mfc_avc_surface_aux *)obj_surface->private_data;
            /* Set up the DMV buffer */
            mfc_context->direct_mv_buffers[i * 2].bo = gen6_avc_surface->dmv_top;
            mfc_context->direct_mv_buffers[i * 2 + 1].bo = gen6_avc_surface->dmv_bottom;
            dri_bo_reference(gen6_avc_surface->dmv_top);
            dri_bo_reference(gen6_avc_surface->dmv_bottom);
        }
    }

    obj_surface = SURFACE(encode_state->current_render_target);
    assert(obj_surface && obj_surface->bo);
    mfc_context->uncompressed_picture_source.bo = obj_surface->bo;
    dri_bo_reference(mfc_context->uncompressed_picture_source.bo);

    obj_buffer = BUFFER(pPicParameter->CodedBuf);   /* FIXME: fix this later */
    bo = obj_buffer->buffer_store->bo;

    mfc_context->mfc_indirect_pak_bse_object.bo = bo;
    mfc_context->mfc_indirect_pak_bse_object.offset = ALIGN(sizeof(VACodedBufferSegment), 64);
    mfc_context->mfc_indirect_pak_bse_object.end_offset = ALIGN(obj_buffer->size_element - 0x1000, 0x1000);
    dri_bo_reference(mfc_context->mfc_indirect_pak_bse_object.bo);

    /* Program the BCS pipeline */
    gen6_mfc_avc_pipeline_programing(ctx, encode_state, gen6_encoder_context);  /* fill the pipeline */

    return vaStatus;
}

static VAStatus gen6_mfc_run(VADriverContextP ctx,
                             struct encode_state *encode_state,
                             struct gen6_encoder_context *gen6_encoder_context)
{
    struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;

    intel_batchbuffer_flush(batch);     /* run the pipeline */

    return VA_STATUS_SUCCESS;
}

static VAStatus gen6_mfc_stop(VADriverContextP ctx,
                              struct encode_state *encode_state,
                              struct gen6_encoder_context *gen6_encoder_context)
{
#if 0
    struct i965_driver_data *i965 = i965_driver_data(ctx);
    struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;

    VAEncPictureParameterBufferH264Ext *pPicParameter = (VAEncPictureParameterBufferH264Ext *)encode_state->pic_param_ext->buffer;

    struct object_surface *obj_surface = SURFACE(pPicParameter->reconstructed_picture);
    //struct object_surface *obj_surface = SURFACE(pPicParameter->reference_picture[0]);
    //struct object_surface *obj_surface = SURFACE(encode_state->current_render_target);
    my_debug(obj_surface);
#endif

    return VA_STATUS_SUCCESS;
}

static VAStatus
gen6_mfc_avc_encode_picture(VADriverContextP ctx,
                            struct encode_state *encode_state,
                            struct gen6_encoder_context *gen6_encoder_context)
{
    gen6_mfc_init(ctx, gen6_encoder_context);
    gen6_mfc_avc_prepare(ctx, encode_state, gen6_encoder_context);
    gen6_mfc_run(ctx, encode_state, gen6_encoder_context);
    gen6_mfc_stop(ctx, encode_state, gen6_encoder_context);

    return VA_STATUS_SUCCESS;
}

static VAStatus
gen6_mfc_pipeline(VADriverContextP ctx,
                  VAProfile profile,
                  struct encode_state *encode_state,
                  struct gen6_encoder_context *gen6_encoder_context)
{
    VAStatus vaStatus;

    switch (profile) {
    case VAProfileH264Baseline:
        vaStatus = gen6_mfc_avc_encode_picture(ctx, encode_state, gen6_encoder_context);
        break;

        /* FIXME: add for other profiles */
    default:
        vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
        break;
    }

    return vaStatus;
}

Bool gen6_mfc_context_init(VADriverContextP ctx, struct gen6_mfc_context *mfc_context)
{
    return True;
}

Bool gen6_mfc_context_destroy(struct gen6_mfc_context *mfc_context)
{
    int i;

    dri_bo_unreference(mfc_context->post_deblocking_output.bo);
    mfc_context->post_deblocking_output.bo = NULL;

    dri_bo_unreference(mfc_context->pre_deblocking_output.bo);
    mfc_context->pre_deblocking_output.bo = NULL;

    dri_bo_unreference(mfc_context->uncompressed_picture_source.bo);
    mfc_context->uncompressed_picture_source.bo = NULL;

    dri_bo_unreference(mfc_context->mfc_indirect_pak_bse_object.bo);
    mfc_context->mfc_indirect_pak_bse_object.bo = NULL;

    for (i = 0; i < NUM_MFC_DMV_BUFFERS; i++) {
        dri_bo_unreference(mfc_context->direct_mv_buffers[i].bo);
        mfc_context->direct_mv_buffers[i].bo = NULL;
    }

    dri_bo_unreference(mfc_context->intra_row_store_scratch_buffer.bo);
    mfc_context->intra_row_store_scratch_buffer.bo = NULL;

    dri_bo_unreference(mfc_context->macroblock_status_buffer.bo);
    mfc_context->macroblock_status_buffer.bo = NULL;

    dri_bo_unreference(mfc_context->deblocking_filter_row_store_scratch_buffer.bo);
    mfc_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;

    dri_bo_unreference(mfc_context->bsd_mpc_row_store_scratch_buffer.bo);
    mfc_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;

    for (i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++) {
        dri_bo_unreference(mfc_context->reference_surfaces[i].bo);
        mfc_context->reference_surfaces[i].bo = NULL;
    }