2 * Copyright © 2010-2011 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Zhou Chang <chang.zhou@intel.com>
35 #include "intel_batchbuffer.h"
36 #include "i965_defines.h"
37 #include "i965_structs.h"
38 #include "i965_drv_video.h"
39 #include "i965_encoder.h"
40 #include "i965_encoder_utils.h"
/*
 * Program MFX_PIPE_MODE_SELECT (Gen6): puts the MFX engine into AVC encode
 * mode with post-deblocking output enabled, Stream-Out disabled.
 * NOTE(review): this extract appears to be missing lines (function braces and
 * the OUT_BCS_BATCH openers before the bit-field groups) — confirm against
 * the full file before relying on structure.
 */
43 gen6_mfc_pipe_mode_select(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
45 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
/* 4-dword command: header + 3 payload dwords */
47 BEGIN_BCS_BATCH(batch, 4);
49 OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (4 - 2));
/* DW1: mode flags — encode path, deblocked output, standard = AVC (2) */
51 (0 << 10) | /* disable Stream-Out , advanced QP/bitrate control need enable it*/
52 (1 << 9) | /* Post Deblocking Output */
53 (0 << 8) | /* Pre Deblocking Output */
54 (0 << 7) | /* disable TLB prefectch */
55 (0 << 5) | /* not in stitch mode */
56 (1 << 4) | /* encoding mode */
57 (2 << 0)); /* Standard Select: AVC */
/* DW2: debug/error-handling controls */
59 (0 << 20) | /* round flag in PB slice */
60 (0 << 19) | /* round flag in Intra8x8 */
61 (0 << 7) | /* expand NOA bus flag */
62 (1 << 6) | /* must be 1 */
63 (0 << 5) | /* disable clock gating for NOA */
64 (0 << 4) | /* terminate if AVC motion and POC table error occurs */
65 (0 << 3) | /* terminate if AVC mbdata error occurs */
66 (0 << 2) | /* terminate if AVC CABAC/CAVLC decode error occurs */
67 (0 << 1) | /* AVC long field motion vector */
68 (0 << 0)); /* always calculate AVC ILDB boundary strength */
69 OUT_BCS_BATCH(batch, 0);
71 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFX_PIPE_MODE_SELECT (Gen7): 5-dword variant; unlike the Gen6
 * version the codec standard is a parameter (standard_select, asserted to be
 * MPEG2 or AVC — the parameter line itself is not visible in this extract).
 * NOTE(review): extract appears to be missing lines; verify against full file.
 */
75 gen7_mfc_pipe_mode_select(VADriverContextP ctx,
77 struct gen6_encoder_context *gen6_encoder_context)
79 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
/* encoder only supports MPEG2 and AVC standards here */
81 assert(standard_select == MFX_FORMAT_MPEG2 ||
82 standard_select == MFX_FORMAT_AVC);
84 BEGIN_BCS_BATCH(batch, 5);
85 OUT_BCS_BATCH(batch, MFX_PIPE_MODE_SELECT | (5 - 2));
/* DW1: long-format commands are mandatory for the encode path */
87 (MFX_LONG_MODE << 17) | /* Must be long format for encoder */
88 (MFD_MODE_VLD << 15) | /* VLD mode */
89 (0 << 10) | /* disable Stream-Out */
90 (1 << 9) | /* Post Deblocking Output */
91 (0 << 8) | /* Pre Deblocking Output */
92 (0 << 5) | /* not in stitch mode */
93 (1 << 4) | /* encoding mode */
94 (standard_select << 0)); /* standard select: avc or mpeg2 */
/* DW2: error-handling / clock-gating controls */
96 (0 << 7) | /* expand NOA bus flag */
97 (0 << 6) | /* disable slice-level clock gating */
98 (0 << 5) | /* disable clock gating for NOA */
99 (0 << 4) | /* terminate if AVC motion and POC table error occurs */
100 (0 << 3) | /* terminate if AVC mbdata error occurs */
101 (0 << 2) | /* terminate if AVC CABAC/CAVLC decode error occurs */
/* DW3-DW4: reserved/zero */
104 OUT_BCS_BATCH(batch, 0);
105 OUT_BCS_BATCH(batch, 0);
107 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFX_SURFACE_STATE (Gen6) for the reconstructed/source NV12 surface:
 * dimensions, pitch, Y-tiling, and the U(Cb) plane Y offset, all taken from
 * mfc_context->surface_state.
 * NOTE(review): extract appears to be missing OUT_BCS_BATCH openers/braces.
 */
111 gen6_mfc_surface_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
113 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
114 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
116 BEGIN_BCS_BATCH(batch, 6);
118 OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
119 OUT_BCS_BATCH(batch, 0);
/* DW2: height/width are encoded minus one (Gen6 uses shifts 19/6) */
121 ((mfc_context->surface_state.height - 1) << 19) |
122 ((mfc_context->surface_state.width - 1) << 6));
/* DW3: format, interleaved-UV requirements, pitch and tiling */
124 (MFX_SURFACE_PLANAR_420_8 << 28) | /* 420 planar YUV surface */
125 (1 << 27) | /* must be 1 for interleave U/V, hardware requirement */
126 (0 << 22) | /* surface object control state, FIXME??? */
127 ((mfc_context->surface_state.w_pitch - 1) << 3) | /* pitch */
128 (0 << 2) | /* must be 0 for interleave U/V */
129 (1 << 1) | /* must be y-tiled */
130 (I965_TILEWALK_YMAJOR << 0)); /* tile walk, TILEWALK_YMAJOR */
/* DW4: chroma plane offset — h_pitch rows below the luma plane */
132 (0 << 16) | /* must be 0 for interleave U/V */
133 (mfc_context->surface_state.h_pitch)); /* y offset for U(cb) */
134 OUT_BCS_BATCH(batch, 0);
135 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFX_SURFACE_STATE (Gen7). Same contract as the Gen6 variant but the
 * width/height fields moved: height at bit 18 and width at bit 4 (vs 19/6 on
 * Gen6).
 * NOTE(review): extract appears to be missing OUT_BCS_BATCH openers/braces.
 */
139 gen7_mfc_surface_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
141 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
142 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
144 BEGIN_BCS_BATCH(batch, 6);
146 OUT_BCS_BATCH(batch, MFX_SURFACE_STATE | (6 - 2));
147 OUT_BCS_BATCH(batch, 0);
/* DW2: height/width minus one, Gen7 field positions (18/4) */
149 ((mfc_context->surface_state.height - 1) << 18) |
150 ((mfc_context->surface_state.width - 1) << 4));
/* DW3: format, interleaved-UV requirements, pitch and tiling */
152 (MFX_SURFACE_PLANAR_420_8 << 28) | /* 420 planar YUV surface */
153 (1 << 27) | /* must be 1 for interleave U/V, hardware requirement */
154 (0 << 22) | /* surface object control state, FIXME??? */
155 ((mfc_context->surface_state.w_pitch - 1) << 3) | /* pitch */
156 (0 << 2) | /* must be 0 for interleave U/V */
157 (1 << 1) | /* must be tiled */
158 (I965_TILEWALK_YMAJOR << 0)); /* tile walk, TILEWALK_YMAJOR */
/* DW4: U(Cb) plane row offset */
160 (0 << 16) | /* must be 0 for interleave U/V */
161 (mfc_context->surface_state.h_pitch)); /* y offset for U(cb) */
162 OUT_BCS_BATCH(batch, 0);
163 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFX_PIPE_BUF_ADDR_STATE (24 dwords): relocations for all pipeline
 * buffers — post-deblocking output, uncompressed source, stream-out/MB-status,
 * intra-row and deblocking-filter row scratch, up to 16 reference pictures,
 * and the macroblock status buffer. Unused reference slots are written as 0.
 * NOTE(review): extract is missing braces and some reloc trailing arguments.
 */
167 gen6_mfc_pipe_buf_addr_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
169 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
170 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
173 BEGIN_BCS_BATCH(batch, 24);
175 OUT_BCS_BATCH(batch, MFX_PIPE_BUF_ADDR_STATE | (24 - 2));
/* pre-deblocking output is unused — post-deblocked output is selected */
177 OUT_BCS_BATCH(batch, 0); /* pre output addr */
179 OUT_BCS_RELOC(batch, mfc_context->post_deblocking_output.bo,
180 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
181 0); /* post output addr */
183 OUT_BCS_RELOC(batch, mfc_context->uncompressed_picture_source.bo,
184 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
185 0); /* uncompressed data */
186 OUT_BCS_RELOC(batch, mfc_context->macroblock_status_buffer.bo,
187 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
188 0); /* StreamOut data*/
189 OUT_BCS_RELOC(batch, mfc_context->intra_row_store_scratch_buffer.bo,
190 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
192 OUT_BCS_RELOC(batch, mfc_context->deblocking_filter_row_store_scratch_buffer.bo,
193 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
195 /* 7..22 Reference pictures*/
196 for (i = 0; i < ARRAY_ELEMS(mfc_context->reference_surfaces); i++) {
197 if ( mfc_context->reference_surfaces[i].bo != NULL) {
198 OUT_BCS_RELOC(batch, mfc_context->reference_surfaces[i].bo,
199 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
/* empty reference slot — emit a null address */
202 OUT_BCS_BATCH(batch, 0);
205 OUT_BCS_RELOC(batch, mfc_context->macroblock_status_buffer.bo,
206 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
207 0); /* Macroblock status buffer*/
209 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFX_IND_OBJ_BASE_ADDR_STATE (Gen6, 11 dwords): points the PAK at
 * the VME output buffer (indirect MV objects) and at the PAK-BSE bitstream
 * output object (base + end offset relocations).
 * NOTE(review): extract is missing braces and the OUT_BCS_RELOC opener lines
 * before the PAK-BSE relocations.
 */
213 gen6_mfc_ind_obj_base_addr_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
215 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
216 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
217 struct gen6_vme_context *vme_context = &gen6_encoder_context->vme_context;
219 BEGIN_BCS_BATCH(batch, 11);
221 OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
222 OUT_BCS_BATCH(batch, 0);
223 OUT_BCS_BATCH(batch, 0);
224 /* MFX Indirect MV Object Base Address */
225 OUT_BCS_RELOC(batch, vme_context->vme_output.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
226 OUT_BCS_BATCH(batch, 0);
227 OUT_BCS_BATCH(batch, 0);
228 OUT_BCS_BATCH(batch, 0);
229 OUT_BCS_BATCH(batch, 0);
230 OUT_BCS_BATCH(batch, 0);
231 /*MFC Indirect PAK-BSE Object Base Address for Encoder*/
233 mfc_context->mfc_indirect_pak_bse_object.bo,
234 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
/* second reloc: upper bound of the BSE object (end offset) */
237 mfc_context->mfc_indirect_pak_bse_object.bo,
238 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
239 mfc_context->mfc_indirect_pak_bse_object.end_offset);
241 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFX_IND_OBJ_BASE_ADDR_STATE (Gen7). Same layout as the Gen6 variant
 * except DW4 must carry 0x80000000 (MV object upper bound, "up to 2G").
 * NOTE(review): extract is missing braces and the OUT_BCS_RELOC opener lines
 * before the PAK-BSE relocations.
 */
245 gen7_mfc_ind_obj_base_addr_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
247 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
248 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
249 struct gen6_vme_context *vme_context = &gen6_encoder_context->vme_context;
251 BEGIN_BCS_BATCH(batch, 11);
253 OUT_BCS_BATCH(batch, MFX_IND_OBJ_BASE_ADDR_STATE | (11 - 2));
254 OUT_BCS_BATCH(batch, 0);
255 OUT_BCS_BATCH(batch, 0);
256 /* MFX Indirect MV Object Base Address */
257 OUT_BCS_RELOC(batch, vme_context->vme_output.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
258 OUT_BCS_BATCH(batch, 0x80000000); /* must set, up to 2G */
259 OUT_BCS_BATCH(batch, 0);
260 OUT_BCS_BATCH(batch, 0);
261 OUT_BCS_BATCH(batch, 0);
262 OUT_BCS_BATCH(batch, 0);
263 /*MFC Indirect PAK-BSE Object Base Address for Encoder*/
265 mfc_context->mfc_indirect_pak_bse_object.bo,
266 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
/* second reloc: upper bound of the BSE object (end offset) */
269 mfc_context->mfc_indirect_pak_bse_object.bo,
270 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
271 mfc_context->mfc_indirect_pak_bse_object.end_offset);
273 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFX_BSP_BUF_BASE_ADDR_STATE (4 dwords): only the BSD/MPC row store
 * scratch buffer is supplied; the remaining two buffer slots are zero.
 * NOTE(review): extract is missing braces and the reloc's trailing argument.
 */
277 gen6_mfc_bsp_buf_base_addr_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
279 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
280 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
282 BEGIN_BCS_BATCH(batch, 4);
284 OUT_BCS_BATCH(batch, MFX_BSP_BUF_BASE_ADDR_STATE | (4 - 2));
285 OUT_BCS_RELOC(batch, mfc_context->bsd_mpc_row_store_scratch_buffer.bo,
286 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
288 OUT_BCS_BATCH(batch, 0);
289 OUT_BCS_BATCH(batch, 0);
291 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFX_AVC_IMG_STATE (Gen6, 13 dwords): per-picture AVC encode
 * parameters — MB count, frame size in MBs, QP offsets, CABAC mode, and the
 * rate-control masks/limits used by the MB-level bitrate controller.
 * NOTE(review): extract is missing braces and the OUT_BCS_BATCH openers
 * before the bit-field groups.
 */
295 gen6_mfc_avc_img_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
297 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
298 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
/* frame dimensions rounded up to whole macroblocks */
300 int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
301 int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
303 BEGIN_BCS_BATCH(batch, 13);
304 OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (13 - 2));
/* DW1: total macroblock count (low 16 bits) */
306 ((width_in_mbs * height_in_mbs) & 0xFFFF));
/* DW2: frame size in MB units — NOTE Gen6 uses raw values, Gen7 uses minus-one */
308 (height_in_mbs << 16) |
309 (width_in_mbs << 0));
/* DW3: QP offsets and structure flags */
311 (0 << 24) | /*Second Chroma QP Offset*/
312 (0 << 16) | /*Chroma QP Offset*/
313 (0 << 14) | /*Max-bit conformance Intra flag*/
314 (0 << 13) | /*Max Macroblock size conformance Inter flag*/
315 (1 << 12) | /*Should always be written as "1" */
316 (0 << 10) | /*QM Preset FLag */
317 (0 << 8) | /*Image Structure*/
318 (0 << 0) ); /*Current Decoed Image Frame Store ID, reserved in Encode mode*/
/* DW4: entropy coding and picture-structure flags (CABAC enabled, bit 7) */
320 (400 << 16) | /*Mininum Frame size*/
321 (0 << 15) | /*Disable reading of Macroblock Status Buffer*/
322 (0 << 14) | /*Load BitStream Pointer only once, 1 slic 1 frame*/
323 (0 << 13) | /*CABAC 0 word insertion test enable*/
324 (1 << 12) | /*MVUnpackedEnable,compliant to DXVA*/
325 (1 << 10) | /*Chroma Format IDC, 4:2:0*/
326 (1 << 7) | /*0:CAVLC encoding mode,1:CABAC*/
327 (0 << 6) | /*Only valid for VLD decoding mode*/
328 (0 << 5) | /*Constrained Intra Predition Flag, from PPS*/
329 (0 << 4) | /*Direct 8x8 inference flag*/
330 (0 << 3) | /*Only 8x8 IDCT Transform Mode Flag*/
331 (1 << 2) | /*Frame MB only flag*/
332 (0 << 1) | /*MBAFF mode is in active*/
333 (0 << 0) ); /*Field picture flag*/
/* DW5: rate-control enables and report masks */
335 (1<<16) | /*Frame Size Rate Control Flag*/
337 (1<<9) | /*MB level Rate Control Enabling Flag*/
338 (1 << 3) | /*FrameBitRateMinReportMask*/
339 (1 << 2) | /*FrameBitRateMaxReportMask*/
340 (1 << 1) | /*InterMBMaxSizeReportMask*/
341 (1 << 0) ); /*IntraMBMaxSizeReportMask*/
342 OUT_BCS_BATCH(batch, /*Inter and Intra Conformance Max size limit*/
343 (0x0600 << 16) | /*InterMbMaxSz 192 Byte*/
344 (0x0800) ); /*IntraMbMaxSz 256 Byte*/
345 OUT_BCS_BATCH(batch, 0x00000000); /*Reserved : MBZReserved*/
/* DW8-DW9: per-pass slice QP delta table for bitrate control */
346 OUT_BCS_BATCH(batch, 0x01020304); /*Slice QP Delta for bitrate control*/
347 OUT_BCS_BATCH(batch, 0xFEFDFCFB);
/* DW10-DW11: frame bitrate min/max thresholds */
348 OUT_BCS_BATCH(batch, 0x80601004); /*MAX = 128KB, MIN = 64KB*/
349 OUT_BCS_BATCH(batch, 0x00800001);
350 OUT_BCS_BATCH(batch, 0);
352 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFX_AVC_IMG_STATE (Gen7, 16 dwords). Differences from Gen6: frame
 * size fields are encoded minus one, MB-level rate control is not enabled
 * (DW5 = 0), and the trailing dwords carry Gen7-specific magic constants.
 * NOTE(review): extract is missing braces and the OUT_BCS_BATCH openers
 * before the bit-field groups.
 */
356 gen7_mfc_avc_img_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
358 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
359 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
/* frame dimensions rounded up to whole macroblocks */
361 int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
362 int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
364 BEGIN_BCS_BATCH(batch, 16);
365 OUT_BCS_BATCH(batch, MFX_AVC_IMG_STATE | (16 - 2));
/* DW1: total macroblock count (low 16 bits) */
367 ((width_in_mbs * height_in_mbs) & 0xFFFF));
/* DW2: frame size in MBs, minus-one encoding (unlike Gen6) */
369 ((height_in_mbs - 1) << 16) |
370 ((width_in_mbs - 1) << 0));
/* DW3: QP offsets and weighted-prediction flags (all defaults) */
372 (0 << 24) | /* Second Chroma QP Offset */
373 (0 << 16) | /* Chroma QP Offset */
374 (0 << 14) | /* Max-bit conformance Intra flag */
375 (0 << 13) | /* Max Macroblock size conformance Inter flag */
376 (0 << 12) | /* FIXME: Weighted_Pred_Flag */
377 (0 << 10) | /* FIXME: Weighted_BiPred_Idc */
378 (0 << 8) | /* FIXME: Image Structure */
379 (0 << 0) ); /* Current Decoed Image Frame Store ID, reserved in Encode mode */
/* DW4: entropy coding and picture-structure flags (CABAC enabled, bit 7) */
381 (0 << 16) | /* Mininum Frame size */
382 (0 << 15) | /* Disable reading of Macroblock Status Buffer */
383 (0 << 14) | /* Load BitStream Pointer only once, 1 slic 1 frame */
384 (0 << 13) | /* CABAC 0 word insertion test enable */
385 (1 << 12) | /* MVUnpackedEnable,compliant to DXVA */
386 (1 << 10) | /* Chroma Format IDC, 4:2:0 */
387 (0 << 9) | /* FIXME: MbMvFormatFlag */
388 (1 << 7) | /* 0:CAVLC encoding mode,1:CABAC */
389 (0 << 6) | /* Only valid for VLD decoding mode */
390 (0 << 5) | /* Constrained Intra Predition Flag, from PPS */
391 (0 << 4) | /* Direct 8x8 inference flag */
392 (0 << 3) | /* Only 8x8 IDCT Transform Mode Flag */
393 (1 << 2) | /* Frame MB only flag */
394 (0 << 1) | /* MBAFF mode is in active */
395 (0 << 0)); /* Field picture flag */
396 OUT_BCS_BATCH(batch, 0); /* Mainly about MB rate control and debug, just ignoring */
397 OUT_BCS_BATCH(batch, /* Inter and Intra Conformance Max size limit */
398 (0xBB8 << 16) | /* InterMbMaxSz */
399 (0xEE8) ); /* IntraMbMaxSz */
400 OUT_BCS_BATCH(batch, 0); /* Reserved */
401 OUT_BCS_BATCH(batch, 0); /* Slice QP Delta for bitrate control */
402 OUT_BCS_BATCH(batch, 0); /* Slice QP Delta for bitrate control */
/* DW10-DW11: frame bitrate thresholds — presumably hardware-required
 * constants for Gen7; TODO confirm against the PRM */
403 OUT_BCS_BATCH(batch, 0x8C000000);
404 OUT_BCS_BATCH(batch, 0x00010000);
405 OUT_BCS_BATCH(batch, 0);
406 OUT_BCS_BATCH(batch, 0);
407 OUT_BCS_BATCH(batch, 0);
408 OUT_BCS_BATCH(batch, 0);
410 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFX_AVC_DIRECTMODE_STATE (69 dwords): direct-MV buffer addresses.
 * Reference DMV slots (16 address pairs) are currently written as zero; only
 * direct_mv_buffers[0] is relocated as the current-picture DMV buffer —
 * flagged by the original authors as test-only.
 * NOTE(review): extract is missing braces, the mfc_context declaration line,
 * and the reloc's trailing argument.
 */
413 static void gen6_mfc_avc_directmode_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
415 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
418 BEGIN_BCS_BATCH(batch, 69);
420 OUT_BCS_BATCH(batch, MFX_AVC_DIRECTMODE_STATE | (69 - 2));
421 //TODO: reference DMV
422 for(i = 0; i < 16; i++){
423 OUT_BCS_BATCH(batch, 0);
424 OUT_BCS_BATCH(batch, 0);
427 //TODO: current DMV just for test
429 OUT_BCS_RELOC(batch, mfc_context->direct_mv_buffers[0].bo,
430 I915_GEM_DOMAIN_INSTRUCTION, I915_GEM_DOMAIN_INSTRUCTION,
433 //drm_intel_bo_pin(mfc_context->direct_mv_buffers[0].bo, 0x1000);
434 //OUT_BCS_BATCH(batch, mfc_context->direct_mv_buffers[0].bo->offset);
435 OUT_BCS_BATCH(batch, 0);
439 OUT_BCS_BATCH(batch, 0);
/* remaining POC list entries are zeroed */
442 for(i = 0; i < 34; i++) {
443 OUT_BCS_BATCH(batch, 0);
446 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFX_AVC_SLICE_STATE (11 dwords) for a single slice covering the
 * whole frame: slice type, reference count (0 for I, 1 otherwise), QP,
 * rate-control triggers, and the bitstream output offset.
 * NOTE(review): extract is missing braces and several parameter/continuation
 * lines (slice_type and qp parameters are used but not visible here).
 */
449 static void gen6_mfc_avc_slice_state(VADriverContextP ctx,
451 struct gen6_encoder_context *gen6_encoder_context,
452 int rate_control_enable,
455 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
456 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
458 BEGIN_BCS_BATCH(batch, 11);;
460 OUT_BCS_BATCH(batch, MFX_AVC_SLICE_STATE | (11 - 2) );
462 OUT_BCS_BATCH(batch, slice_type); /*Slice Type: I:P:B Slice*/
/* I slices carry no reference list; others advertise one L0 reference */
464 if ( slice_type == SLICE_TYPE_I )
465 OUT_BCS_BATCH(batch, 0); /*no reference frames and pred_weight_table*/
467 OUT_BCS_BATCH(batch, 0x00010000); /*1 reference frame*/
469 OUT_BCS_BATCH(batch, (0<<24) | /*Enable deblocking operation*/
470 (qp<<16) | /*Slice Quantization Parameter*/
472 OUT_BCS_BATCH(batch, 0); /*First MB X&Y , the postion of current slice*/
/* next-slice start row = frame height in MBs (single-slice encode) */
473 OUT_BCS_BATCH(batch, ( ((mfc_context->surface_state.height+15)/16) << 16) );
/* DW6: rate-control behavior — counter reset each slice, panic mode tied to
 * rate_control_enable, slice marked as last */
476 (rate_control_enable<<31) | /*in CBR mode RateControlCounterEnable = enable*/
477 (1<<30) | /*ResetRateControlCounter*/
478 (0<<28) | /*RC Triggle Mode = Always Rate Control*/
479 (8<<24) | /*RC Stable Tolerance, middle level*/
480 (rate_control_enable<<23) | /*RC Panic Enable*/
481 (0<<22) | /*QP mode, don't modfiy CBP*/
482 (1<<19) | /*IsLastSlice*/
483 (0<<18) | /*BitstreamOutputFlag Compressed BitStream Output Disable Flag 0:enable 1:disable*/
484 (1<<17) | /*HeaderPresentFlag*/
485 (1<<16) | /*SliceData PresentFlag*/
486 (1<<15) | /*TailPresentFlag*/
487 (1<<13) | /*RBSP NAL TYPE*/
488 (0<<12) ); /*CabacZeroWordInsertionEnable*/
/* DW7: where PAK writes the compressed slice in the BSE object */
490 OUT_BCS_BATCH(batch, mfc_context->mfc_indirect_pak_bse_object.offset);
/* DW8: QP clamp window around the target QP */
492 OUT_BCS_BATCH(batch, (24<<24) | /*Target QP - 24 is lowest QP*/
493 (20<<16) | /*Target QP + 20 is highest QP*/
498 OUT_BCS_BATCH(batch, 0x08888888);
499 OUT_BCS_BATCH(batch, 0);
501 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFX_AVC_QM_STATE (Gen6): flat quantization matrices — all 56
 * payload dwords are 0x10101010 (every QM entry = 16, the H.264 flat scale).
 * NOTE(review): extract is missing braces and the loop-variable declaration.
 */
503 static void gen6_mfc_avc_qm_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
505 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
508 BEGIN_BCS_BATCH(batch, 58);
510 OUT_BCS_BATCH(batch, MFX_AVC_QM_STATE | 56);
511 OUT_BCS_BATCH(batch, 0xFF ) ;
512 for( i = 0; i < 56; i++) {
513 OUT_BCS_BATCH(batch, 0x10101010);
516 ADVANCE_BCS_BATCH(batch);
/*
 * Program MFC_AVC_FQM_STATE (Gen6): flat forward quantization matrices —
 * 112 dwords of 0x10001000 (two 16-bit entries of 0x1000 each).
 * NOTE(review): extract is missing braces and the loop-variable declaration.
 */
519 static void gen6_mfc_avc_fqm_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
521 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
524 BEGIN_BCS_BATCH(batch, 113);
525 OUT_BCS_BATCH(batch, MFC_AVC_FQM_STATE | (113 - 2));
527 for(i = 0; i < 112;i++) {
528 OUT_BCS_BATCH(batch, 0x10001000);
531 ADVANCE_BCS_BATCH(batch);
/*
 * Gen7 helper: emit one MFX_QM_STATE command for the given matrix type.
 * Copies up to 16 dwords of caller-supplied matrix data into a local buffer
 * and writes all 16 dwords to the batch (trailing entries beyond qm_length
 * are whatever is left uninitialized in qm_buffer — NOTE(review): consider
 * zero-filling; confirm hardware expectation).
 * NOTE(review): extract is missing braces and the qm/qm_type/qm_length
 * parameter lines, which are used below.
 */
535 gen7_mfc_qm_state(VADriverContextP ctx,
539 struct gen6_encoder_context *gen6_encoder_context)
541 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
542 unsigned int qm_buffer[16];
544 assert(qm_length <= 16);
545 assert(sizeof(*qm) == 4);
546 memcpy(qm_buffer, qm, qm_length * 4);
548 BEGIN_BCS_BATCH(batch, 18);
549 OUT_BCS_BATCH(batch, MFX_QM_STATE | (18 - 2));
550 OUT_BCS_BATCH(batch, qm_type << 0);
551 intel_batchbuffer_data(batch, qm_buffer, 16 * 4);
552 ADVANCE_BCS_BATCH(batch);
/*
 * Gen7: load flat (all-16) quantization matrices for AVC — 4x4 intra/inter
 * (12 dwords each) and 8x8 intra/inter (16 dwords each) via gen7_mfc_qm_state.
 * NOTE(review): extract is missing the function's braces.
 */
557 unsigned int qm[16] = {
558 0x10101010, 0x10101010, 0x10101010, 0x10101010,
559 0x10101010, 0x10101010, 0x10101010, 0x10101010,
560 0x10101010, 0x10101010, 0x10101010, 0x10101010,
561 0x10101010, 0x10101010, 0x10101010, 0x10101010
564 gen7_mfc_qm_state(ctx, MFX_QM_AVC_4X4_INTRA_MATRIX, qm, 12, gen6_encoder_context);
565 gen7_mfc_qm_state(ctx, MFX_QM_AVC_4X4_INTER_MATRIX, qm, 12, gen6_encoder_context);
566 gen7_mfc_qm_state(ctx, MFX_QM_AVC_8x8_INTRA_MATRIX, qm, 16, gen6_encoder_context);
567 gen7_mfc_qm_state(ctx, MFX_QM_AVC_8x8_INTER_MATRIX, qm, 16, gen6_encoder_context);
/*
 * Gen7 helper: emit one MFX_FQM_STATE command for the given matrix type.
 * Copies up to 32 dwords of caller-supplied forward-QM data and writes all
 * 32 dwords to the batch (entries beyond fqm_length are uninitialized —
 * NOTE(review): consider zero-filling; confirm hardware expectation).
 * NOTE(review): extract is missing braces and the fqm/fqm_type/fqm_length
 * parameter lines, which are used below.
 */
571 gen7_mfc_fqm_state(VADriverContextP ctx,
575 struct gen6_encoder_context *gen6_encoder_context)
577 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
578 unsigned int fqm_buffer[32];
580 assert(fqm_length <= 32);
581 assert(sizeof(*fqm) == 4);
582 memcpy(fqm_buffer, fqm, fqm_length * 4);
584 BEGIN_BCS_BATCH(batch, 34);
585 OUT_BCS_BATCH(batch, MFX_FQM_STATE | (34 - 2));
586 OUT_BCS_BATCH(batch, fqm_type << 0);
587 intel_batchbuffer_data(batch, fqm_buffer, 32 * 4);
588 ADVANCE_BCS_BATCH(batch);
/*
 * Gen7: load flat forward quantization matrices for AVC — 4x4 intra/inter
 * (24 dwords each) and 8x8 intra/inter (32 dwords each) via
 * gen7_mfc_fqm_state. Every 16-bit entry is 0x1000 (flat scale).
 * NOTE(review): extract is missing the function's braces.
 */
593 unsigned int qm[32] = {
594 0x10001000, 0x10001000, 0x10001000, 0x10001000,
595 0x10001000, 0x10001000, 0x10001000, 0x10001000,
596 0x10001000, 0x10001000, 0x10001000, 0x10001000,
597 0x10001000, 0x10001000, 0x10001000, 0x10001000,
598 0x10001000, 0x10001000, 0x10001000, 0x10001000,
599 0x10001000, 0x10001000, 0x10001000, 0x10001000,
600 0x10001000, 0x10001000, 0x10001000, 0x10001000,
601 0x10001000, 0x10001000, 0x10001000, 0x10001000
604 gen7_mfc_fqm_state(ctx, MFX_QM_AVC_4X4_INTRA_MATRIX, qm, 24, gen6_encoder_context);
605 gen7_mfc_fqm_state(ctx, MFX_QM_AVC_4X4_INTER_MATRIX, qm, 24, gen6_encoder_context);
606 gen7_mfc_fqm_state(ctx, MFX_QM_AVC_8x8_INTRA_MATRIX, qm, 32, gen6_encoder_context);
607 gen7_mfc_fqm_state(ctx, MFX_QM_AVC_8x8_INTER_MATRIX, qm, 32, gen6_encoder_context);
/*
 * Program MFX_AVC_REF_IDX_STATE twice — once for reference list L0 and once
 * for L1 — each advertising a single active reference; the remaining entries
 * are 0x80 (invalid/unused marker bytes).
 * NOTE(review): extract is missing braces and the loop-variable declaration.
 */
610 static void gen6_mfc_avc_ref_idx_state(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
612 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
615 BEGIN_BCS_BATCH(batch, 10);
616 OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | 8);
617 OUT_BCS_BATCH(batch, 0); //Select L0
618 OUT_BCS_BATCH(batch, 0x80808020); //Only 1 reference
619 for(i = 0; i < 7; i++) {
620 OUT_BCS_BATCH(batch, 0x80808080);
622 ADVANCE_BCS_BATCH(batch);
/* second command: reference list L1 */
624 BEGIN_BCS_BATCH(batch, 10);
625 OUT_BCS_BATCH(batch, MFX_AVC_REF_IDX_STATE | 8);
626 OUT_BCS_BATCH(batch, 1); //Select L1
627 OUT_BCS_BATCH(batch, 0x80808022); //Only 1 reference
628 for(i = 0; i < 7; i++) {
629 OUT_BCS_BATCH(batch, 0x80808080);
631 ADVANCE_BCS_BATCH(batch);
/*
 * Emit MFC_AVC_INSERT_OBJECT: inserts raw header/tail bits (e.g. slice
 * header NAL, end-of-stream tail) directly into the output bitstream.
 * insert_data supplies lenght_in_dws dwords; data_bits_in_last_dw says how
 * many bits of the final dword are valid; skip_emul_byte_count bytes at the
 * start bypass emulation-prevention insertion.
 * NOTE(review): extract is missing braces and the OUT_BCS_BATCH opener
 * before the flags dword.
 */
635 gen6_mfc_avc_insert_object(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context,
636 unsigned int *insert_data, int lenght_in_dws, int data_bits_in_last_dw,
637 int skip_emul_byte_count, int is_last_header, int is_end_of_slice)
639 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
/* variable-length command: header dword + flags dword + payload */
641 BEGIN_BCS_BATCH(batch, lenght_in_dws + 2);
643 OUT_BCS_BATCH(batch, MFC_AVC_INSERT_OBJECT | (lenght_in_dws + 2 - 2));
645 (0 << 16) | /* always start at offset 0 */
646 (data_bits_in_last_dw << 8) |
647 (skip_emul_byte_count << 4) |
648 (1 << 3) | /* FIXME: ??? */
649 ((!!is_last_header) << 2) |
650 ((!!is_end_of_slice) << 1) |
651 (0 << 0)); /* FIXME: ??? */
653 intel_batchbuffer_data(batch, insert_data, lenght_in_dws * 4);
654 ADVANCE_BCS_BATCH(batch);
/*
 * Emit one MFC_AVC_PAK_OBJECT for an intra macroblock at (x, y), using the
 * VME output message (msg[1..3]) for the Intra16x16 prediction modes.
 * Returns the command length in dwords (11) so the caller can track batch
 * space. target_mb_size is the rate-control budget in 16-bit words; the max
 * is clamped at 2x target or 255, whichever is smaller.
 * NOTE(review): extract is missing braces and the OUT_BCS_BATCH opener
 * before the MB-flags dword.
 */
658 gen6_mfc_avc_pak_object_intra(VADriverContextP ctx, int x, int y, int end_mb, int qp,unsigned int *msg,
659 struct gen6_encoder_context *gen6_encoder_context,
660 int intra_mb_size_in_bits)
662 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
663 int len_in_dwords = 11;
/* bits -> 16-bit words; max budget = 2x target, saturated at 255 */
664 unsigned char target_mb_size = intra_mb_size_in_bits / 16; //In Words
665 unsigned char max_mb_size = target_mb_size * 2 > 255? 255: target_mb_size * 2 ;
667 BEGIN_BCS_BATCH(batch, len_in_dwords);
669 OUT_BCS_BATCH(batch, MFC_AVC_PAK_OBJECT | (len_in_dwords - 2));
670 OUT_BCS_BATCH(batch, 0);
671 OUT_BCS_BATCH(batch, 0);
/* DW3: intra MB — no motion vectors, all DC CBP bits set */
673 (0 << 24) | /* PackedMvNum, Debug*/
674 (0 << 20) | /* No motion vector */
675 (1 << 19) | /* CbpDcY */
676 (1 << 18) | /* CbpDcU */
677 (1 << 17) | /* CbpDcV */
680 OUT_BCS_BATCH(batch, (0xFFFF<<16) | (y << 8) | x); /* Code Block Pattern for Y*/
681 OUT_BCS_BATCH(batch, 0x000F000F); /* Code Block Pattern */
682 OUT_BCS_BATCH(batch, (0 << 27) | (end_mb << 26) | qp); /* Last MB */
684 /*Stuff for Intra MB*/
685 OUT_BCS_BATCH(batch, msg[1]); /* We using Intra16x16 no 4x4 predmode*/
686 OUT_BCS_BATCH(batch, msg[2]);
687 OUT_BCS_BATCH(batch, msg[3]&0xFC);
689 /*MaxSizeInWord and TargetSzieInWord*/
690 OUT_BCS_BATCH(batch, (max_mb_size << 24) |
691 (target_mb_size << 16) );
693 ADVANCE_BCS_BATCH(batch);
695 return len_in_dwords;
/*
 * Emit one MFC_AVC_PAK_OBJECT for an inter (P_L0_16x16) macroblock at (x, y).
 * Motion vectors come from the indirect MV object at `offset` (32 MVs).
 * Returns the command length in dwords (11). Note the max budget multiplier
 * is 16x the target here (vs 2x for intra), saturated at 255 words.
 * NOTE(review): extract is missing braces and the OUT_BCS_BATCH opener
 * before the MB-flags dword.
 */
698 static int gen6_mfc_avc_pak_object_inter(VADriverContextP ctx, int x, int y, int end_mb, int qp, unsigned int offset,
699 struct gen6_encoder_context *gen6_encoder_context,
700 int inter_mb_size_in_bits)
702 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
703 int len_in_dwords = 11;
/* bits -> 16-bit words; max budget = 16x target, saturated at 255 */
704 unsigned char target_mb_size = inter_mb_size_in_bits / 16; //In Words
705 unsigned char max_mb_size = target_mb_size * 16 > 255? 255: target_mb_size * 16 ;
707 BEGIN_BCS_BATCH(batch, len_in_dwords);
709 OUT_BCS_BATCH(batch, MFC_AVC_PAK_OBJECT | (len_in_dwords - 2));
/* DW1-DW2: indirect MV payload — 32 MVs read from `offset` in the MV object */
711 OUT_BCS_BATCH(batch, 32); /* 32 MV*/
712 OUT_BCS_BATCH(batch, offset);
/* DW3: inter MB flags — MbType = P_L0_16x16, frame-based */
715 (1 << 24) | /* PackedMvNum, Debug*/
716 (4 << 20) | /* 8 MV, SNB don't use it*/
717 (1 << 19) | /* CbpDcY */
718 (1 << 18) | /* CbpDcU */
719 (1 << 17) | /* CbpDcV */
720 (0 << 15) | /* Transform8x8Flag = 0*/
721 (0 << 14) | /* Frame based*/
722 (0 << 13) | /* Inter MB */
723 (1 << 8) | /* MbType = P_L0_16x16 */
724 (0 << 7) | /* MBZ for frame */
726 (2 << 4) | /* MBZ for inter*/
728 (0 << 2) | /* SkipMbFlag */
729 (0 << 0)); /* InterMbMode */
731 OUT_BCS_BATCH(batch, (0xFFFF<<16) | (y << 8) | x); /* Code Block Pattern for Y*/
732 OUT_BCS_BATCH(batch, 0x000F000F); /* Code Block Pattern */
733 OUT_BCS_BATCH(batch, (0 << 27) | (end_mb << 26) | qp); /* Last MB */
735 /*Stuff for Inter MB*/
736 OUT_BCS_BATCH(batch, 0x0);
737 OUT_BCS_BATCH(batch, 0x0);
738 OUT_BCS_BATCH(batch, 0x0);
740 /*MaxSizeInWord and TargetSzieInWord*/
741 OUT_BCS_BATCH(batch, (max_mb_size << 24) |
742 (target_mb_size << 16) );
744 ADVANCE_BCS_BATCH(batch);
746 return len_in_dwords;
/*
 * (Re)initialize MFC encode state: drop all buffer references held from a
 * previous frame (outputs, source, PAK-BSE object, DMV buffers, references)
 * and allocate fresh scratch buffers (intra row store, MB status, deblocking
 * filter row store, BSD/MPC row store).
 * NOTE(review): extract is missing braces, the `bo`/`i` declarations, and
 * the dri_bo_alloc argument lines (name/size/alignment partially elided).
 */
749 static void gen6_mfc_init(VADriverContextP ctx, struct gen6_encoder_context *gen6_encoder_context)
751 struct i965_driver_data *i965 = i965_driver_data(ctx);
752 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
756 /*Encode common setup for MFC*/
757 dri_bo_unreference(mfc_context->post_deblocking_output.bo);
758 mfc_context->post_deblocking_output.bo = NULL;
760 dri_bo_unreference(mfc_context->pre_deblocking_output.bo);
761 mfc_context->pre_deblocking_output.bo = NULL;
763 dri_bo_unreference(mfc_context->uncompressed_picture_source.bo);
764 mfc_context->uncompressed_picture_source.bo = NULL;
766 dri_bo_unreference(mfc_context->mfc_indirect_pak_bse_object.bo);
767 mfc_context->mfc_indirect_pak_bse_object.bo = NULL;
769 for (i = 0; i < NUM_MFC_DMV_BUFFERS; i++){
770 dri_bo_unreference(mfc_context->direct_mv_buffers[i].bo);
771 mfc_context->direct_mv_buffers[i].bo = NULL;
/* NOTE(review): the NULL guard below is redundant — dri_bo_unreference(NULL)
 * is a no-op in libdrm; harmless but inconsistent with the lines above */
774 for (i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++){
775 if (mfc_context->reference_surfaces[i].bo != NULL)
776 dri_bo_unreference(mfc_context->reference_surfaces[i].bo);
777 mfc_context->reference_surfaces[i].bo = NULL;
780 dri_bo_unreference(mfc_context->intra_row_store_scratch_buffer.bo);
781 bo = dri_bo_alloc(i965->intel.bufmgr,
786 mfc_context->intra_row_store_scratch_buffer.bo = bo;
788 dri_bo_unreference(mfc_context->macroblock_status_buffer.bo);
789 bo = dri_bo_alloc(i965->intel.bufmgr,
794 mfc_context->macroblock_status_buffer.bo = bo;
796 dri_bo_unreference(mfc_context->deblocking_filter_row_store_scratch_buffer.bo);
797 bo = dri_bo_alloc(i965->intel.bufmgr,
799 49152, /* 6 * 128 * 64 */
802 mfc_context->deblocking_filter_row_store_scratch_buffer.bo = bo;
804 dri_bo_unreference(mfc_context->bsd_mpc_row_store_scratch_buffer.bo);
805 bo = dri_bo_alloc(i965->intel.bufmgr,
807 12288, /* 1.5 * 128 * 64 */
810 mfc_context->bsd_mpc_row_store_scratch_buffer.bo = bo;
/*
 * Drive the full AVC PAK pipeline for one frame: builds the slice header,
 * derives per-MB bit budgets from the sequence bitrate, then walks every
 * macroblock emitting state commands (once, on emit_new_state) followed by
 * one PAK object per MB, and finally inserts the tail data. Gen6 vs Gen7
 * state emission is selected via IS_GEN7 at each state stage.
 *
 * BUG FIX: the inter MB size clamp below previously assigned to
 * intra_mb_size ("intra_mb_size = 256*8;"), leaving inter_mb_size unclamped
 * and silently corrupting the intra budget. It now clamps inter_mb_size.
 *
 * NOTE(review): this extract is missing lines (braces, x/y declarations,
 * and some else branches); only the visible statements are documented.
 */
813 void gen6_mfc_avc_pipeline_programing(VADriverContextP ctx,
814 struct encode_state *encode_state,
815 struct gen6_encoder_context *gen6_encoder_context)
817 struct i965_driver_data *i965 = i965_driver_data(ctx);
818 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
819 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
820 struct gen6_vme_context *vme_context = &gen6_encoder_context->vme_context;
821 VAEncSequenceParameterBufferH264Ext *pSequenceParameter = (VAEncSequenceParameterBufferH264Ext *)encode_state->seq_param_ext->buffer;
822 VAEncPictureParameterBufferH264Ext *pPicParameter = (VAEncPictureParameterBufferH264Ext *)encode_state->pic_param_ext->buffer;
823 VAEncSliceParameterBufferH264Ext *pSliceParameter = (VAEncSliceParameterBufferH264Ext *)encode_state->slice_params_ext[0]->buffer; /* FIXME: multi slices */
824 VAEncH264DecRefPicMarkingBuffer *pDecRefPicMarking = NULL;
825 unsigned int *msg = NULL, offset = 0;
826 int emit_new_state = 1, object_len_in_bytes;
827 int is_intra = pSliceParameter->slice_type == SLICE_TYPE_I;
828 int width_in_mbs = (mfc_context->surface_state.width + 15) / 16;
829 int height_in_mbs = (mfc_context->surface_state.height + 15) / 16;
831 int rate_control_mode = pSequenceParameter->rate_control_method;
/* frame rate from the VUI timing info: time_scale / (2 * num_units_in_tick) */
832 float fps = pSequenceParameter->time_scale * 0.5 / pSequenceParameter->num_units_in_tick ;
/* per-MB bit budget: average bits per frame spread over all MBs; intra MBs
 * are allowed 5x the inter budget */
833 int inter_mb_size = pSequenceParameter->bits_per_second * 1.0 / fps / width_in_mbs / height_in_mbs;
834 int intra_mb_size = inter_mb_size * 5.0;
835 int qp = pPicParameter->pic_init_qp;
836 unsigned char *slice_header = NULL;
837 int slice_header_length_in_bits = 0;
838 unsigned int tail_data[] = { 0x0 };
840 if (encode_state->dec_ref_pic_marking)
841 pDecRefPicMarking = (VAEncH264DecRefPicMarkingBuffer *)encode_state->dec_ref_pic_marking->buffer;
843 slice_header_length_in_bits = build_avc_slice_header(pSequenceParameter, pPicParameter, pSliceParameter, pDecRefPicMarking, &slice_header);
/* clamp budgets unless in CQP-like mode (rate_control_method == 2) */
845 if ( rate_control_mode != 2) {
847 if ( intra_mb_size > 384*8) //ONE MB raw data is 384 bytes
848 intra_mb_size = 384*8;
849 if ( inter_mb_size > 256*8)
850 inter_mb_size = 256*8; /* was: intra_mb_size = 256*8; (wrong variable) */
853 intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
/* map the VME output so PAK intra objects can read the prediction modes */
856 dri_bo_map(vme_context->vme_output.bo , 1);
857 msg = (unsigned int *)vme_context->vme_output.bo->virtual;
860 for (y = 0; y < height_in_mbs; y++) {
861 for (x = 0; x < width_in_mbs; x++) {
862 int last_mb = (y == (height_in_mbs-1)) && ( x == (width_in_mbs-1) );
/* emit full hardware state once per batch segment */
864 if (emit_new_state) {
865 intel_batchbuffer_emit_mi_flush(batch);
867 if (IS_GEN7(i965->intel.device_id)) {
868 gen7_mfc_pipe_mode_select(ctx, MFX_FORMAT_AVC, gen6_encoder_context);
869 gen7_mfc_surface_state(ctx, gen6_encoder_context);
870 gen7_mfc_ind_obj_base_addr_state(ctx, gen6_encoder_context);
872 gen6_mfc_pipe_mode_select(ctx, gen6_encoder_context);
873 gen6_mfc_surface_state(ctx, gen6_encoder_context);
874 gen6_mfc_ind_obj_base_addr_state(ctx, gen6_encoder_context);
877 gen6_mfc_pipe_buf_addr_state(ctx, gen6_encoder_context);
878 gen6_mfc_bsp_buf_base_addr_state(ctx, gen6_encoder_context);
880 if (IS_GEN7(i965->intel.device_id)) {
881 gen7_mfc_avc_img_state(ctx, gen6_encoder_context);
882 gen7_mfc_avc_qm_state(ctx, gen6_encoder_context);
883 gen7_mfc_avc_fqm_state(ctx, gen6_encoder_context);
885 gen6_mfc_avc_img_state(ctx, gen6_encoder_context);
886 gen6_mfc_avc_qm_state(ctx, gen6_encoder_context);
887 gen6_mfc_avc_fqm_state(ctx, gen6_encoder_context);
890 gen6_mfc_avc_ref_idx_state(ctx, gen6_encoder_context);
/* CBR counter enabled only for rate_control_method == 0 */
891 gen6_mfc_avc_slice_state(ctx, pSliceParameter->slice_type, gen6_encoder_context, rate_control_mode == 0, qp);
892 gen6_mfc_avc_insert_object(ctx, gen6_encoder_context,
893 (unsigned int *)slice_header, ALIGN(slice_header_length_in_bits, 32) >> 5, slice_header_length_in_bits & 0x1f,
894 5, 1, 0); /* first 5 bytes are start code + nal unit type */
900 object_len_in_bytes = gen6_mfc_avc_pak_object_intra(ctx, x, y, last_mb, qp, msg, gen6_encoder_context, intra_mb_size);
903 object_len_in_bytes = gen6_mfc_avc_pak_object_inter(ctx, x, y, last_mb, qp, offset, gen6_encoder_context, inter_mb_size);
/* batch full: flush and restart; state must be re-emitted next iteration */
907 if (intel_batchbuffer_check_free_space(batch, object_len_in_bytes) == 0) {
909 intel_batchbuffer_end_atomic(batch);
910 intel_batchbuffer_flush(batch);
912 intel_batchbuffer_start_atomic_bcs(batch, 0x1000);
/* terminate the stream: 32 valid bits of tail data, marked last + EOS */
917 gen6_mfc_avc_insert_object(ctx, gen6_encoder_context,
918 tail_data, sizeof(tail_data) >> 2, 32,
919 sizeof(tail_data), 1, 1);
922 dri_bo_unmap(vme_context->vme_output.bo);
926 intel_batchbuffer_end_atomic(batch);
/*
 * gen6_mfc_avc_prepare() - bind all input/output buffer objects for one
 * AVC encode pass, then record the BCS command stream.
 *
 * Wires into mfc_context:
 *   - the reconstructed picture (CurrPic) as the post-deblocking output,
 *   - every valid reference frame BO,
 *   - the raw source surface (current_render_target),
 *   - the coded (PAK/BSE indirect) output buffer,
 * and finally calls gen6_mfc_avc_pipeline_programing() to fill the batch.
 *
 * NOTE(review): several original lines are missing from this chunk
 * (opening brace, declarations of `i` and `bo`, closing braces of the
 * reference-frame loop, `return vaStatus;`). Surviving statements are
 * kept byte-identical below.
 */
929 static VAStatus gen6_mfc_avc_prepare(VADriverContextP ctx,
930 struct encode_state *encode_state,
931 struct gen6_encoder_context *gen6_encoder_context)
933 struct i965_driver_data *i965 = i965_driver_data(ctx);
934 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
935 struct object_surface *obj_surface;
936 struct object_buffer *obj_buffer;
938 VAEncPictureParameterBufferH264Ext *pPicParameter = (VAEncPictureParameterBufferH264Ext *)encode_state->pic_param_ext->buffer;
939 VAStatus vaStatus = VA_STATUS_SUCCESS;
942 /*Setup all the input&output object*/
/* CurrPic is the reconstructed target; make sure it has an NV12 BO backing it. */
943 obj_surface = SURFACE(pPicParameter->CurrPic.picture_id);
945 i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N','V','1','2'));
/* Reconstructed frame is written post-deblocking (PIPE_MODE_SELECT enables
 * the post-deblocking output path — see gen6_mfc_pipe_mode_select). */
947 mfc_context->post_deblocking_output.bo = obj_surface->bo;
948 dri_bo_reference(mfc_context->post_deblocking_output.bo);
/* orig_* are the caller-visible dimensions; width/height are the padded
 * allocation pitch values. */
950 mfc_context->surface_state.width = obj_surface->orig_width;
951 mfc_context->surface_state.height = obj_surface->orig_height;
952 mfc_context->surface_state.w_pitch = obj_surface->width;
953 mfc_context->surface_state.h_pitch = obj_surface->height;
/* Take a reference on every valid reference-frame BO for the PAK stage. */
955 for(i = 0; i < MAX_MFC_REFERENCE_SURFACES; i++) {
956 if ( pPicParameter->ReferenceFrames[i].picture_id != VA_INVALID_ID ) {
957 obj_surface = SURFACE(pPicParameter->ReferenceFrames[i].picture_id);
959 if (obj_surface->bo != NULL) {
960 mfc_context->reference_surfaces[i].bo = obj_surface->bo;
961 dri_bo_reference(obj_surface->bo);
/* Raw input picture to be encoded. */
968 obj_surface = SURFACE(encode_state->current_render_target);
969 assert(obj_surface && obj_surface->bo);
970 mfc_context->uncompressed_picture_source.bo = obj_surface->bo;
971 dri_bo_reference(mfc_context->uncompressed_picture_source.bo);
973 obj_buffer = BUFFER (pPicParameter->CodedBuf); /* FIXME: fix this later */
974 bo = obj_buffer->buffer_store->bo;
/* Coded bits land after the VACodedBufferSegment header (64-byte aligned);
 * the last page of the buffer is reserved — presumably for status/tail data,
 * TODO confirm against the consumer of end_offset. */
976 mfc_context->mfc_indirect_pak_bse_object.bo = bo;
977 mfc_context->mfc_indirect_pak_bse_object.offset = ALIGN(sizeof(VACodedBufferSegment), 64);
978 mfc_context->mfc_indirect_pak_bse_object.end_offset = ALIGN (obj_buffer->size_element - 0x1000, 0x1000);
979 dri_bo_reference(mfc_context->mfc_indirect_pak_bse_object.bo);
981 /*Programing bcs pipeline*/
982 gen6_mfc_avc_pipeline_programing(ctx, encode_state, gen6_encoder_context); //filling the pipeline
987 static VAStatus gen6_mfc_run(VADriverContextP ctx,
988 struct encode_state *encode_state,
989 struct gen6_encoder_context *gen6_encoder_context)
991 struct intel_batchbuffer *batch = gen6_encoder_context->base.batch;
993 intel_batchbuffer_flush(batch); //run the pipeline
995 return VA_STATUS_SUCCESS;
/*
 * gen6_mfc_stop() - post-run hook; currently only dumps the reconstructed
 * picture through the my_debug() helper.
 *
 * NOTE(review): i965, mfc_context and pPicParameter are fetched but only
 * the (commented-out) debug variants use them — expect unused-variable
 * warnings. The commented alternatives select different surfaces to dump.
 */
998 static VAStatus gen6_mfc_stop(VADriverContextP ctx,
999 struct encode_state *encode_state,
1000 struct gen6_encoder_context *gen6_encoder_context)
1003 struct i965_driver_data *i965 = i965_driver_data(ctx);
1004 struct gen6_mfc_context *mfc_context = &gen6_encoder_context->mfc_context;
1006 VAEncPictureParameterBufferH264Ext *pPicParameter = (VAEncPictureParameterBufferH264Ext *)encode_state->pic_param_ext->buffer;
/* Dump the reconstructed picture; the alternatives below dump a reference
 * frame or the raw source instead. */
1008 struct object_surface *obj_surface = SURFACE(pPicParameter->reconstructed_picture);
1009 //struct object_surface *obj_surface = SURFACE(pPicParameter->reference_picture[0]);
1010 //struct object_surface *obj_surface = SURFACE(encode_state->current_render_target);
1011 my_debug(obj_surface);
1015 return VA_STATUS_SUCCESS;
1019 gen6_mfc_avc_encode_picture(VADriverContextP ctx,
1020 struct encode_state *encode_state,
1021 struct gen6_encoder_context *gen6_encoder_context)
1023 gen6_mfc_init(ctx, gen6_encoder_context);
1024 gen6_mfc_avc_prepare(ctx, encode_state, gen6_encoder_context);
1025 gen6_mfc_run(ctx, encode_state, gen6_encoder_context);
1026 gen6_mfc_stop(ctx, encode_state, gen6_encoder_context);
1028 return VA_STATUS_SUCCESS;
/*
 * gen6_mfc_pipeline() - dispatch an encode request by VA profile.
 *
 * Only H.264 Baseline is handled; every other profile yields
 * VA_STATUS_ERROR_UNSUPPORTED_PROFILE.
 *
 * NOTE(review): this chunk lost lines to extraction — presumably a
 * `VAProfile profile` parameter, the `switch (profile)` header, the
 * `default:` label and the final `return vaStatus;` — TODO confirm
 * against the full file before editing.
 */
1032 gen6_mfc_pipeline(VADriverContextP ctx,
1034 struct encode_state *encode_state,
1035 struct gen6_encoder_context *gen6_encoder_context)
1040 case VAProfileH264Baseline:
1041 vaStatus = gen6_mfc_avc_encode_picture(ctx, encode_state, gen6_encoder_context);
1044 /* FIXME: add for other profile */
1046 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/*
 * gen6_mfc_context_init() - one-time setup of a gen6_mfc_context.
 * NOTE(review): the body lies outside this chunk — presumably it just
 * returns True; confirm against the full file.
 */
1053 Bool gen6_mfc_context_init(VADriverContextP ctx, struct gen6_mfc_context *mfc_context)
/*
 * gen6_mfc_context_destroy() - release every BO reference held by
 * mfc_context and NULL the pointers so a double-destroy is harmless.
 *
 * NOTE(review): the function's tail (presumably `return True;` and the
 * closing brace) lies beyond this chunk; the loop index `i` is declared
 * in a line lost to extraction. dri_bo_unreference is presumably a no-op
 * on NULL — confirm against libdrm before relying on it.
 */
1058 Bool gen6_mfc_context_destroy(struct gen6_mfc_context *mfc_context)
1062 dri_bo_unreference(mfc_context->post_deblocking_output.bo);
1063 mfc_context->post_deblocking_output.bo = NULL;
1065 dri_bo_unreference(mfc_context->pre_deblocking_output.bo);
1066 mfc_context->pre_deblocking_output.bo = NULL;
1068 dri_bo_unreference(mfc_context->uncompressed_picture_source.bo);
1069 mfc_context->uncompressed_picture_source.bo = NULL;
1071 dri_bo_unreference(mfc_context->mfc_indirect_pak_bse_object.bo);
1072 mfc_context->mfc_indirect_pak_bse_object.bo = NULL;
/* Direct-MV buffers are a fixed-size array; drop each element's BO. */
1074 for (i = 0; i < NUM_MFC_DMV_BUFFERS; i++){
1075 dri_bo_unreference(mfc_context->direct_mv_buffers[i].bo);
1076 mfc_context->direct_mv_buffers[i].bo = NULL;
/* Scratch/row-store buffers allocated for the MFX pipeline. */
1079 dri_bo_unreference(mfc_context->intra_row_store_scratch_buffer.bo);
1080 mfc_context->intra_row_store_scratch_buffer.bo = NULL;
1082 dri_bo_unreference(mfc_context->macroblock_status_buffer.bo);
1083 mfc_context->macroblock_status_buffer.bo = NULL;
1085 dri_bo_unreference(mfc_context->deblocking_filter_row_store_scratch_buffer.bo);
1086 mfc_context->deblocking_filter_row_store_scratch_buffer.bo = NULL;
1088 dri_bo_unreference(mfc_context->bsd_mpc_row_store_scratch_buffer.bo);
1089 mfc_context->bsd_mpc_row_store_scratch_buffer.bo = NULL;