2 * Copyright © 2010 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
33 #include "va_backend.h"
35 #include "intel_batchbuffer.h"
36 #include "intel_driver.h"
38 #include "i965_defines.h"
39 #include "i965_drv_video.h"
40 #include "i965_avc_ildb.h"
41 #include "i965_media_h264.h"
42 #include "i965_media.h"
45 #include "shaders/h264/mc/export.inc"
48 #include "shaders/h264/mc/export.inc.gen5"
/* Picture structure types handed to the ILDB root kernel (CURBE picture_type). */
#define PICTURE_FRAME   0
#define PICTURE_FIELD   1
#define PICTURE_MBAFF   2
/*
 * Indices into avc_ildb_kernel_offset_gen4/gen5[]: for each picture
 * structure there is a root and a child kernel for the Y plane and for
 * the interleaved UV plane.
 */
enum {
    AVC_ILDB_ROOT_Y_ILDB_FRAME,
    AVC_ILDB_CHILD_Y_ILDB_FRAME,
    AVC_ILDB_ROOT_UV_ILDB_FRAME,
    AVC_ILDB_CHILD_UV_ILDB_FRAME,
    AVC_ILDB_ROOT_Y_ILDB_FIELD,
    AVC_ILDB_CHILD_Y_ILDB_FIELD,
    AVC_ILDB_ROOT_UV_ILDB_FIELD,
    AVC_ILDB_CHILD_UV_ILDB_FIELD,
    AVC_ILDB_ROOT_Y_ILDB_MBAFF,
    AVC_ILDB_CHILD_Y_ILDB_MBAFF,
    AVC_ILDB_ROOT_UV_ILDB_MBAFF,
    AVC_ILDB_CHILD_UV_ILDB_MBAFF
};
69 static unsigned long avc_ildb_kernel_offset_gen4[] = {
70 AVC_ILDB_ROOT_Y_ILDB_FRAME_IP * INST_UNIT_GEN4,
71 AVC_ILDB_CHILD_Y_ILDB_FRAME_IP * INST_UNIT_GEN4,
72 AVC_ILDB_ROOT_UV_ILDB_FRAME_IP * INST_UNIT_GEN4,
73 AVC_ILDB_CHILD_UV_ILDB_FRAME_IP * INST_UNIT_GEN4,
74 AVC_ILDB_ROOT_Y_ILDB_FIELD_IP * INST_UNIT_GEN4,
75 AVC_ILDB_CHILD_Y_ILDB_FIELD_IP * INST_UNIT_GEN4,
76 AVC_ILDB_ROOT_UV_ILDB_FIELD_IP * INST_UNIT_GEN4,
77 AVC_ILDB_CHILD_UV_ILDB_FIELD_IP * INST_UNIT_GEN4,
78 AVC_ILDB_ROOT_Y_ILDB_MBAFF_IP * INST_UNIT_GEN4,
79 AVC_ILDB_CHILD_Y_ILDB_MBAFF_IP * INST_UNIT_GEN4,
80 AVC_ILDB_ROOT_UV_ILDB_MBAFF_IP * INST_UNIT_GEN4,
81 AVC_ILDB_CHILD_UV_ILDB_MBAFF_IP * INST_UNIT_GEN4
84 static unsigned long avc_ildb_kernel_offset_gen5[] = {
85 AVC_ILDB_ROOT_Y_ILDB_FRAME_IP_GEN5 * INST_UNIT_GEN5,
86 AVC_ILDB_CHILD_Y_ILDB_FRAME_IP_GEN5 * INST_UNIT_GEN5,
87 AVC_ILDB_ROOT_UV_ILDB_FRAME_IP_GEN5 * INST_UNIT_GEN5,
88 AVC_ILDB_CHILD_UV_ILDB_FRAME_IP_GEN5 * INST_UNIT_GEN5,
89 AVC_ILDB_ROOT_Y_ILDB_FIELD_IP_GEN5 * INST_UNIT_GEN5,
90 AVC_ILDB_CHILD_Y_ILDB_FIELD_IP_GEN5 * INST_UNIT_GEN5,
91 AVC_ILDB_ROOT_UV_ILDB_FIELD_IP_GEN5 * INST_UNIT_GEN5,
92 AVC_ILDB_CHILD_UV_ILDB_FIELD_IP_GEN5 * INST_UNIT_GEN5,
93 AVC_ILDB_ROOT_Y_ILDB_MBAFF_IP_GEN5 * INST_UNIT_GEN5,
94 AVC_ILDB_CHILD_Y_ILDB_MBAFF_IP_GEN5 * INST_UNIT_GEN5,
95 AVC_ILDB_ROOT_UV_ILDB_MBAFF_IP_GEN5 * INST_UNIT_GEN5,
96 AVC_ILDB_CHILD_UV_ILDB_MBAFF_IP_GEN5 * INST_UNIT_GEN5
/*
 * CURBE (constant URB entry) layout consumed by the ILDB root kernel.
 * The bit-field layout must match the media kernel's expectations — do
 * not reorder fields.
 */
struct avc_ildb_root_input
{
    unsigned int blocks_per_row : 16;           /* picture width in MBs */
    unsigned int blocks_per_column : 16;        /* rows of MBs (per field for field pics) */

    unsigned int picture_type : 16;             /* PICTURE_FRAME/FIELD/MBAFF */
    unsigned int max_concurrent_threads : 16;

    unsigned int debug_field : 16;
    unsigned int mbaff_frame_flag : 1;
    unsigned int bottom_field_flag : 1;
    unsigned int control_data_expansion_flag : 1;
    unsigned int chroma_format : 1;
    unsigned int pad0 : 12;

    unsigned int ramp_constant_0;

    unsigned int ramp_constant_1;

    /* NOTE(review): written as -2 and 1 by i965_avc_ildb_upload_constants();
     * declared signed for that reason — confirm against the kernel source. */
    int constant_0;

    int constant_1;
};
126 #define NUM_AVC_ILDB_INTERFACES ARRAY_ELEMS(avc_ildb_kernel_offset_gen4)
127 static unsigned long *avc_ildb_kernel_offset = NULL;
130 i965_avc_ildb_surface_state(VADriverContextP ctx,
131 struct decode_state *decode_state,
132 struct i965_h264_context *i965_h264_context)
134 struct i965_driver_data *i965 = i965_driver_data(ctx);
135 struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
136 struct i965_surface_state *ss;
137 struct object_surface *obj_surface;
138 VAPictureParameterBufferH264 *pic_param;
139 VAPictureH264 *va_pic;
143 assert(decode_state->pic_param && decode_state->pic_param->buffer);
144 pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
145 va_pic = &pic_param->CurrPic;
146 assert(!(va_pic->flags & VA_PICTURE_H264_INVALID));
147 obj_surface = SURFACE(va_pic->picture_id);
150 avc_ildb_context->surface[SURFACE_EDGE_CONTROL_DATA].s_bo = i965_h264_context->avc_ildb_data.bo;
151 dri_bo_reference(avc_ildb_context->surface[SURFACE_EDGE_CONTROL_DATA].s_bo);
152 avc_ildb_context->surface[SURFACE_EDGE_CONTROL_DATA].offset = 0;
153 avc_ildb_context->surface[SURFACE_EDGE_CONTROL_DATA].surface_type = I965_SURFACE_BUFFER;
154 avc_ildb_context->surface[SURFACE_EDGE_CONTROL_DATA].width = ((avc_ildb_context->mbs_per_picture * EDGE_CONTROL_DATA_IN_DWS - 1) & 0x7f);
155 avc_ildb_context->surface[SURFACE_EDGE_CONTROL_DATA].height = (((avc_ildb_context->mbs_per_picture * EDGE_CONTROL_DATA_IN_DWS - 1) >> 7) & 0x1fff);
156 avc_ildb_context->surface[SURFACE_EDGE_CONTROL_DATA].depth = (((avc_ildb_context->mbs_per_picture * EDGE_CONTROL_DATA_IN_DWS - 1) >> 20) & 0x7f);
157 avc_ildb_context->surface[SURFACE_EDGE_CONTROL_DATA].pitch = EDGE_CONTROL_DATA_IN_BTYES - 1;
158 avc_ildb_context->surface[SURFACE_EDGE_CONTROL_DATA].is_target = 0;
160 avc_ildb_context->surface[SURFACE_SRC_Y].s_bo = obj_surface->bo;
161 dri_bo_reference(avc_ildb_context->surface[SURFACE_SRC_Y].s_bo);
162 avc_ildb_context->surface[SURFACE_SRC_Y].offset = 0;
163 avc_ildb_context->surface[SURFACE_SRC_Y].surface_type = I965_SURFACE_2D;
164 avc_ildb_context->surface[SURFACE_SRC_Y].format = I965_SURFACEFORMAT_R8_SINT;
165 avc_ildb_context->surface[SURFACE_SRC_Y].width = obj_surface->width / 4 - 1;
166 avc_ildb_context->surface[SURFACE_SRC_Y].height = obj_surface->height - 1;
167 avc_ildb_context->surface[SURFACE_SRC_Y].depth = 0;
168 avc_ildb_context->surface[SURFACE_SRC_Y].pitch = obj_surface->width - 1;
169 avc_ildb_context->surface[SURFACE_SRC_Y].vert_line_stride = !!(va_pic->flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD));
170 avc_ildb_context->surface[SURFACE_SRC_Y].vert_line_stride_ofs = !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
171 avc_ildb_context->surface[SURFACE_SRC_Y].is_target = 0;
173 avc_ildb_context->surface[SURFACE_SRC_UV].s_bo = obj_surface->bo;
174 dri_bo_reference(avc_ildb_context->surface[SURFACE_SRC_UV].s_bo);
175 avc_ildb_context->surface[SURFACE_SRC_UV].offset = obj_surface->width * obj_surface->height;
176 avc_ildb_context->surface[SURFACE_SRC_UV].surface_type = I965_SURFACE_2D;
177 avc_ildb_context->surface[SURFACE_SRC_UV].format = I965_SURFACEFORMAT_R8G8_SINT;
178 avc_ildb_context->surface[SURFACE_SRC_UV].width = obj_surface->width / 4 - 1;
179 avc_ildb_context->surface[SURFACE_SRC_UV].height = obj_surface->height / 2 - 1;
180 avc_ildb_context->surface[SURFACE_SRC_UV].depth = 0;
181 avc_ildb_context->surface[SURFACE_SRC_UV].pitch = obj_surface->width - 1;
182 avc_ildb_context->surface[SURFACE_SRC_UV].vert_line_stride = !!(va_pic->flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD));
183 avc_ildb_context->surface[SURFACE_SRC_UV].vert_line_stride_ofs = !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
184 avc_ildb_context->surface[SURFACE_SRC_UV].is_target = 0;
186 avc_ildb_context->surface[SURFACE_DEST_Y].s_bo = obj_surface->bo;
187 dri_bo_reference(avc_ildb_context->surface[SURFACE_DEST_Y].s_bo);
188 avc_ildb_context->surface[SURFACE_DEST_Y].offset = 0;
189 avc_ildb_context->surface[SURFACE_DEST_Y].surface_type = I965_SURFACE_2D;
190 avc_ildb_context->surface[SURFACE_DEST_Y].format = I965_SURFACEFORMAT_R8_SINT;
191 avc_ildb_context->surface[SURFACE_DEST_Y].width = obj_surface->width / 4 - 1;
192 avc_ildb_context->surface[SURFACE_DEST_Y].height = obj_surface->height - 1;
193 avc_ildb_context->surface[SURFACE_DEST_Y].depth = 0;
194 avc_ildb_context->surface[SURFACE_DEST_Y].pitch = obj_surface->width - 1;
195 avc_ildb_context->surface[SURFACE_DEST_Y].vert_line_stride = !!(va_pic->flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD));
196 avc_ildb_context->surface[SURFACE_DEST_Y].vert_line_stride_ofs = !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
197 avc_ildb_context->surface[SURFACE_DEST_Y].is_target = 1;
199 avc_ildb_context->surface[SURFACE_DEST_UV].s_bo = obj_surface->bo;
200 dri_bo_reference(avc_ildb_context->surface[SURFACE_DEST_UV].s_bo);
201 avc_ildb_context->surface[SURFACE_DEST_UV].offset = obj_surface->width * obj_surface->height;
202 avc_ildb_context->surface[SURFACE_DEST_UV].surface_type = I965_SURFACE_2D;
203 avc_ildb_context->surface[SURFACE_DEST_UV].format = I965_SURFACEFORMAT_R8G8_SINT;
204 avc_ildb_context->surface[SURFACE_DEST_UV].width = obj_surface->width / 4 - 1;
205 avc_ildb_context->surface[SURFACE_DEST_UV].height = obj_surface->height / 2 - 1;
206 avc_ildb_context->surface[SURFACE_DEST_UV].depth = 0;
207 avc_ildb_context->surface[SURFACE_DEST_UV].pitch = obj_surface->width - 1;
208 avc_ildb_context->surface[SURFACE_DEST_UV].vert_line_stride = !!(va_pic->flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD));
209 avc_ildb_context->surface[SURFACE_DEST_UV].vert_line_stride_ofs = !!(va_pic->flags & VA_PICTURE_H264_BOTTOM_FIELD);
210 avc_ildb_context->surface[SURFACE_DEST_UV].is_target = 1;
212 for (i = 0; i < NUM_AVC_ILDB_SURFACES; i++) {
213 bo = avc_ildb_context->surface[i].ss_bo;
217 memset(ss, 0, sizeof(*ss));
218 ss->ss0.surface_type = avc_ildb_context->surface[i].surface_type;
219 ss->ss0.surface_format = avc_ildb_context->surface[i].format;
220 ss->ss0.vert_line_stride = avc_ildb_context->surface[i].vert_line_stride;
221 ss->ss0.vert_line_stride_ofs = avc_ildb_context->surface[i].vert_line_stride_ofs;
222 ss->ss1.base_addr = avc_ildb_context->surface[i].s_bo->offset + avc_ildb_context->surface[i].offset;
223 ss->ss2.width = avc_ildb_context->surface[i].width;
224 ss->ss2.height = avc_ildb_context->surface[i].height;
225 ss->ss3.depth = avc_ildb_context->surface[i].depth;
226 ss->ss3.pitch = avc_ildb_context->surface[i].pitch;
227 dri_bo_emit_reloc(bo,
228 I915_GEM_DOMAIN_RENDER,
229 avc_ildb_context->surface[i].is_target ? I915_GEM_DOMAIN_RENDER : 0,
230 avc_ildb_context->surface[i].offset,
231 offsetof(struct i965_surface_state, ss1),
232 avc_ildb_context->surface[i].s_bo);
238 i965_avc_ildb_binding_table(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
240 struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
241 unsigned int *binding_table;
242 dri_bo *bo = avc_ildb_context->binding_table.bo;
247 binding_table = bo->virtual;
248 memset(binding_table, 0, bo->size);
250 for (i = 0; i < NUM_AVC_ILDB_SURFACES; i++) {
251 binding_table[i] = avc_ildb_context->surface[i].ss_bo->offset;
252 dri_bo_emit_reloc(bo,
253 I915_GEM_DOMAIN_INSTRUCTION, 0,
255 i * sizeof(*binding_table),
256 avc_ildb_context->surface[i].ss_bo);
263 i965_avc_ildb_interface_descriptor_table(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
265 struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
266 struct i965_interface_descriptor *desc;
270 bo = avc_ildb_context->idrt.bo;
275 for (i = 0; i < NUM_AVC_ILDB_INTERFACES; i++) {
276 int kernel_offset = avc_ildb_kernel_offset[i];
277 memset(desc, 0, sizeof(*desc));
278 desc->desc0.grf_reg_blocks = 7;
279 desc->desc0.kernel_start_pointer = (i965_h264_context->avc_kernels[H264_AVC_COMBINED].bo->offset + kernel_offset) >> 6; /* reloc */
280 desc->desc1.const_urb_entry_read_offset = 0;
281 desc->desc1.const_urb_entry_read_len = ((i == AVC_ILDB_ROOT_Y_ILDB_FRAME ||
282 i == AVC_ILDB_ROOT_Y_ILDB_FIELD ||
283 i == AVC_ILDB_ROOT_Y_ILDB_MBAFF) ? 1 : 0);
284 desc->desc3.binding_table_entry_count = 0;
285 desc->desc3.binding_table_pointer =
286 avc_ildb_context->binding_table.bo->offset >> 5; /*reloc */
288 dri_bo_emit_reloc(bo,
289 I915_GEM_DOMAIN_INSTRUCTION, 0,
290 desc->desc0.grf_reg_blocks + kernel_offset,
291 i * sizeof(*desc) + offsetof(struct i965_interface_descriptor, desc0),
292 i965_h264_context->avc_kernels[H264_AVC_COMBINED].bo);
294 dri_bo_emit_reloc(bo,
295 I915_GEM_DOMAIN_INSTRUCTION, 0,
296 desc->desc3.binding_table_entry_count,
297 i * sizeof(*desc) + offsetof(struct i965_interface_descriptor, desc3),
298 avc_ildb_context->binding_table.bo);
306 i965_avc_ildb_vfe_state(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
308 struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
309 struct i965_vfe_state *vfe_state;
312 bo = avc_ildb_context->vfe_state.bo;
315 vfe_state = bo->virtual;
316 memset(vfe_state, 0, sizeof(*vfe_state));
317 vfe_state->vfe1.max_threads = 0;
318 vfe_state->vfe1.urb_entry_alloc_size = avc_ildb_context->urb.size_vfe_entry - 1;
319 vfe_state->vfe1.num_urb_entries = avc_ildb_context->urb.num_vfe_entries;
320 vfe_state->vfe1.vfe_mode = VFE_GENERIC_MODE;
321 vfe_state->vfe1.children_present = 1;
322 vfe_state->vfe2.interface_descriptor_base =
323 avc_ildb_context->idrt.bo->offset >> 4; /* reloc */
324 dri_bo_emit_reloc(bo,
325 I915_GEM_DOMAIN_INSTRUCTION, 0,
327 offsetof(struct i965_vfe_state, vfe2),
328 avc_ildb_context->idrt.bo);
333 i965_avc_ildb_upload_constants(VADriverContextP ctx,
334 struct decode_state *decode_state,
335 struct i965_h264_context *i965_h264_context)
337 struct i965_driver_data *i965 = i965_driver_data(ctx);
338 struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
339 VAPictureParameterBufferH264 *pic_param;
340 struct avc_ildb_root_input *root_input;
342 assert(decode_state->pic_param && decode_state->pic_param->buffer);
343 pic_param = (VAPictureParameterBufferH264 *)decode_state->pic_param->buffer;
345 dri_bo_map(avc_ildb_context->curbe.bo, 1);
346 assert(avc_ildb_context->curbe.bo->virtual);
347 root_input = avc_ildb_context->curbe.bo->virtual;
349 if (IS_IRONLAKE(i965->intel.device_id)) {
350 root_input->max_concurrent_threads = 76; /* 72 - 2 + 8 - 2 */
352 root_input->max_concurrent_threads = 54; /* 50 - 2 + 8 - 2 */
355 if (pic_param->pic_fields.bits.field_pic_flag)
356 root_input->picture_type = PICTURE_FIELD;
358 if (pic_param->seq_fields.bits.mb_adaptive_frame_field_flag)
359 root_input->picture_type = PICTURE_MBAFF;
361 root_input->picture_type = PICTURE_FRAME;
364 avc_ildb_context->picture_type = root_input->picture_type;
365 root_input->blocks_per_row = pic_param->picture_width_in_mbs_minus1 + 1;
366 root_input->blocks_per_column = (pic_param->picture_height_in_mbs_minus1 + 1) /
367 (1 + (root_input->picture_type != PICTURE_FRAME));
368 avc_ildb_context->mbs_per_picture = (pic_param->picture_width_in_mbs_minus1 + 1) *
369 (pic_param->picture_height_in_mbs_minus1 + 1);
371 root_input->mbaff_frame_flag = (root_input->picture_type == PICTURE_MBAFF);
372 root_input->bottom_field_flag = !!(pic_param->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD);
373 root_input->control_data_expansion_flag = 1; /* Always 1 on G4x+ */
374 root_input->chroma_format = (pic_param->seq_fields.bits.chroma_format_idc != 1); /* 0=4:0:0, 1=4:2:0 */
376 root_input->ramp_constant_0 = 0x03020100;
378 root_input->ramp_constant_1 = 0x07060504;
380 root_input->constant_0 = -2;
381 root_input->constant_1 = 1;
383 dri_bo_unmap(avc_ildb_context->curbe.bo);
387 i965_avc_ildb_states_setup(VADriverContextP ctx,
388 struct decode_state *decode_state,
389 struct i965_h264_context *i965_h264_context)
391 i965_avc_ildb_surface_state(ctx, decode_state, i965_h264_context);
392 i965_avc_ildb_binding_table(ctx, i965_h264_context);
393 i965_avc_ildb_interface_descriptor_table(ctx, i965_h264_context);
394 i965_avc_ildb_vfe_state(ctx, i965_h264_context);
395 i965_avc_ildb_upload_constants(ctx, decode_state, i965_h264_context);
399 i965_avc_ildb_pipeline_select(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
401 struct intel_batchbuffer *batch = i965_h264_context->batch;
403 BEGIN_BATCH(batch, 1);
404 OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
405 ADVANCE_BATCH(batch);
409 i965_avc_ildb_urb_layout(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
411 struct i965_driver_data *i965 = i965_driver_data(ctx);
412 struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
413 struct intel_batchbuffer *batch = i965_h264_context->batch;
414 unsigned int vfe_fence, cs_fence;
416 vfe_fence = avc_ildb_context->urb.cs_start;
417 cs_fence = URB_SIZE((&i965->intel));
419 BEGIN_BATCH(batch, 3);
420 OUT_BATCH(batch, CMD_URB_FENCE | UF0_VFE_REALLOC | UF0_CS_REALLOC | 1);
423 (vfe_fence << UF2_VFE_FENCE_SHIFT) | /* VFE_SIZE */
424 (cs_fence << UF2_CS_FENCE_SHIFT)); /* CS_SIZE */
425 ADVANCE_BATCH(batch);
429 i965_avc_ildb_state_base_address(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
431 struct i965_driver_data *i965 = i965_driver_data(ctx);
432 struct intel_batchbuffer *batch = i965_h264_context->batch;
434 if (IS_IRONLAKE(i965->intel.device_id)) {
435 BEGIN_BATCH(batch, 8);
436 OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | 6);
437 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
438 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
439 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
440 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
441 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
442 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
443 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
444 ADVANCE_BATCH(batch);
446 BEGIN_BATCH(batch, 6);
447 OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | 4);
448 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
449 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
450 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
451 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
452 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
453 ADVANCE_BATCH(batch);
458 i965_avc_ildb_state_pointers(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
460 struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
461 struct intel_batchbuffer *batch = i965_h264_context->batch;
463 BEGIN_BATCH(batch, 3);
464 OUT_BATCH(batch, CMD_MEDIA_STATE_POINTERS | 1);
466 OUT_RELOC(batch, avc_ildb_context->vfe_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
467 ADVANCE_BATCH(batch);
471 i965_avc_ildb_cs_urb_layout(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
473 struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
474 struct intel_batchbuffer *batch = i965_h264_context->batch;
476 BEGIN_BATCH(batch, 2);
477 OUT_BATCH(batch, CMD_CS_URB_STATE | 0);
479 ((avc_ildb_context->urb.size_cs_entry - 1) << 4) | /* URB Entry Allocation Size */
480 (avc_ildb_context->urb.num_cs_entries << 0)); /* Number of URB Entries */
481 ADVANCE_BATCH(batch);
485 i965_avc_ildb_constant_buffer(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
487 struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
488 struct intel_batchbuffer *batch = i965_h264_context->batch;
490 BEGIN_BATCH(batch, 2);
491 OUT_BATCH(batch, CMD_CONSTANT_BUFFER | (1 << 8) | (2 - 2));
492 OUT_RELOC(batch, avc_ildb_context->curbe.bo,
493 I915_GEM_DOMAIN_INSTRUCTION, 0,
494 avc_ildb_context->urb.size_cs_entry - 1);
495 ADVANCE_BATCH(batch);
499 i965_avc_ildb_objects(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
501 struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;
502 struct intel_batchbuffer *batch = i965_h264_context->batch;
504 BEGIN_BATCH(batch, 6);
505 OUT_BATCH(batch, CMD_MEDIA_OBJECT | 4);
507 switch (avc_ildb_context->picture_type) {
509 OUT_BATCH(batch, AVC_ILDB_ROOT_Y_ILDB_FRAME);
513 OUT_BATCH(batch, AVC_ILDB_ROOT_Y_ILDB_FIELD);
517 OUT_BATCH(batch, AVC_ILDB_ROOT_Y_ILDB_MBAFF);
526 OUT_BATCH(batch, 0); /* no indirect data */
530 ADVANCE_BATCH(batch);
534 i965_avc_ildb_pipeline_setup(VADriverContextP ctx, struct i965_h264_context *i965_h264_context)
536 struct intel_batchbuffer *batch = i965_h264_context->batch;
538 intel_batchbuffer_emit_mi_flush(batch);
539 i965_avc_ildb_pipeline_select(ctx, i965_h264_context);
540 i965_avc_ildb_state_base_address(ctx, i965_h264_context);
541 i965_avc_ildb_state_pointers(ctx, i965_h264_context);
542 i965_avc_ildb_urb_layout(ctx, i965_h264_context);
543 i965_avc_ildb_cs_urb_layout(ctx, i965_h264_context);
544 i965_avc_ildb_constant_buffer(ctx, i965_h264_context);
545 i965_avc_ildb_objects(ctx, i965_h264_context);
549 i965_avc_ildb(VADriverContextP ctx, struct decode_state *decode_state, void *h264_context)
551 struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)h264_context;
553 if (i965_h264_context->enable_avc_ildb) {
554 i965_avc_ildb_states_setup(ctx, decode_state, i965_h264_context);
555 i965_avc_ildb_pipeline_setup(ctx, i965_h264_context);
560 i965_avc_ildb_decode_init(VADriverContextP ctx, void *h264_context)
562 struct i965_driver_data *i965 = i965_driver_data(ctx);
563 struct i965_h264_context *i965_h264_context = (struct i965_h264_context *)h264_context;
564 struct i965_avc_ildb_context *avc_ildb_context = &i965_h264_context->avc_ildb_context;;
568 dri_bo_unreference(avc_ildb_context->curbe.bo);
569 bo = dri_bo_alloc(i965->intel.bufmgr,
573 avc_ildb_context->curbe.bo = bo;
575 dri_bo_unreference(avc_ildb_context->binding_table.bo);
576 bo = dri_bo_alloc(i965->intel.bufmgr,
578 NUM_AVC_ILDB_SURFACES * sizeof(unsigned int), 32);
580 avc_ildb_context->binding_table.bo = bo;
582 dri_bo_unreference(avc_ildb_context->idrt.bo);
583 bo = dri_bo_alloc(i965->intel.bufmgr,
584 "interface discriptor",
585 NUM_AVC_ILDB_INTERFACES * sizeof(struct i965_interface_descriptor), 16);
587 avc_ildb_context->idrt.bo = bo;
589 dri_bo_unreference(avc_ildb_context->vfe_state.bo);
590 bo = dri_bo_alloc(i965->intel.bufmgr,
592 sizeof(struct i965_vfe_state), 32);
594 avc_ildb_context->vfe_state.bo = bo;
596 avc_ildb_context->urb.num_vfe_entries = 1;
597 avc_ildb_context->urb.size_vfe_entry = 640;
598 avc_ildb_context->urb.num_cs_entries = 1;
599 avc_ildb_context->urb.size_cs_entry = 1;
600 avc_ildb_context->urb.vfe_start = 0;
601 avc_ildb_context->urb.cs_start = avc_ildb_context->urb.vfe_start +
602 avc_ildb_context->urb.num_vfe_entries * avc_ildb_context->urb.size_vfe_entry;
603 assert(avc_ildb_context->urb.cs_start +
604 avc_ildb_context->urb.num_cs_entries * avc_ildb_context->urb.size_cs_entry <= URB_SIZE((&i965->intel)));
606 for (i = 0; i < NUM_AVC_ILDB_SURFACES; i++) {
607 dri_bo_unreference(avc_ildb_context->surface[i].s_bo);
608 avc_ildb_context->surface[i].s_bo = NULL;
610 dri_bo_unreference(avc_ildb_context->surface[i].ss_bo);
611 bo = dri_bo_alloc(i965->intel.bufmgr,
613 sizeof(struct i965_surface_state), 32);
615 avc_ildb_context->surface[i].ss_bo = bo;
619 assert(NUM_AVC_ILDB_INTERFACES == ARRAY_ELEMS(avc_ildb_kernel_offset_gen5));
621 if (IS_IRONLAKE(i965->intel.device_id)) {
622 avc_ildb_kernel_offset = avc_ildb_kernel_offset_gen5;
624 avc_ildb_kernel_offset = avc_ildb_kernel_offset_gen4;
629 i965_avc_ildb_ternimate(struct i965_avc_ildb_context *avc_ildb_context)
633 dri_bo_unreference(avc_ildb_context->curbe.bo);
634 avc_ildb_context->curbe.bo = NULL;
636 dri_bo_unreference(avc_ildb_context->binding_table.bo);
637 avc_ildb_context->binding_table.bo = NULL;
639 dri_bo_unreference(avc_ildb_context->idrt.bo);
640 avc_ildb_context->idrt.bo = NULL;
642 dri_bo_unreference(avc_ildb_context->vfe_state.bo);
643 avc_ildb_context->vfe_state.bo = NULL;
645 for (i = 0; i < NUM_AVC_ILDB_SURFACES; i++) {
646 dri_bo_unreference(avc_ildb_context->surface[i].ss_bo);
647 avc_ildb_context->surface[i].ss_bo = NULL;
649 dri_bo_unreference(avc_ildb_context->surface[i].s_bo);
650 avc_ildb_context->surface[i].s_bo = NULL;