2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
35 #include "intel_batchbuffer.h"
36 #include "intel_driver.h"
37 #include "i965_defines.h"
38 #include "i965_drv_video.h"
40 #include "i965_media.h"
41 #include "i965_media_mpeg2.h"
42 #include "i965_media_h264.h"
/*
 * Select the media pipeline on the render engine by emitting a single
 * PIPELINE_SELECT command into the context's batch buffer.
 * NOTE(review): this extraction is incomplete — the opening brace,
 * ADVANCE_BATCH and closing brace are not visible in this chunk.
 */
45 i965_media_pipeline_select(VADriverContextP ctx, struct i965_media_context *media_context)
47     struct intel_batchbuffer *batch = media_context->base.batch;
49     BEGIN_BATCH(batch, 1);
50     OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
/*
 * Program the URB fence that partitions the unified return buffer
 * between the VFE and CS units.  The VFE fence is taken from the
 * context's configured CS start offset; the CS fence is the total URB
 * size reported for this device (URB_SIZE).
 * NOTE(review): one OUT_BATCH (DWord 1 of the fence command) and the
 * ADVANCE_BATCH/closing brace are not visible in this chunk.
 */
55 i965_media_urb_layout(VADriverContextP ctx, struct i965_media_context *media_context)
57     struct i965_driver_data *i965 = i965_driver_data(ctx);
58     struct intel_batchbuffer *batch = media_context->base.batch;
59     unsigned int vfe_fence, cs_fence;
61     vfe_fence = media_context->urb.cs_start;
62     cs_fence = URB_SIZE((&i965->intel));
64     BEGIN_BATCH(batch, 3);
65     OUT_BATCH(batch, CMD_URB_FENCE | UF0_VFE_REALLOC | UF0_CS_REALLOC | 1);
68               (vfe_fence << UF2_VFE_FENCE_SHIFT) |      /* VFE_SIZE */
69               (cs_fence << UF2_CS_FENCE_SHIFT));        /* CS_SIZE */
/*
 * Emit CMD_STATE_BASE_ADDRESS for the media pipeline.
 *
 * Ironlake uses the longer 8-DWord form (length field 6); other
 * supported devices use the 6-DWord form (length field 4).  In both
 * variants the indirect-object base address DWord points at the
 * context's indirect_object BO (relocated, offset OR'd with
 * BASE_ADDRESS_MODIFY) when one is attached, and is left as a modify-
 * only zero base otherwise.  All remaining base/bound DWords are
 * programmed as 0 | BASE_ADDRESS_MODIFY.
 * NOTE(review): the if/else braces of both branches are not visible in
 * this chunk — the pairing of the zero-DWord fallbacks is inferred from
 * layout and should be confirmed against the full file.
 */
74 i965_media_state_base_address(VADriverContextP ctx, struct i965_media_context *media_context)
76     struct i965_driver_data *i965 = i965_driver_data(ctx);
77     struct intel_batchbuffer *batch = media_context->base.batch;
79     if (IS_IRONLAKE(i965->intel.device_id)) {
80         BEGIN_BATCH(batch, 8);
81         OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | 6);
82         OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
83         OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
85         if (media_context->indirect_object.bo) {
86             OUT_RELOC(batch, media_context->indirect_object.bo, I915_GEM_DOMAIN_INSTRUCTION, 0,
87                       media_context->indirect_object.offset | BASE_ADDRESS_MODIFY);
89             OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
92         OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
93         OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
94         OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
95         OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
98         BEGIN_BATCH(batch, 6);
99         OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | 4);
100         OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
101         OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
103         if (media_context->indirect_object.bo) {
104             OUT_RELOC(batch, media_context->indirect_object.bo, I915_GEM_DOMAIN_INSTRUCTION, 0,
105                       media_context->indirect_object.offset | BASE_ADDRESS_MODIFY);
107             OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
110         OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
111         OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
112         ADVANCE_BATCH(batch);
/*
 * Emit CMD_MEDIA_STATE_POINTERS, pointing the hardware at the VFE state
 * BO and, when extended state is enabled for this context, at the
 * extended-state BO (relocation delta 1 — presumably an "enable" bit in
 * the pointer DWord; confirm against the full file / PRM).
 * NOTE(review): the else-branch emitting the disabled-pointer DWord is
 * not visible in this chunk.
 */
117 i965_media_state_pointers(VADriverContextP ctx, struct i965_media_context *media_context)
119     struct intel_batchbuffer *batch = media_context->base.batch;
121     BEGIN_BATCH(batch, 3);
122     OUT_BATCH(batch, CMD_MEDIA_STATE_POINTERS | 1);
124     if (media_context->extended_state.enabled)
125         OUT_RELOC(batch, media_context->extended_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 1);
129     OUT_RELOC(batch, media_context->vfe_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
130     ADVANCE_BATCH(batch);
/*
 * Program CS_URB_STATE: the per-entry allocation size (encoded as
 * size_cs_entry - 1, in 512-bit units per the command layout) and the
 * number of constant-buffer URB entries configured on the context.
 * NOTE(review): the OUT_BATCH opening for DWord 1 is not visible in
 * this chunk (only its continuation operands are).
 */
134 i965_media_cs_urb_layout(VADriverContextP ctx, struct i965_media_context *media_context)
136     struct intel_batchbuffer *batch = media_context->base.batch;
138     BEGIN_BATCH(batch, 2);
139     OUT_BATCH(batch, CMD_CS_URB_STATE | 0);
141               ((media_context->urb.size_cs_entry - 1) << 4) |     /* URB Entry Allocation Size */
142               (media_context->urb.num_cs_entries << 0));          /* Number of URB Entries */
143     ADVANCE_BATCH(batch);
/*
 * Emit the fixed media pipeline state: state base addresses, media
 * state pointers, then the CS URB layout, in that order.
 */
147 i965_media_pipeline_state(VADriverContextP ctx, struct i965_media_context *media_context)
149     i965_media_state_base_address(ctx, media_context);
150     i965_media_state_pointers(ctx, media_context);
151     i965_media_cs_urb_layout(ctx, media_context);
/*
 * Emit CMD_CONSTANT_BUFFER pointing at the context's CURBE buffer
 * object.  The relocation delta encodes the buffer length as
 * size_cs_entry - 1, matching the URB entry allocation programmed in
 * i965_media_cs_urb_layout().  decode_state is currently unused here.
 */
155 i965_media_constant_buffer(VADriverContextP ctx, struct decode_state *decode_state, struct i965_media_context *media_context)
157     struct intel_batchbuffer *batch = media_context->base.batch;
159     BEGIN_BATCH(batch, 2);
160     OUT_BATCH(batch, CMD_CONSTANT_BUFFER | (1 << 8) | (2 - 2));
161     OUT_RELOC(batch, media_context->curbe.bo,
162               I915_GEM_DOMAIN_INSTRUCTION, 0,
163               media_context->urb.size_cs_entry - 1);
164     ADVANCE_BATCH(batch);
/*
 * Emit CMD_DEPTH_BUFFER describing a NULL depth surface
 * (I965_SURFACE_NULL / D32_FLOAT).  Presumably required to put the
 * depth buffer in a defined state before switching to the media
 * pipeline — confirm against the PRM / full file.
 * NOTE(review): the remaining zero DWords of the 6-DWord command are
 * not visible in this chunk.
 */
168 i965_media_depth_buffer(VADriverContextP ctx, struct i965_media_context *media_context)
170     struct intel_batchbuffer *batch = media_context->base.batch;
172     BEGIN_BATCH(batch, 6);
173     OUT_BATCH(batch, CMD_DEPTH_BUFFER | 4);
174     OUT_BATCH(batch, (I965_DEPTHFORMAT_D32_FLOAT << 18) |
175               (I965_SURFACE_NULL << 29));
180     ADVANCE_BATCH(batch);
/*
 * Build the complete media-decode batch for one picture, atomically:
 * flush, depth-buffer setup, pipeline select, URB layout, pipeline
 * state, constant buffer, then the per-codec media objects emitted via
 * the context's media_objects() hook (which must be set).
 * The 0x1000 argument reserves batch space for the atomic section —
 * presumably a worst-case size in bytes; confirm against
 * intel_batchbuffer_start_atomic().
 */
184 i965_media_pipeline_setup(VADriverContextP ctx,
185                           struct decode_state *decode_state,
186                           struct i965_media_context *media_context)
188     struct intel_batchbuffer *batch = media_context->base.batch;
190     intel_batchbuffer_start_atomic(batch, 0x1000);
191     intel_batchbuffer_emit_mi_flush(batch);                             /* step 1 */
192     i965_media_depth_buffer(ctx, media_context);
193     i965_media_pipeline_select(ctx, media_context);                     /* step 2 */
194     i965_media_urb_layout(ctx, media_context);                          /* step 3 */
195     i965_media_pipeline_state(ctx, media_context);                      /* step 4 */
196     i965_media_constant_buffer(ctx, decode_state, media_context);       /* step 5 */
197     assert(media_context->media_objects);
198     media_context->media_objects(ctx, decode_state, media_context);     /* step 6 */
199     intel_batchbuffer_end_atomic(batch);
/*
 * (Re)allocate the per-decode GPU buffer objects — CURBE, binding
 * table, interface descriptor remapping table, and VFE state — dropping
 * any previous references first, then reset per-surface state and
 * dispatch to the codec-specific init for the given profile.
 * NOTE(review): this chunk does not show allocation-failure checks,
 * several dri_bo_alloc name/size arguments, or the switch statement and
 * its break/default lines — consult the full file before relying on
 * the exact control flow here.
 */
203 i965_media_decode_init(VADriverContextP ctx,
205                        struct decode_state *decode_state,
206                        struct i965_media_context *media_context)
209     struct i965_driver_data *i965 = i965_driver_data(ctx);
212     /* constant buffer */
213     dri_bo_unreference(media_context->curbe.bo);
214     bo = dri_bo_alloc(i965->intel.bufmgr,
218     media_context->curbe.bo = bo;
    /* Drop any surface-state BOs left over from the previous picture. */
221     for (i = 0; i < MAX_MEDIA_SURFACES; i++) {
222         dri_bo_unreference(media_context->surface_state[i].bo);
223         media_context->surface_state[i].bo = NULL;
    /* binding table: one DWord pointer per media surface */
227     dri_bo_unreference(media_context->binding_table.bo);
228     bo = dri_bo_alloc(i965->intel.bufmgr,
230                       MAX_MEDIA_SURFACES * sizeof(unsigned int), 32);
232     media_context->binding_table.bo = bo;
234     /* interface descriptor remapping table */
235     dri_bo_unreference(media_context->idrt.bo);
236     bo = dri_bo_alloc(i965->intel.bufmgr,
237                       "interface discriptor",
238                       MAX_INTERFACE_DESC * sizeof(struct i965_interface_descriptor), 16);
240     media_context->idrt.bo = bo;
    /* VFE state */
243     dri_bo_unreference(media_context->vfe_state.bo);
244     bo = dri_bo_alloc(i965->intel.bufmgr,
246                       sizeof(struct i965_vfe_state), 32);
248     media_context->vfe_state.bo = bo;
    /* Codec-specific init below re-enables extended state if needed. */
251     media_context->extended_state.enabled = 0;
254     case VAProfileMPEG2Simple:
255     case VAProfileMPEG2Main:
256         i965_media_mpeg2_decode_init(ctx, decode_state, media_context);
259     case VAProfileH264Baseline:
260     case VAProfileH264Main:
261     case VAProfileH264High:
262         i965_media_h264_decode_init(ctx, decode_state, media_context);
/*
 * hw_context->run entry point for media decode: cast the generic
 * hw_context to the media context, (re)initialize per-picture buffers,
 * emit codec-specific media states via the context's
 * media_states_setup hook (which must be set), build the pipeline
 * batch, and flush it to the kernel.
 */
272 i965_media_decode_picture(VADriverContextP ctx,
274                           union codec_state *codec_state,
275                           struct hw_context *hw_context)
277     struct i965_media_context *media_context = (struct i965_media_context *)hw_context;
278     struct decode_state *decode_state = &codec_state->decode;
280     i965_media_decode_init(ctx, profile, decode_state, media_context);
281     assert(media_context->media_states_setup);
282     media_context->media_states_setup(ctx, decode_state, media_context);
283     i965_media_pipeline_setup(ctx, decode_state, media_context);
284     intel_batchbuffer_flush(hw_context->batch);
/*
 * hw_context->destroy entry point: release the codec-private context
 * via its free hook (if any), drop every buffer-object reference held
 * by the media context (NULLing the pointers to guard against reuse),
 * and free the batch buffer.
 * NOTE(review): the final free() of the context struct itself is not
 * visible in this chunk — confirm in the full file.
 */
288 i965_media_context_destroy(void *hw_context)
290     struct i965_media_context *media_context = (struct i965_media_context *)hw_context;
293     if (media_context->free_private_context)
294         media_context->free_private_context(&media_context->private_context);
296     for (i = 0; i < MAX_MEDIA_SURFACES; i++) {
297         dri_bo_unreference(media_context->surface_state[i].bo);
298         media_context->surface_state[i].bo = NULL;
301     dri_bo_unreference(media_context->extended_state.bo);
302     media_context->extended_state.bo = NULL;
304     dri_bo_unreference(media_context->vfe_state.bo);
305     media_context->vfe_state.bo = NULL;
307     dri_bo_unreference(media_context->idrt.bo);
308     media_context->idrt.bo = NULL;
310     dri_bo_unreference(media_context->binding_table.bo);
311     media_context->binding_table.bo = NULL;
313     dri_bo_unreference(media_context->curbe.bo);
314     media_context->curbe.bo = NULL;
316     dri_bo_unreference(media_context->indirect_object.bo);
317     media_context->indirect_object.bo = NULL;
319     intel_batchbuffer_free(media_context->base.batch);
/*
 * Create the decode hw_context for G4x-class hardware: allocate a
 * zeroed media context, wire up the destroy/run vtable entries and a
 * render-ring batch buffer, then perform profile-specific setup.  Only
 * the MPEG-2 init call is visible here for G4x.
 * NOTE(review): calloc's return is not checked in the visible lines,
 * and the H264/VC1 case bodies (presumably unsupported on G4x) are not
 * visible in this chunk — confirm in the full file.
 */
324 g4x_dec_hw_context_init(VADriverContextP ctx, struct object_config *obj_config)
326     struct intel_driver_data *intel = intel_driver_data(ctx);
327     struct i965_media_context *media_context = calloc(1, sizeof(struct i965_media_context));
329     media_context->base.destroy = i965_media_context_destroy;
330     media_context->base.run = i965_media_decode_picture;
331     media_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER);
333     switch (obj_config->profile) {
334     case VAProfileMPEG2Simple:
335     case VAProfileMPEG2Main:
336         i965_media_mpeg2_dec_context_init(ctx, media_context);
339     case VAProfileH264Baseline:
340     case VAProfileH264Main:
341     case VAProfileH264High:
342     case VAProfileVC1Simple:
343     case VAProfileVC1Main:
344     case VAProfileVC1Advanced:
350     return (struct hw_context *)media_context;
/*
 * Create the decode hw_context for Ironlake: same vtable/batch wiring
 * as the G4x variant, but with H.264 decode supported in addition to
 * MPEG-2 (codec-specific context init per profile).
 * NOTE(review): calloc's return is not checked in the visible lines,
 * and the VC1 case body (presumably unsupported) is not visible in
 * this chunk — confirm in the full file.
 */
354 ironlake_dec_hw_context_init(VADriverContextP ctx, struct object_config *obj_config)
356     struct intel_driver_data *intel = intel_driver_data(ctx);
357     struct i965_media_context *media_context = calloc(1, sizeof(struct i965_media_context));
359     media_context->base.destroy = i965_media_context_destroy;
360     media_context->base.run = i965_media_decode_picture;
361     media_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER);
363     switch (obj_config->profile) {
364     case VAProfileMPEG2Simple:
365     case VAProfileMPEG2Main:
366         i965_media_mpeg2_dec_context_init(ctx, media_context);
369     case VAProfileH264Baseline:
370     case VAProfileH264Main:
371     case VAProfileH264High:
372         i965_media_h264_dec_context_init(ctx, media_context);
375     case VAProfileVC1Simple:
376     case VAProfileVC1Main:
377     case VAProfileVC1Advanced:
383     return (struct hw_context *)media_context;