2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
35 #include <va/va_backend.h>
37 #include "intel_batchbuffer.h"
38 #include "intel_driver.h"
39 #include "i965_defines.h"
40 #include "i965_drv_video.h"
42 #include "i965_media.h"
43 #include "i965_media_mpeg2.h"
44 #include "i965_media_h264.h"
/*
 * i965_media_pipeline_select:
 * Emit PIPELINE_SELECT to switch the GPU command streamer to the media
 * pipeline, using the batch buffer owned by @media_context.
 * NOTE(review): the surrounding listing elides the function prologue and
 * the matching ADVANCE_BATCH/closing brace — confirm against the full file.
 */
47 i965_media_pipeline_select(VADriverContextP ctx, struct i965_media_context *media_context)
49 struct intel_batchbuffer *batch = media_context->base.batch;
51 BEGIN_BATCH(batch, 1);
52 OUT_BATCH(batch, CMD_PIPELINE_SELECT | PIPELINE_SELECT_MEDIA);
/*
 * i965_media_urb_layout:
 * Program the URB fence (CMD_URB_FENCE) that partitions URB space between
 * the VFE and CS units.  The VFE fence is taken from the context's
 * urb.cs_start offset; the CS fence is the total URB size for this device.
 */
57 i965_media_urb_layout(VADriverContextP ctx, struct i965_media_context *media_context)
59 struct i965_driver_data *i965 = i965_driver_data(ctx);
60 struct intel_batchbuffer *batch = media_context->base.batch;
61 unsigned int vfe_fence, cs_fence;
63 vfe_fence = media_context->urb.cs_start;
64 cs_fence = URB_SIZE((&i965->intel));
66 BEGIN_BATCH(batch, 3);
67 OUT_BATCH(batch, CMD_URB_FENCE | UF0_VFE_REALLOC | UF0_CS_REALLOC | 1);
/* NOTE(review): an OUT_BATCH dword appears elided here in this listing
 * before the fence dword below — verify against the full file. */
70 (vfe_fence << UF2_VFE_FENCE_SHIFT) | /* VFE_SIZE */
71 (cs_fence << UF2_CS_FENCE_SHIFT)); /* CS_SIZE */
/*
 * i965_media_state_base_address:
 * Emit CMD_STATE_BASE_ADDRESS for the media pipeline.  Ironlake uses the
 * longer 8-dword form (length field 6); earlier GENs use the 6-dword form
 * (length field 4).  In both variants, the indirect-object base address is
 * relocated to media_context->indirect_object.bo when present, otherwise
 * left as zero; all other base addresses are zero with BASE_ADDRESS_MODIFY
 * set so the hardware latches the new values.
 */
76 i965_media_state_base_address(VADriverContextP ctx, struct i965_media_context *media_context)
78 struct i965_driver_data *i965 = i965_driver_data(ctx);
79 struct intel_batchbuffer *batch = media_context->base.batch;
81 if (IS_IRONLAKE(i965->intel.device_id)) {
82 BEGIN_BATCH(batch, 8);
83 OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | 6);
84 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
85 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
/* Indirect object base: relocate to the BO if the caller provided one. */
87 if (media_context->indirect_object.bo) {
88 OUT_RELOC(batch, media_context->indirect_object.bo, I915_GEM_DOMAIN_INSTRUCTION, 0,
89 media_context->indirect_object.offset | BASE_ADDRESS_MODIFY);
/* NOTE(review): the `} else {` pairing around the line below is elided in
 * this listing — the zero dword is presumably the no-BO fallback. */
91 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
94 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
95 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
96 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
97 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
/* Pre-Ironlake: shorter STATE_BASE_ADDRESS variant. */
100 BEGIN_BATCH(batch, 6);
101 OUT_BATCH(batch, CMD_STATE_BASE_ADDRESS | 4);
102 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
103 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
105 if (media_context->indirect_object.bo) {
106 OUT_RELOC(batch, media_context->indirect_object.bo, I915_GEM_DOMAIN_INSTRUCTION, 0,
107 media_context->indirect_object.offset | BASE_ADDRESS_MODIFY);
109 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
112 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
113 OUT_BATCH(batch, 0 | BASE_ADDRESS_MODIFY);
114 ADVANCE_BATCH(batch);
/*
 * i965_media_state_pointers:
 * Emit CMD_MEDIA_STATE_POINTERS referencing the extended state BO (when
 * enabled — note the low bit set to 1 in its reloc delta, presumably the
 * "valid" flag) and the VFE state BO.
 */
119 i965_media_state_pointers(VADriverContextP ctx, struct i965_media_context *media_context)
121 struct intel_batchbuffer *batch = media_context->base.batch;
123 BEGIN_BATCH(batch, 3);
124 OUT_BATCH(batch, CMD_MEDIA_STATE_POINTERS | 1);
126 if (media_context->extended_state.enabled)
127 OUT_RELOC(batch, media_context->extended_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 1);
/* NOTE(review): the else branch (zero dword when extended state is
 * disabled) appears elided from this listing. */
131 OUT_RELOC(batch, media_context->vfe_state.bo, I915_GEM_DOMAIN_INSTRUCTION, 0, 0);
132 ADVANCE_BATCH(batch);
/*
 * i965_media_cs_urb_layout:
 * Emit CMD_CS_URB_STATE describing the constant-buffer URB layout:
 * per-entry allocation size (encoded as size - 1) and the number of
 * CS URB entries, both taken from media_context->urb.
 */
136 i965_media_cs_urb_layout(VADriverContextP ctx, struct i965_media_context *media_context)
138 struct intel_batchbuffer *batch = media_context->base.batch;
140 BEGIN_BATCH(batch, 2);
141 OUT_BATCH(batch, CMD_CS_URB_STATE | 0);
143 ((media_context->urb.size_cs_entry - 1) << 4) | /* URB Entry Allocation Size */
144 (media_context->urb.num_cs_entries << 0)); /* Number of URB Entries */
145 ADVANCE_BATCH(batch);
/*
 * i965_media_pipeline_state:
 * Emit the fixed media pipeline state in order: state base addresses,
 * media state pointers, then the CS URB layout.
 */
149 i965_media_pipeline_state(VADriverContextP ctx, struct i965_media_context *media_context)
151 i965_media_state_base_address(ctx, media_context);
152 i965_media_state_pointers(ctx, media_context);
153 i965_media_cs_urb_layout(ctx, media_context);
/*
 * i965_media_constant_buffer:
 * Emit CMD_CONSTANT_BUFFER pointing at the CURBE buffer object.  The
 * reloc delta encodes the buffer length as size_cs_entry - 1, matching
 * the CS URB entry size programmed in i965_media_cs_urb_layout().
 * @decode_state is accepted but not read in the visible span.
 */
157 i965_media_constant_buffer(VADriverContextP ctx, struct decode_state *decode_state, struct i965_media_context *media_context)
159 struct intel_batchbuffer *batch = media_context->base.batch;
161 BEGIN_BATCH(batch, 2);
162 OUT_BATCH(batch, CMD_CONSTANT_BUFFER | (1 << 8) | (2 - 2));
163 OUT_RELOC(batch, media_context->curbe.bo,
164 I915_GEM_DOMAIN_INSTRUCTION, 0,
165 media_context->urb.size_cs_entry - 1);
166 ADVANCE_BATCH(batch);
/*
 * i965_media_depth_buffer:
 * Emit CMD_DEPTH_BUFFER describing a null depth buffer (surface type
 * SURFACE_NULL, format D32_FLOAT) — the media pipeline does not use
 * depth, but the state must still be programmed.
 * NOTE(review): the zero-filled trailing dwords of the packet appear
 * elided from this listing.
 */
170 i965_media_depth_buffer(VADriverContextP ctx, struct i965_media_context *media_context)
172 struct intel_batchbuffer *batch = media_context->base.batch;
174 BEGIN_BATCH(batch, 6);
175 OUT_BATCH(batch, CMD_DEPTH_BUFFER | 4);
176 OUT_BATCH(batch, (I965_DEPTHFORMAT_D32_FLOAT << 18) |
177 (I965_SURFACE_NULL << 29));
182 ADVANCE_BATCH(batch);
/*
 * i965_media_pipeline_setup:
 * Build the complete media batch for one decode pass inside an atomic
 * batchbuffer section: flush, null depth buffer, pipeline select, URB
 * layout, pipeline state, constant buffer, then the per-codec
 * media_objects() callback which emits the actual MEDIA_OBJECT commands.
 * The numbered step comments mirror the hardware programming sequence.
 */
186 i965_media_pipeline_setup(VADriverContextP ctx,
187 struct decode_state *decode_state,
188 struct i965_media_context *media_context)
190 struct intel_batchbuffer *batch = media_context->base.batch;
192 intel_batchbuffer_start_atomic(batch, 0x1000);
193 intel_batchbuffer_emit_mi_flush(batch); /* step 1 */
194 i965_media_depth_buffer(ctx, media_context);
195 i965_media_pipeline_select(ctx, media_context); /* step 2 */
196 i965_media_urb_layout(ctx, media_context); /* step 3 */
197 i965_media_pipeline_state(ctx, media_context); /* step 4 */
198 i965_media_constant_buffer(ctx, decode_state, media_context); /* step 5 */
199 assert(media_context->media_objects);
200 media_context->media_objects(ctx, decode_state, media_context); /* step 6 */
201 intel_batchbuffer_end_atomic(batch);
/*
 * i965_media_decode_init:
 * (Re)allocate the per-frame GPU state buffers — CURBE, binding table,
 * interface descriptor remapping table (IDRT) and VFE state — releasing
 * any previous buffer objects first, then dispatch to the codec-specific
 * decode init (MPEG-2 or H.264) based on the profile.
 * NOTE(review): this listing elides the profile parameter/switch header,
 * some dri_bo_alloc arguments and the break statements — verify against
 * the full file.
 */
205 i965_media_decode_init(VADriverContextP ctx,
207 struct decode_state *decode_state,
208 struct i965_media_context *media_context)
211 struct i965_driver_data *i965 = i965_driver_data(ctx);
214 /* constant buffer */
215 dri_bo_unreference(media_context->curbe.bo);
216 bo = dri_bo_alloc(i965->intel.bufmgr,
220 media_context->curbe.bo = bo;
/* Drop any per-surface state BOs from the previous frame. */
223 for (i = 0; i < MAX_MEDIA_SURFACES; i++) {
224 dri_bo_unreference(media_context->surface_state[i].bo);
225 media_context->surface_state[i].bo = NULL;
/* binding table: one dword per media surface */
229 dri_bo_unreference(media_context->binding_table.bo);
230 bo = dri_bo_alloc(i965->intel.bufmgr,
232 MAX_MEDIA_SURFACES * sizeof(unsigned int), 32);
234 media_context->binding_table.bo = bo;
236 /* interface descriptor remapping table */
237 dri_bo_unreference(media_context->idrt.bo);
238 bo = dri_bo_alloc(i965->intel.bufmgr,
239 "interface discriptor",
240 MAX_INTERFACE_DESC * sizeof(struct i965_interface_descriptor), 16);
242 media_context->idrt.bo = bo;
/* VFE state */
245 dri_bo_unreference(media_context->vfe_state.bo);
246 bo = dri_bo_alloc(i965->intel.bufmgr,
248 sizeof(struct i965_vfe_state), 32);
250 media_context->vfe_state.bo = bo;
/* Extended state is opt-in; codec init below may re-enable it. */
253 media_context->extended_state.enabled = 0;
256 case VAProfileMPEG2Simple:
257 case VAProfileMPEG2Main:
258 i965_media_mpeg2_decode_init(ctx, decode_state, media_context);
261 case VAProfileH264Baseline:
262 case VAProfileH264Main:
263 case VAProfileH264High:
264 i965_media_h264_decode_init(ctx, decode_state, media_context);
/*
 * i965_media_decode_picture:
 * hw_context->run entry point: initialize per-frame state, let the codec
 * fill its state buffers via media_states_setup(), build the media batch,
 * then flush the batchbuffer to submit the frame to the GPU.
 * NOTE(review): the profile parameter line is elided from this listing
 * (it is referenced in the i965_media_decode_init call below).
 */
274 i965_media_decode_picture(VADriverContextP ctx,
276 union codec_state *codec_state,
277 struct hw_context *hw_context)
279 struct i965_media_context *media_context = (struct i965_media_context *)hw_context;
280 struct decode_state *decode_state = &codec_state->dec;
282 i965_media_decode_init(ctx, profile, decode_state, media_context);
283 assert(media_context->media_states_setup);
284 media_context->media_states_setup(ctx, decode_state, media_context);
285 i965_media_pipeline_setup(ctx, decode_state, media_context);
286 intel_batchbuffer_flush(hw_context->batch);
/*
 * i965_media_context_destroy:
 * hw_context->destroy entry point: release the codec's private context
 * (if a destructor was registered), unreference every buffer object held
 * by the context (surface states, extended state, VFE state, IDRT,
 * binding table, CURBE, indirect object) and free the batchbuffer.
 * NOTE(review): the trailing free() of the context struct itself appears
 * elided from this listing.
 */
290 i965_media_context_destroy(void *hw_context)
292 struct i965_media_context *media_context = (struct i965_media_context *)hw_context;
295 if (media_context->free_private_context)
296 media_context->free_private_context(&media_context->private_context);
298 for (i = 0; i < MAX_MEDIA_SURFACES; i++) {
299 dri_bo_unreference(media_context->surface_state[i].bo);
300 media_context->surface_state[i].bo = NULL;
303 dri_bo_unreference(media_context->extended_state.bo);
304 media_context->extended_state.bo = NULL;
306 dri_bo_unreference(media_context->vfe_state.bo);
307 media_context->vfe_state.bo = NULL;
309 dri_bo_unreference(media_context->idrt.bo);
310 media_context->idrt.bo = NULL;
312 dri_bo_unreference(media_context->binding_table.bo);
313 media_context->binding_table.bo = NULL;
315 dri_bo_unreference(media_context->curbe.bo);
316 media_context->curbe.bo = NULL;
318 dri_bo_unreference(media_context->indirect_object.bo);
319 media_context->indirect_object.bo = NULL;
321 intel_batchbuffer_free(media_context->base.batch);
/*
 * g4x_dec_hw_context_init:
 * Allocate and initialize a media decode hw_context for G4x-class
 * hardware: wire up the destroy/run callbacks, create a render-ring
 * batchbuffer, and run the codec-specific context init per profile.
 * Only MPEG-2 has a visible handler here; the H.264/VC-1 cases appear
 * grouped with their bodies elided from this listing — presumably
 * unsupported on G4x; verify against the full file.
 * NOTE(review): the calloc() result is not visibly NULL-checked.
 */
326 g4x_dec_hw_context_init(VADriverContextP ctx, VAProfile profile)
328 struct intel_driver_data *intel = intel_driver_data(ctx);
329 struct i965_media_context *media_context = calloc(1, sizeof(struct i965_media_context));
331 media_context->base.destroy = i965_media_context_destroy;
332 media_context->base.run = i965_media_decode_picture;
333 media_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER);
336 case VAProfileMPEG2Simple:
337 case VAProfileMPEG2Main:
338 i965_media_mpeg2_dec_context_init(ctx, media_context);
341 case VAProfileH264Baseline:
342 case VAProfileH264Main:
343 case VAProfileH264High:
344 case VAProfileVC1Simple:
345 case VAProfileVC1Main:
346 case VAProfileVC1Advanced:
352 return (struct hw_context *)media_context;
/*
 * ironlake_dec_hw_context_init:
 * Same as g4x_dec_hw_context_init but for Ironlake: in addition to
 * MPEG-2, the H.264 profiles get a codec context init.  The VC-1 case
 * bodies are elided from this listing — presumably unsupported here;
 * verify against the full file.
 * NOTE(review): the calloc() result is not visibly NULL-checked.
 */
356 ironlake_dec_hw_context_init(VADriverContextP ctx, VAProfile profile)
358 struct intel_driver_data *intel = intel_driver_data(ctx);
359 struct i965_media_context *media_context = calloc(1, sizeof(struct i965_media_context));
361 media_context->base.destroy = i965_media_context_destroy;
362 media_context->base.run = i965_media_decode_picture;
363 media_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER);
366 case VAProfileMPEG2Simple:
367 case VAProfileMPEG2Main:
368 i965_media_mpeg2_dec_context_init(ctx, media_context);
371 case VAProfileH264Baseline:
372 case VAProfileH264Main:
373 case VAProfileH264High:
374 i965_media_h264_dec_context_init(ctx, media_context);
377 case VAProfileVC1Simple:
378 case VAProfileVC1Main:
379 case VAProfileVC1Advanced:
385 return (struct hw_context *)media_context;