/**************************************************************************
 *
 * Copyright 2006 Tungsten Graphics, Inc., Cedar Park, Texas.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/
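/*
 * Batchbuffer helpers for the Intel driver: commands are written as
 * dwords into a mapped libdrm buffer object and submitted to the kernel
 * on one of the render (RENDER), blit (BLT) or video (BSD) rings,
 * selected by batch->flag.
 */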
#include <stdlib.h>
#include <string.h>
#include <assert.h>

#include "intel_batchbuffer.h"
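/* Release the current buffer object, then allocate and map a fresh one
 * so commands can be written through batch->ptr. */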
static void
intel_batchbuffer_reset(struct intel_batchbuffer *batch)
{
    struct intel_driver_data *intel = batch->intel;
    int batch_size = BATCH_SIZE;

    assert(batch->flag == I915_EXEC_RENDER ||
           batch->flag == I915_EXEC_BLT ||
           batch->flag == I915_EXEC_BSD);

    dri_bo_unreference(batch->buffer);
    batch->buffer = dri_bo_alloc(intel->bufmgr,
                                 "batch buffer",
                                 batch_size,
                                 0x1000);
    assert(batch->buffer);
    dri_bo_map(batch->buffer, 1);
    assert(batch->buffer->virtual);
    batch->map = batch->buffer->virtual;
    batch->size = batch_size;
    batch->ptr = batch->map;
    batch->atomic = 0;
}
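/* Bytes still usable for commands; BATCH_RESERVED keeps room for the
 * terminating MI_BATCH_BUFFER_END (and padding) written at flush time. */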
static unsigned int
intel_batchbuffer_space(struct intel_batchbuffer *batch)
{
    return (batch->size - BATCH_RESERVED) - (batch->ptr - batch->map);
}
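/* Create a batchbuffer bound to one execution ring; submission goes
 * through drm_intel_bo_mrb_exec(), which takes the ring flag. */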
struct intel_batchbuffer *
intel_batchbuffer_new(struct intel_driver_data *intel, int flag)
{
    struct intel_batchbuffer *batch = calloc(1, sizeof(*batch));

    assert(flag == I915_EXEC_RENDER ||
           flag == I915_EXEC_BSD ||
           flag == I915_EXEC_BLT);

    batch->intel = intel;
    batch->flag = flag;
    batch->run = drm_intel_bo_mrb_exec;
    intel_batchbuffer_reset(batch);

    return batch;
}
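/* Unmap and release the underlying buffer object. */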
void intel_batchbuffer_free(struct intel_batchbuffer *batch)
{
    dri_bo_unmap(batch->buffer);
    dri_bo_unreference(batch->buffer);
    free(batch);
}
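/* Close the batch with MI_BATCH_BUFFER_END (padding first so the total
 * length is a multiple of 8 bytes), submit it on the ring selected by
 * batch->flag, and start over with a fresh buffer. */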
void
intel_batchbuffer_flush(struct intel_batchbuffer *batch)
{
    unsigned int used = batch->ptr - batch->map;

    if (used == 0)
        return;

    /* Pad with a zero dword so the terminated batch is qword aligned */
    if ((used & 4) == 0) {
        *(unsigned int*)batch->ptr = 0;
        batch->ptr += 4;
    }

    *(unsigned int*)batch->ptr = MI_BATCH_BUFFER_END;
    batch->ptr += 4;
    dri_bo_unmap(batch->buffer);
    used = batch->ptr - batch->map;
    batch->run(batch->buffer, used, 0, 0, 0, batch->flag);
    intel_batchbuffer_reset(batch);
}
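/* Append one dword; the caller must have reserved space beforehand. */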
void
intel_batchbuffer_emit_dword(struct intel_batchbuffer *batch, unsigned int x)
{
    assert(intel_batchbuffer_space(batch) >= 4);
    *(unsigned int *)batch->ptr = x;
    batch->ptr += 4;
}
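/* Record a relocation at the current offset and emit the presumed GPU
 * address (bo->offset + delta); the kernel rewrites the dword if the
 * buffer object ends up elsewhere. */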
void
intel_batchbuffer_emit_reloc(struct intel_batchbuffer *batch, dri_bo *bo,
                             uint32_t read_domains, uint32_t write_domains,
                             uint32_t delta)
{
    assert(batch->ptr - batch->map < batch->size);
    dri_bo_emit_reloc(batch->buffer, read_domains, write_domains,
                      delta, batch->ptr - batch->map, bo);
    intel_batchbuffer_emit_dword(batch, bo->offset + delta);
}
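/* Submit the pending batch if fewer than size bytes remain, so the
 * upcoming emits cannot overflow the buffer. */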
void
intel_batchbuffer_require_space(struct intel_batchbuffer *batch,
                                unsigned int size)
{
    assert(size < batch->size - 8);

    if (intel_batchbuffer_space(batch) < size)
        intel_batchbuffer_flush(batch);
}
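/* Copy a dword-aligned blob of pre-built commands into the batch. */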
void
intel_batchbuffer_data(struct intel_batchbuffer *batch,
                       void *data,
                       unsigned int size)
{
    assert((size & 3) == 0);
    intel_batchbuffer_require_space(batch, size);

    assert(batch->ptr);
    memcpy(batch->ptr, data, size);
    batch->ptr += size;
}
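/* Emit a cache flush suited to the generation and ring: PIPE_CONTROL on
 * the Gen6/Gen7 render ring, MI_FLUSH_DW on the BLT and BSD rings, and
 * plain MI_FLUSH on earlier generations. */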
void
intel_batchbuffer_emit_mi_flush(struct intel_batchbuffer *batch)
{
    struct intel_driver_data *intel = batch->intel;

    if (IS_GEN6(intel->device_id) ||
        IS_GEN7(intel->device_id)) {
        if (batch->flag == I915_EXEC_RENDER) {
            BEGIN_BATCH(batch, 4);
            OUT_BATCH(batch, CMD_PIPE_CONTROL | 0x2);

            if (IS_GEN6(intel->device_id))
                OUT_BATCH(batch,
                          CMD_PIPE_CONTROL_WC_FLUSH |
                          CMD_PIPE_CONTROL_TC_FLUSH |
                          CMD_PIPE_CONTROL_NOWRITE);
            else
                /* Gen7 additionally flushes the depth cache */
                OUT_BATCH(batch,
                          CMD_PIPE_CONTROL_WC_FLUSH |
                          CMD_PIPE_CONTROL_TC_FLUSH |
                          CMD_PIPE_CONTROL_DC_FLUSH |
                          CMD_PIPE_CONTROL_NOWRITE);

            OUT_BATCH(batch, 0); /* write address */
            OUT_BATCH(batch, 0); /* write data */
            ADVANCE_BATCH(batch);
        } else {
            if (batch->flag == I915_EXEC_BLT) {
                BEGIN_BLT_BATCH(batch, 4);
                OUT_BLT_BATCH(batch, MI_FLUSH_DW);
                OUT_BLT_BATCH(batch, 0);
                OUT_BLT_BATCH(batch, 0);
                OUT_BLT_BATCH(batch, 0);
                ADVANCE_BLT_BATCH(batch);
            } else {
                assert(batch->flag == I915_EXEC_BSD);
                BEGIN_BCS_BATCH(batch, 4);
                OUT_BCS_BATCH(batch, MI_FLUSH_DW | MI_FLUSH_DW_VIDEO_PIPELINE_CACHE_INVALIDATE);
                OUT_BCS_BATCH(batch, 0);
                OUT_BCS_BATCH(batch, 0);
                OUT_BCS_BATCH(batch, 0);
                ADVANCE_BCS_BATCH(batch);
            }
        }
    } else {
        if (batch->flag == I915_EXEC_RENDER) {
            BEGIN_BATCH(batch, 1);
            OUT_BATCH(batch, MI_FLUSH | MI_FLUSH_STATE_INSTRUCTION_CACHE_INVALIDATE);
            ADVANCE_BATCH(batch);
        } else {
            assert(batch->flag == I915_EXEC_BSD);
            BEGIN_BCS_BATCH(batch, 1);
            OUT_BCS_BATCH(batch, MI_FLUSH | MI_FLUSH_STATE_INSTRUCTION_CACHE_INVALIDATE);
            ADVANCE_BCS_BATCH(batch);
        }
    }
}
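/* begin/advance bracket a fixed-size command sequence: begin records the
 * expected dword count and advance asserts that exactly that many were
 * emitted. The BEGIN_BATCH, OUT_BATCH and ADVANCE_BATCH macro families
 * used above presumably expand to these helpers plus emit_dword. */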
void
intel_batchbuffer_begin_batch(struct intel_batchbuffer *batch, int total)
{
    batch->emit_total = total * 4;
    batch->emit_start = batch->ptr;
}
void
intel_batchbuffer_advance_batch(struct intel_batchbuffer *batch)
{
    assert(batch->emit_total == (batch->ptr - batch->emit_start));
}
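/* Switch the batch to another ring: unknown flags are ignored, and a
 * batch pending on a different ring is flushed first. */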
void
intel_batchbuffer_check_batchbuffer_flag(struct intel_batchbuffer *batch, int flag)
{
    if (flag != I915_EXEC_RENDER &&
        flag != I915_EXEC_BLT &&
        flag != I915_EXEC_BSD)
        return;

    if (batch->flag == flag)
        return;

    intel_batchbuffer_flush(batch);
    batch->flag = flag;
}
int
intel_batchbuffer_check_free_space(struct intel_batchbuffer *batch, int size)
{
    return intel_batchbuffer_space(batch) >= size;
}
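/* An "atomic" section reserves all of its space up front so that no
 * intervening flush can split a multi-command sequence; the atomic flag
 * only guards against nesting. */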
static void
intel_batchbuffer_start_atomic_helper(struct intel_batchbuffer *batch,
                                      int flag,
                                      unsigned int size)
{
    assert(!batch->atomic);
    intel_batchbuffer_check_batchbuffer_flag(batch, flag);
    intel_batchbuffer_require_space(batch, size);
    batch->atomic = 1;
}
void
intel_batchbuffer_start_atomic(struct intel_batchbuffer *batch, unsigned int size)
{
    intel_batchbuffer_start_atomic_helper(batch, I915_EXEC_RENDER, size);
}

void
intel_batchbuffer_start_atomic_blt(struct intel_batchbuffer *batch, unsigned int size)
{
    intel_batchbuffer_start_atomic_helper(batch, I915_EXEC_BLT, size);
}

void
intel_batchbuffer_start_atomic_bcs(struct intel_batchbuffer *batch, unsigned int size)
{
    intel_batchbuffer_start_atomic_helper(batch, I915_EXEC_BSD, size);
}
void
intel_batchbuffer_end_atomic(struct intel_batchbuffer *batch)
{
    assert(batch->atomic);
    batch->atomic = 0;
}
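/* Number of bytes written into the current batch so far. */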
int
intel_batchbuffer_used_size(struct intel_batchbuffer *batch)
{
    return batch->ptr - batch->map;
}
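/* Pad with zero dwords (MI_NOOP) until the write offset reaches the
 * requested alignment, which must be a multiple of 4. */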
void
intel_batchbuffer_align(struct intel_batchbuffer *batch, unsigned int alignment)
{
    int used = batch->ptr - batch->map;
    int pad_size;

    assert((alignment & 3) == 0);
    pad_size = ALIGN(used, alignment) - used;
    assert((pad_size & 3) == 0);
    assert(intel_batchbuffer_space(batch) >= pad_size);

    while (pad_size >= 4) {
        intel_batchbuffer_emit_dword(batch, 0);
        pad_size -= 4;
    }
}