#define MFD_JPEG_BSD_OBJECT MFX(2, 7, 1, 8)
+#define VEB(pipeline, op, sub_opa, sub_opb) /* build a GFXPIPE command header dword for the VEBOX */ \
+ (3 << 29 | /* bits 31:29 = 3: GFXPIPE command type */ \
+ (pipeline) << 27 | /* bits 28:27: pipeline select */ \
+ (op) << 24 | /* bits 26:24: opcode */ \
+ (sub_opa) << 21 | /* bits 23:21: sub-opcode A */ \
+ (sub_opb) << 16) /* bits 20:16: sub-opcode B */
+
+#define VEB_SURFACE_STATE VEB(2, 4, 0, 0) /* VEBOX surface state command */
+#define VEB_STATE VEB(2, 4, 0, 2) /* VEBOX state command */
+#define VEB_DNDI_IECP_STATE VEB(2, 4, 0, 3) /* VEBOX DN/DI + IECP state command */
+
#define I965_DEPTHFORMAT_D32_FLOAT 1
#define BASE_ADDRESS_MODIFY (1 << 0)
assert(batch->flag == I915_EXEC_RENDER ||
batch->flag == I915_EXEC_BLT ||
- batch->flag == I915_EXEC_BSD);
+ batch->flag == I915_EXEC_BSD ||
+ batch->flag == I915_EXEC_VEBOX);
dri_bo_unreference(batch->buffer);
batch->buffer = dri_bo_alloc(intel->bufmgr,
struct intel_batchbuffer *batch = calloc(1, sizeof(*batch));
assert(flag == I915_EXEC_RENDER ||
flag == I915_EXEC_BSD ||
- flag == I915_EXEC_BLT);
+ flag == I915_EXEC_BLT ||
+ flag == I915_EXEC_VEBOX);
batch->intel = intel;
batch->flag = flag;
OUT_BLT_BATCH(batch, 0);
OUT_BLT_BATCH(batch, 0);
ADVANCE_BLT_BATCH(batch);
+ } else if (batch->flag == I915_EXEC_VEBOX) {
+ BEGIN_VEB_BATCH(batch, 4);
+ OUT_VEB_BATCH(batch, MI_FLUSH_DW); /* 4-dword MI_FLUSH_DW on the VEBOX ring */
+ OUT_VEB_BATCH(batch, 0);
+ OUT_VEB_BATCH(batch, 0);
+ OUT_VEB_BATCH(batch, 0);
+ ADVANCE_VEB_BATCH(batch);
} else {
assert(batch->flag == I915_EXEC_BSD);
BEGIN_BCS_BATCH(batch, 4);
if (batch->flag == I915_EXEC_RENDER) {
BEGIN_BATCH(batch, 1);
OUT_BATCH(batch, MI_FLUSH | MI_FLUSH_STATE_INSTRUCTION_CACHE_INVALIDATE);
- ADVANCE_BATCH(batch);
- } else {
+ ADVANCE_BATCH(batch);
+ } else {
assert(batch->flag == I915_EXEC_BSD);
BEGIN_BCS_BATCH(batch, 1);
OUT_BCS_BATCH(batch, MI_FLUSH | MI_FLUSH_STATE_INSTRUCTION_CACHE_INVALIDATE);
{
if (flag != I915_EXEC_RENDER &&
flag != I915_EXEC_BLT &&
- flag != I915_EXEC_BSD)
+ flag != I915_EXEC_BSD &&
+ flag != I915_EXEC_VEBOX)
return;
if (batch->flag == flag)
}
void
+intel_batchbuffer_start_atomic_veb(struct intel_batchbuffer *batch, unsigned int size) /* begin an atomic batch on the VEBOX ring */
+{
+ intel_batchbuffer_start_atomic_helper(batch, I915_EXEC_VEBOX, size);
+}
+
+
+void
intel_batchbuffer_end_atomic(struct intel_batchbuffer *batch)
{
assert(batch->atomic);
#define BEGIN_BATCH(batch, n) __BEGIN_BATCH(batch, n, I915_EXEC_RENDER)
#define BEGIN_BLT_BATCH(batch, n) __BEGIN_BATCH(batch, n, I915_EXEC_BLT)
#define BEGIN_BCS_BATCH(batch, n) __BEGIN_BATCH(batch, n, I915_EXEC_BSD)
-
+#define BEGIN_VEB_BATCH(batch, n) __BEGIN_BATCH(batch, n, I915_EXEC_VEBOX) /* VEBOX ring variant */
#define OUT_BATCH(batch, d) __OUT_BATCH(batch, d)
#define OUT_BLT_BATCH(batch, d) __OUT_BATCH(batch, d)
#define OUT_BCS_BATCH(batch, d) __OUT_BATCH(batch, d)
+#define OUT_VEB_BATCH(batch, d) __OUT_BATCH(batch, d) /* VEBOX ring variant */
#define OUT_RELOC(batch, bo, read_domains, write_domain, delta) \
__OUT_RELOC(batch, bo, read_domains, write_domain, delta)
#define ADVANCE_BATCH(batch) __ADVANCE_BATCH(batch)
#define ADVANCE_BLT_BATCH(batch) __ADVANCE_BATCH(batch)
#define ADVANCE_BCS_BATCH(batch) __ADVANCE_BATCH(batch)
+#define ADVANCE_VEB_BATCH(batch) __ADVANCE_BATCH(batch) /* VEBOX ring variant */
#endif /* _INTEL_BATCHBUFFER_H_ */