assert(batch->atomic);
batch->atomic = 0;
}
+
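+/* Number of bytes already emitted into the batch. */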
+int
+intel_batchbuffer_used_size(struct intel_batchbuffer *batch)
+{
+ return batch->ptr - batch->map;
+}
+
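+/*
+ * Pad the batch with zero (MI_NOOP) dwords until the write pointer is
+ * aligned to the given byte alignment, which must be a multiple of 4.
+ * Typical use (illustrative): intel_batchbuffer_align(batch, 64) before
+ * emitting state that must start on a cacheline boundary.
+ */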
+void
+intel_batchbuffer_align(struct intel_batchbuffer *batch, unsigned int alignment)
+{
+ int used = intel_batchbuffer_used_size(batch);
+ int pad_size;
+
+ assert((alignment & 3) == 0);
+ pad_size = ALIGN(used, alignment) - used;
+ assert((pad_size & 3) == 0);
+ assert(intel_batchbuffer_space(batch) >= pad_size);
+
+ while (pad_size >= 4) {
+ intel_batchbuffer_emit_dword(batch, 0);
+ pad_size -= 4;
+ }
+}
+
void intel_batchbuffer_advance_batch(struct intel_batchbuffer *batch);
void intel_batchbuffer_check_batchbuffer_flag(struct intel_batchbuffer *batch, int flag);
int intel_batchbuffer_check_free_space(struct intel_batchbuffer *batch, int size);
+int intel_batchbuffer_used_size(struct intel_batchbuffer *batch);
+void intel_batchbuffer_align(struct intel_batchbuffer *batch, unsigned int alignment);
#define __BEGIN_BATCH(batch, n, f) do { \
assert(f == batch->flag); \