const struct dzn_pipeline *pipeline =
cmdbuf->state.bindpoint[bindpoint].pipeline;
- if (!(cmdbuf->state.bindpoint[bindpoint].dirty & DZN_CMD_BINDPOINT_DIRTY_HEAPS))
+ /* The set of dirty bits that are cleared by running this function. Notably,
+ * for bindless, descriptor sets that are bound but unused by the currently
+ * bound pipeline are not processed, so their dirty bits must persist until
+ * a pipeline actually uses them. For non-bindless, all sets are
+ * processed. */
+ uint32_t dirty_bits_bindless =
+ (pipeline->dynamic_buffer_count ? DZN_CMD_BINDPOINT_DIRTY_DYNAMIC_BUFFERS : 0) |
+ (((DZN_CMD_BINDPOINT_DIRTY_DESC_SET0 << pipeline->set_count) - 1) & DZN_CMD_BINDPOINT_DIRTY_DESC_SETS);
+ uint32_t dirty_bits = (device->bindless ? dirty_bits_bindless : DZN_CMD_BINDPOINT_DIRTY_DESC_SETS | DZN_CMD_BINDPOINT_DIRTY_DYNAMIC_BUFFERS);
+ if (!(cmdbuf->state.bindpoint[bindpoint].dirty & dirty_bits))
return;
dzn_foreach_pool_type (type) {
new_heap_offsets[type] = dst_heap_offset;
update_root_desc_table[type] = true;
- for (uint32_t s = 0; s < cmdbuf->state.pipeline->root.sets_param_count; s++) {
+ for (uint32_t s = 0; s < MAX_SETS; s++) {
const struct dzn_descriptor_set *set = desc_state->sets[s].set;
if (!set) continue;
}
if (device->bindless) {
- for (uint32_t s = 0; s < pipeline->root.sets_param_count; ++s) {
+ for (uint32_t s = 0; s < pipeline->set_count; ++s) {
const struct dzn_descriptor_set *set = desc_state->sets[s].set;
if (!set || !set->pool->bindless.buf)
continue;
gpuva);
}
}
+
+ cmdbuf->state.bindpoint[bindpoint].dirty &= ~dirty_bits;
}
static void
sizeof(cmdbuf->state.sysvals.compute) / 4,
&cmdbuf->state.sysvals.compute, 0);
}
+
+ cmdbuf->state.bindpoint[bindpoint].dirty &= ~DZN_CMD_BINDPOINT_DIRTY_SYSVALS;
}
static void
dzn_cmd_buffer_update_depth_bounds(cmdbuf);
/* Reset the dirty states */
- cmdbuf->state.bindpoint[VK_PIPELINE_BIND_POINT_GRAPHICS].dirty = 0;
+ cmdbuf->state.bindpoint[VK_PIPELINE_BIND_POINT_GRAPHICS].dirty &= DZN_CMD_BINDPOINT_DIRTY_HEAPS;
cmdbuf->state.dirty = 0;
}
dzn_cmd_buffer_update_push_constants(cmdbuf, VK_PIPELINE_BIND_POINT_COMPUTE);
/* Reset the dirty states */
- cmdbuf->state.bindpoint[VK_PIPELINE_BIND_POINT_COMPUTE].dirty = 0;
+ cmdbuf->state.bindpoint[VK_PIPELINE_BIND_POINT_COMPUTE].dirty &= DZN_CMD_BINDPOINT_DIRTY_HEAPS;
}
VKAPI_ATTR void VKAPI_CALL
DZN_CMD_BINDPOINT_DIRTY_DESC_SET5 = 1 << 8,
DZN_CMD_BINDPOINT_DIRTY_DESC_SET6 = 1 << 9,
DZN_CMD_BINDPOINT_DIRTY_DESC_SET7 = 1 << 10,
- DZN_CMD_BINDPOINT_DIRTY_HEAPS =
- DZN_CMD_BINDPOINT_DIRTY_DYNAMIC_BUFFERS |
- DZN_CMD_BINDPOINT_DIRTY_SYSVALS |
+ DZN_CMD_BINDPOINT_DIRTY_DESC_SETS =
DZN_CMD_BINDPOINT_DIRTY_DESC_SET0 |
DZN_CMD_BINDPOINT_DIRTY_DESC_SET1 |
DZN_CMD_BINDPOINT_DIRTY_DESC_SET2 |
DZN_CMD_BINDPOINT_DIRTY_DESC_SET5 |
DZN_CMD_BINDPOINT_DIRTY_DESC_SET6 |
DZN_CMD_BINDPOINT_DIRTY_DESC_SET7,
+ DZN_CMD_BINDPOINT_DIRTY_HEAPS =
+ DZN_CMD_BINDPOINT_DIRTY_DYNAMIC_BUFFERS |
+ DZN_CMD_BINDPOINT_DIRTY_SYSVALS |
+ DZN_CMD_BINDPOINT_DIRTY_DESC_SETS,
};
enum dzn_cmd_dirty {
ID3D12RootSignature *sig;
} root;
struct dzn_pipeline_layout_set sets[MAX_SETS];
+ uint32_t set_count;
uint32_t desc_count[NUM_POOL_TYPES];
uint32_t dynamic_buffer_count;
ID3D12PipelineState *state;