desc->root.sets[set_idx] = nvk_descriptor_set_addr(set);
desc->sets[set_idx] = set;
desc->sets_dirty |= BITFIELD_BIT(set_idx);
+
+ /* Binding descriptors invalidates push descriptors */
+ desc->push_dirty &= ~BITFIELD_BIT(set_idx);
}
if (set_layout->dynamic_buffer_count > 0) {
memcpy(desc->root.push + offset, pValues, size);
}
}
+
+VKAPI_ATTR void VKAPI_CALL
+nvk_CmdPushDescriptorSetKHR(VkCommandBuffer commandBuffer,
+ VkPipelineBindPoint pipelineBindPoint,
+ VkPipelineLayout layout,
+ uint32_t set,
+ uint32_t descriptorWriteCount,
+ const VkWriteDescriptorSet *pDescriptorWrites)
+{
+ VK_FROM_HANDLE(nvk_cmd_buffer, cmd, commandBuffer);
+ VK_FROM_HANDLE(nvk_pipeline_layout, pipeline_layout, layout);
+ struct nvk_descriptor_state *desc =
+ nvk_get_descriptors_state(cmd, pipelineBindPoint);
+
+ assert(set < NVK_MAX_SETS);
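+   /* Lazily allocate the CPU-side shadow storage for this push set the
+    * first time this set index is pushed to.
+    */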
+ if (unlikely(desc->push[set] == NULL)) {
+ desc->push[set] = vk_zalloc(&cmd->vk.pool->alloc,
+ sizeof(*desc->push[set]), 8,
+ VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+ if (unlikely(desc->push[set] == NULL)) {
+ vk_command_buffer_set_error(&cmd->vk, VK_ERROR_OUT_OF_HOST_MEMORY);
+ return;
+ }
+ }
+
+ /* Pushing descriptors replaces whatever sets are bound */
+ desc->sets[set] = NULL;
+
+ nvk_push_descriptor_set_update(desc->push[set],
+ pipeline_layout->set[set].layout,
+ descriptorWriteCount, pDescriptorWrites);
+ desc->push_dirty |= BITFIELD_BIT(set);
+}
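
For reference, a minimal sketch of how an application drives this entry point,
assuming a pipeline layout whose set 0 was created with
VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR; the cmd_buf, ubo, and
pipeline_layout handles are hypothetical, and a real application would fetch
vkCmdPushDescriptorSetKHR via vkGetDeviceProcAddr:

   VkDescriptorBufferInfo buf_info = {
      .buffer = ubo,               /* hypothetical VkBuffer */
      .offset = 0,
      .range = VK_WHOLE_SIZE,
   };
   VkWriteDescriptorSet write = {
      .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
      .dstSet = VK_NULL_HANDLE,    /* ignored for push descriptors */
      .dstBinding = 0,
      .descriptorCount = 1,
      .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
      .pBufferInfo = &buf_info,
   };
   vkCmdPushDescriptorSetKHR(cmd_buf, VK_PIPELINE_BIND_POINT_GRAPHICS,
                             pipeline_layout, /*set*/ 0, 1, &write);

The driver lands in nvk_CmdPushDescriptorSetKHR above, writes the descriptor
into the set's CPU shadow, and defers the GPU upload to flush time: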
+
+void
+nvk_cmd_buffer_flush_push_descriptors(struct nvk_cmd_buffer *cmd,
+ struct nvk_descriptor_state *desc)
+{
+ VkResult result;
+
+ if (!desc->push_dirty)
+ return;
+
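+   /* Each push set lives in CPU memory; upload any that changed since the
+    * last flush to a fresh GPU buffer.
+    */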
+ u_foreach_bit(set_idx, desc->push_dirty) {
+ struct nvk_push_descriptor_set *push_set = desc->push[set_idx];
+ uint64_t push_set_addr;
+ result = nvk_cmd_buffer_upload_data(cmd, push_set->data,
+ sizeof(push_set->data),
+ NVK_MIN_UBO_ALIGNMENT,
+ &push_set_addr);
+ if (unlikely(result != VK_SUCCESS)) {
+ vk_command_buffer_set_error(&cmd->vk, result);
+ return;
+ }
+
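+      /* Point the root descriptor table at the freshly uploaded copy */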
+ desc->root.sets[set_idx] = push_set_addr;
+   }
+
+   /* Everything pushed so far now has a valid GPU copy; the dirty bits get
+    * set again on the next vkCmdPushDescriptorSetKHR().  Without this, the
+    * early-out above would never trigger and every flush would re-upload
+    * every push set.
+    */
+   desc->push_dirty = 0;
+}
struct nvk_cmd_bo;
struct nvk_cmd_pool;
struct nvk_image_view;
+struct nvk_push_descriptor_set;
/** Root descriptor table. This gets pushed to the GPU directly */
struct nvk_root_descriptor_table {
   /* ... */
};

struct nvk_descriptor_state {
   struct nvk_root_descriptor_table root;
struct nvk_descriptor_set *sets[NVK_MAX_SETS];
uint32_t sets_dirty;
+
+ struct nvk_push_descriptor_set *push[NVK_MAX_SETS];
+ uint32_t push_dirty;
};
struct nvk_attachment {
const void *data, uint32_t size,
uint32_t alignment, uint64_t *addr);
+void
+nvk_cmd_buffer_flush_push_descriptors(struct nvk_cmd_buffer *cmd,
+ struct nvk_descriptor_state *desc);
+
#endif
struct nvk_descriptor_state *desc = &cmd->state.cs.descriptors;
VkResult result;
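+   /* Upload any dirty push descriptor sets; this fills in desc->root.sets[]
+    * before the root table itself is uploaded.
+    */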
+ nvk_cmd_buffer_flush_push_descriptors(cmd, desc);
+
desc->root.cs.block_size[0] = shader->cp.block_size[0];
desc->root.cs.block_size[1] = shader->cp.block_size[1];
desc->root.cs.block_size[2] = shader->cp.block_size[2];
static void
nvk_flush_descriptors(struct nvk_cmd_buffer *cmd)
{
- const struct nvk_descriptor_state *desc = &cmd->state.gfx.descriptors;
+ struct nvk_descriptor_state *desc = &cmd->state.gfx.descriptors;
VkResult result;
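+   /* Push descriptor data must hit GPU memory before the root table upload
+    * below captures its addresses.
+    */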
+ nvk_cmd_buffer_flush_push_descriptors(cmd, desc);
+
uint64_t root_table_addr;
result = nvk_cmd_buffer_upload_data(cmd, &desc->root, sizeof(desc->root),
NVK_MIN_UBO_ALIGNMENT,
}
}
+void
+nvk_push_descriptor_set_update(struct nvk_push_descriptor_set *push_set,
+ struct nvk_descriptor_set_layout *layout,
+ uint32_t write_count,
+ const VkWriteDescriptorSet *writes)
+{
+   assert(layout->descriptor_buffer_size <= sizeof(push_set->data));
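+
+   /* Wrap the shadow storage in a temporary nvk_descriptor_set so the
+    * common write_*_desc() helpers below can be reused as-is.
+    */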
+ struct nvk_descriptor_set set = {
+ .layout = layout,
+ .mapped_ptr = push_set->data,
+ };
+
+ for (uint32_t w = 0; w < write_count; w++) {
+ const VkWriteDescriptorSet *write = &writes[w];
+ assert(write->dstSet == VK_NULL_HANDLE);
+
+ switch (write->descriptorType) {
+ case VK_DESCRIPTOR_TYPE_SAMPLER:
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+ case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+ for (uint32_t j = 0; j < write->descriptorCount; j++) {
+ write_image_view_desc(&set, write->pImageInfo + j,
+ write->dstBinding,
+ write->dstArrayElement + j,
+ write->descriptorType);
+ }
+ break;
+
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+ for (uint32_t j = 0; j < write->descriptorCount; j++) {
+ write_buffer_view_desc(&set, write->pTexelBufferView[j],
+ write->dstBinding, write->dstArrayElement + j);
+ }
+ break;
+
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ for (uint32_t j = 0; j < write->descriptorCount; j++) {
+ write_buffer_desc(&set, write->pBufferInfo + j, write->dstBinding,
+ write->dstArrayElement + j);
+ }
+ break;
+
+ default:
+ break;
+ }
+ }
+}
+
static void
nvk_descriptor_set_destroy(struct nvk_device *device,
struct nvk_descriptor_pool *pool,
struct nvk_descriptor_set {
struct vk_object_base base;
struct nvk_descriptor_set_layout *layout;
- uint32_t buffer_count;
uint32_t bo_offset;
struct nouveau_ws_bo *bo;
void *mapped_ptr;
return set->bo->offset + set->bo_offset;
}
+struct nvk_push_descriptor_set {
+   /* CPU shadow of the set's descriptor data, in bytes: room for the
+    * maximum number of push descriptors at the maximum descriptor size.
+    */
+   uint8_t data[NVK_MAX_PUSH_DESCRIPTORS * NVK_MAX_DESCRIPTOR_SIZE];
+};
+
+void
+nvk_push_descriptor_set_update(struct nvk_push_descriptor_set *push_set,
+ struct nvk_descriptor_set_layout *layout,
+ uint32_t write_count,
+ const VkWriteDescriptorSet *writes);
+
#endif
default:
unreachable("Invalid descriptor type");
}
+
+ assert(*stride <= NVK_MAX_DESCRIPTOR_SIZE);
}
VKAPI_ATTR VkResult VKAPI_CALL
.KHR_dedicated_allocation = true,
.KHR_get_memory_requirements2 = true,
.KHR_format_feature_flags2 = true,
+ .KHR_push_descriptor = true,
#ifdef NVK_USE_WSI_PLATFORM
.KHR_swapchain = true,
.KHR_swapchain_mutable_format = true,
vk_foreach_struct(ext, pProperties->pNext)
{
switch (ext->sType) {
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: {
+ VkPhysicalDevicePushDescriptorPropertiesKHR *p = (void *)ext;
+ p->maxPushDescriptors = NVK_MAX_PUSH_DESCRIPTORS;
+ break;
+ }
default:
nvk_debug_ignored_stype(ext->sType);
break;
#define NVK_MAX_RTS 8
#define NVK_MIN_UBO_ALIGNMENT 64
#define NVK_MAX_VIEWPORTS 16
+/* Maximum size of a single descriptor, in bytes */
+#define NVK_MAX_DESCRIPTOR_SIZE 16
+/* Maximum number of descriptors in a push descriptor set */
+#define NVK_MAX_PUSH_DESCRIPTORS 32
/**
* Warn on ignored extension structs.