From e9fbac9002fc8ef24c35d01b5d693ad06fa93473 Mon Sep 17 00:00:00 2001
From: Faith Ekstrand
Date: Mon, 30 Jan 2023 20:11:47 -0600
Subject: [PATCH] nvk: Re-format descriptor set layouts

Part-of:
---
 src/nouveau/vulkan/nvk_descriptor_set_layout.c | 357 +++++++++++++------------
 src/nouveau/vulkan/nvk_descriptor_set_layout.h |  70 ++---
 2 files changed, 219 insertions(+), 208 deletions(-)

diff --git a/src/nouveau/vulkan/nvk_descriptor_set_layout.c b/src/nouveau/vulkan/nvk_descriptor_set_layout.c
index f1eeaab..ff8ae7d 100644
--- a/src/nouveau/vulkan/nvk_descriptor_set_layout.c
+++ b/src/nouveau/vulkan/nvk_descriptor_set_layout.c
@@ -7,79 +7,84 @@
 #include "util/mesa-sha1.h"
 
 static bool
-binding_has_immutable_samplers(const VkDescriptorSetLayoutBinding *binding) {
-  switch (binding->descriptorType) {
-  case VK_DESCRIPTOR_TYPE_SAMPLER:
-  case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
-    return binding->pImmutableSamplers != NULL;
-
-  default:
-    return false;
-  }
+binding_has_immutable_samplers(const VkDescriptorSetLayoutBinding *binding)
+{
+   switch (binding->descriptorType) {
+   case VK_DESCRIPTOR_TYPE_SAMPLER:
+   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+      return binding->pImmutableSamplers != NULL;
+
+   default:
+      return false;
+   }
 }
 
-void nvk_descriptor_stride_align_for_type(
-    VkDescriptorType type, const VkMutableDescriptorTypeListVALVE *type_list,
-    uint32_t *stride, uint32_t *align) {
-  switch (type) {
-  case VK_DESCRIPTOR_TYPE_SAMPLER:
-  case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
-    /* TODO: How do samplers work? */
-  case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
-  case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
-  case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
-    *stride = *align = sizeof(struct nvk_image_descriptor);
-    break;
-
-  case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
-  case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
-    unreachable("TODO: Implement texel buffers");
-
-  case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
-  case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
-  case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
-  case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
-    *stride = *align = sizeof(struct nvk_buffer_address);
-    break;
-
-  case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
-    *stride = 1; /* Array size is bytes */
-    *align = NVK_MIN_UBO_ALIGNMENT;
-    break;
-
-  case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE:
-    *stride = *align = 0;
-    for (unsigned i = 0; i < type_list->descriptorTypeCount; i++) {
-      /* This shouldn't recurse */
-      assert(type_list->pDescriptorTypes[i] !=
-             VK_DESCRIPTOR_TYPE_MUTABLE_VALVE);
-      uint32_t desc_stride, desc_align;
-      nvk_descriptor_stride_align_for_type(type_list->pDescriptorTypes[i],
-                                           NULL, &desc_stride, &desc_align);
-      *stride = MAX2(*stride, desc_stride);
-      *align = MAX2(*align, desc_align);
-    }
-    *stride = ALIGN(*stride, *align);
-    break;
-
-  default:
-    unreachable("Invalid descriptor type");
-  }
-}
-
-VKAPI_ATTR VkResult VKAPI_CALL nvk_CreateDescriptorSetLayout(
-    VkDevice _device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
-    const VkAllocationCallbacks *pAllocator,
-    VkDescriptorSetLayout *pSetLayout) {
-  VK_FROM_HANDLE(nvk_device, device, _device);
+void
+nvk_descriptor_stride_align_for_type(VkDescriptorType type,
+                                     const VkMutableDescriptorTypeListVALVE *type_list,
+                                     uint32_t *stride, uint32_t *align)
+{
+   switch (type) {
+   case VK_DESCRIPTOR_TYPE_SAMPLER:
+   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+      /* TODO: How do samplers work? */
+   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+      *stride = *align = sizeof(struct nvk_image_descriptor);
+      break;
+
+   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+      unreachable("TODO: Implement texel buffers");
+
+   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+      *stride = *align = sizeof(struct nvk_buffer_address);
+      break;
+
+   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
+      *stride = 1; /* Array size is bytes */
+      *align = NVK_MIN_UBO_ALIGNMENT;
+      break;
+
+   case VK_DESCRIPTOR_TYPE_MUTABLE_VALVE:
+      *stride = *align = 0;
+      for (unsigned i = 0; i < type_list->descriptorTypeCount; i++) {
+         /* This shouldn't recurse */
+         assert(type_list->pDescriptorTypes[i] !=
+                VK_DESCRIPTOR_TYPE_MUTABLE_VALVE);
+         uint32_t desc_stride, desc_align;
+         nvk_descriptor_stride_align_for_type(type_list->pDescriptorTypes[i],
+                                              NULL, &desc_stride, &desc_align);
+         *stride = MAX2(*stride, desc_stride);
+         *align = MAX2(*align, desc_align);
+      }
+      *stride = ALIGN(*stride, *align);
+      break;
 
-  uint32_t num_bindings = 0;
-  uint32_t immutable_sampler_count = 0;
-  for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
-    const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
-    num_bindings = MAX2(num_bindings, binding->binding + 1);
+   default:
+      unreachable("Invalid descriptor type");
+   }
+}
 
-    /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
+VKAPI_ATTR VkResult VKAPI_CALL
+nvk_CreateDescriptorSetLayout(VkDevice _device,
+                              const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
+                              const VkAllocationCallbacks *pAllocator,
+                              VkDescriptorSetLayout *pSetLayout)
+{
+   VK_FROM_HANDLE(nvk_device, device, _device);
+
+   uint32_t num_bindings = 0;
+   uint32_t immutable_sampler_count = 0;
+   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
+      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
+      num_bindings = MAX2(num_bindings, binding->binding + 1);
+
+      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
        *
        *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
        *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
@@ -90,136 +95,140 @@ VKAPI_ATTR VkResult VKAPI_CALL nvk_CreateDescriptorSetLayout(
        * We need to be careful here and only parse pImmutableSamplers if we
        * have one of the right descriptor types.
        */
-    if (binding_has_immutable_samplers(binding))
-      immutable_sampler_count += binding->descriptorCount;
-  }
-
-  VK_MULTIALLOC(ma);
-  VK_MULTIALLOC_DECL(&ma, struct nvk_descriptor_set_layout, layout, 1);
-  VK_MULTIALLOC_DECL(&ma, struct nvk_descriptor_set_binding_layout, bindings,
-                     num_bindings);
-  VK_MULTIALLOC_DECL(&ma, struct nvk_sampler *, samplers,
-                     immutable_sampler_count);
-
-  if (!vk_multialloc_zalloc(&ma, &device->vk.alloc,
-                            VK_SYSTEM_ALLOCATION_SCOPE_DEVICE))
-    return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
-
-  vk_object_base_init(&device->vk, &layout->base,
-                      VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
-
-  p_atomic_set(&layout->ref_cnt, 1);
-  layout->binding_count = num_bindings;
-
-  for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
-    const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
-    uint32_t b = binding->binding;
-    /* We temporarily store pCreateInfo->pBindings[] index (plus one) in the
+      if (binding_has_immutable_samplers(binding))
+         immutable_sampler_count += binding->descriptorCount;
+   }
+
+   VK_MULTIALLOC(ma);
+   VK_MULTIALLOC_DECL(&ma, struct nvk_descriptor_set_layout, layout, 1);
+   VK_MULTIALLOC_DECL(&ma, struct nvk_descriptor_set_binding_layout, bindings,
+                      num_bindings);
+   VK_MULTIALLOC_DECL(&ma, struct nvk_sampler *, samplers,
+                      immutable_sampler_count);
+
+   if (!vk_multialloc_zalloc(&ma, &device->vk.alloc,
+                             VK_SYSTEM_ALLOCATION_SCOPE_DEVICE))
+      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
+
+   vk_object_base_init(&device->vk, &layout->base,
+                       VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
+
+   p_atomic_set(&layout->ref_cnt, 1);
+   layout->binding_count = num_bindings;
+
+   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
+      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
+      uint32_t b = binding->binding;
+      /* We temporarily store pCreateInfo->pBindings[] index (plus one) in the
        * immutable_samplers pointer. This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
-    layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
-  }
+      layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
+   }
 
-  const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
+   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
       vk_find_struct_const(pCreateInfo->pNext,
                            DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
-  const VkMutableDescriptorTypeCreateInfoVALVE *mutable_info =
+   const VkMutableDescriptorTypeCreateInfoVALVE *mutable_info =
       vk_find_struct_const(pCreateInfo->pNext,
                            MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE);
 
-  uint32_t buffer_size = 0;
-  for (uint32_t b = 0; b < num_bindings; b++) {
-    /* We stashed the pCreateInfo->pBindings[] index (plus one) in the
+   uint32_t buffer_size = 0;
+   for (uint32_t b = 0; b < num_bindings; b++) {
+      /* We stashed the pCreateInfo->pBindings[] index (plus one) in the
        * immutable_samplers pointer. Check for NULL (empty binding) and then
        * reset it and compute the index.
        */
-    if (layout->binding[b].immutable_samplers == NULL)
-      continue;
-    const uint32_t info_idx =
-        (uintptr_t)(void *)layout->binding[b].immutable_samplers - 1;
-    layout->binding[b].immutable_samplers = NULL;
-
-    const VkDescriptorSetLayoutBinding *binding =
-        &pCreateInfo->pBindings[info_idx];
-
-    if (binding->descriptorCount == 0)
-      continue;
-
-    layout->binding[b].type = binding->descriptorType;
-
-    if (binding_flags_info && binding_flags_info->bindingCount > 0) {
-      assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
-      layout->binding[b].flags = binding_flags_info->pBindingFlags[info_idx];
-    }
-
-    layout->binding[b].array_size = binding->descriptorCount;
-
-    const VkMutableDescriptorTypeListVALVE *type_list = NULL;
-    if (binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) {
-      assert(mutable_info != NULL);
-      assert(info_idx <= mutable_info->mutableDescriptorTypeListCount);
-      type_list = &mutable_info->pMutableDescriptorTypeLists[info_idx];
-    }
-
-    uint32_t stride, align;
-    nvk_descriptor_stride_align_for_type(binding->descriptorType, type_list,
-                                         &stride, &align);
-
-    layout->binding[b].offset = ALIGN_POT(buffer_size, align);
-    layout->binding[b].stride = stride;
-    buffer_size += stride * binding->descriptorCount;
-
-    if (binding_has_immutable_samplers(binding)) {
-      layout->binding[b].immutable_samplers = samplers;
-      samplers += binding->descriptorCount;
-      for (uint32_t i = 0; i < binding->descriptorCount; i++) {
-        VK_FROM_HANDLE(nvk_sampler, sampler, binding->pImmutableSamplers[i]);
-        layout->binding[b].immutable_samplers[i] = sampler;
+      if (layout->binding[b].immutable_samplers == NULL)
+         continue;
+      const uint32_t info_idx =
+         (uintptr_t)(void *)layout->binding[b].immutable_samplers - 1;
+      layout->binding[b].immutable_samplers = NULL;
+
+      const VkDescriptorSetLayoutBinding *binding =
+         &pCreateInfo->pBindings[info_idx];
+
+      if (binding->descriptorCount == 0)
+         continue;
+
+      layout->binding[b].type = binding->descriptorType;
+
+      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
+         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
+         layout->binding[b].flags = binding_flags_info->pBindingFlags[info_idx];
+      }
+
+      layout->binding[b].array_size = binding->descriptorCount;
+
+      const VkMutableDescriptorTypeListVALVE *type_list = NULL;
+      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_VALVE) {
+         assert(mutable_info != NULL);
+         assert(info_idx <= mutable_info->mutableDescriptorTypeListCount);
+         type_list = &mutable_info->pMutableDescriptorTypeLists[info_idx];
+      }
+
+      uint32_t stride, align;
+      nvk_descriptor_stride_align_for_type(binding->descriptorType, type_list,
+                                           &stride, &align);
+
+      layout->binding[b].offset = ALIGN_POT(buffer_size, align);
+      layout->binding[b].stride = stride;
+      buffer_size += stride * binding->descriptorCount;
+
+      if (binding_has_immutable_samplers(binding)) {
+         layout->binding[b].immutable_samplers = samplers;
+         samplers += binding->descriptorCount;
+         for (uint32_t i = 0; i < binding->descriptorCount; i++) {
+            VK_FROM_HANDLE(nvk_sampler, sampler, binding->pImmutableSamplers[i]);
+            layout->binding[b].immutable_samplers[i] = sampler;
+         }
       }
-    }
-  }
+   }
 
-  layout->descriptor_buffer_size = buffer_size;
+   layout->descriptor_buffer_size = buffer_size;
 
-  struct mesa_sha1 sha1_ctx;
-  _mesa_sha1_init(&sha1_ctx);
+   struct mesa_sha1 sha1_ctx;
+   _mesa_sha1_init(&sha1_ctx);
 
 #define SHA1_UPDATE_VALUE(x) _mesa_sha1_update(&sha1_ctx, &(x), sizeof(x));
-  SHA1_UPDATE_VALUE(layout->descriptor_buffer_size);
-  SHA1_UPDATE_VALUE(layout->binding_count);
-
-  for (uint32_t b = 0; b < num_bindings; b++) {
-    SHA1_UPDATE_VALUE(layout->binding[b].type);
-    SHA1_UPDATE_VALUE(layout->binding[b].flags);
-    SHA1_UPDATE_VALUE(layout->binding[b].array_size);
-    SHA1_UPDATE_VALUE(layout->binding[b].offset);
-    SHA1_UPDATE_VALUE(layout->binding[b].stride);
-    /* Immutable samplers are ignored for now */
-  }
+   SHA1_UPDATE_VALUE(layout->descriptor_buffer_size);
+   SHA1_UPDATE_VALUE(layout->binding_count);
+
+   for (uint32_t b = 0; b < num_bindings; b++) {
+      SHA1_UPDATE_VALUE(layout->binding[b].type);
+      SHA1_UPDATE_VALUE(layout->binding[b].flags);
+      SHA1_UPDATE_VALUE(layout->binding[b].array_size);
+      SHA1_UPDATE_VALUE(layout->binding[b].offset);
+      SHA1_UPDATE_VALUE(layout->binding[b].stride);
+      /* Immutable samplers are ignored for now */
+   }
 #undef SHA1_UPDATE_VALUE
 
-  _mesa_sha1_final(&sha1_ctx, layout->sha1);
+   _mesa_sha1_final(&sha1_ctx, layout->sha1);
 
-  *pSetLayout = nvk_descriptor_set_layout_to_handle(layout);
+   *pSetLayout = nvk_descriptor_set_layout_to_handle(layout);
 
-  return VK_SUCCESS;
+   return VK_SUCCESS;
 }
 
-void nvk_descriptor_set_layout_destroy(
-    struct nvk_device *device, struct nvk_descriptor_set_layout *layout) {
-  assert(layout->ref_cnt == 0);
-  vk_object_free(&device->vk, NULL, layout);
+void
+nvk_descriptor_set_layout_destroy(
+   struct nvk_device *device, struct nvk_descriptor_set_layout *layout)
+{
+   assert(layout->ref_cnt == 0);
+   vk_object_free(&device->vk, NULL, layout);
 }
 
-VKAPI_ATTR void VKAPI_CALL nvk_DestroyDescriptorSetLayout(
-    VkDevice _device, VkDescriptorSetLayout descriptorSetLayout,
-    const VkAllocationCallbacks *pAllocator) {
-  VK_FROM_HANDLE(nvk_device, device, _device);
-  VK_FROM_HANDLE(nvk_descriptor_set_layout, layout, descriptorSetLayout);
+VKAPI_ATTR void VKAPI_CALL
+nvk_DestroyDescriptorSetLayout(VkDevice _device,
+                               VkDescriptorSetLayout descriptorSetLayout,
+                               const VkAllocationCallbacks *pAllocator)
+{
+   VK_FROM_HANDLE(nvk_device, device, _device);
+   VK_FROM_HANDLE(nvk_descriptor_set_layout, layout, descriptorSetLayout);
 
-  if (!layout)
-    return;
+   if (!layout)
+      return;
 
-  nvk_descriptor_set_layout_unref(device, layout);
+   nvk_descriptor_set_layout_unref(device, layout);
 }
diff --git a/src/nouveau/vulkan/nvk_descriptor_set_layout.h b/src/nouveau/vulkan/nvk_descriptor_set_layout.h
index 840f159..4117cf8 100644
--- a/src/nouveau/vulkan/nvk_descriptor_set_layout.h
+++ b/src/nouveau/vulkan/nvk_descriptor_set_layout.h
@@ -9,67 +9,69 @@ struct nvk_device;
 struct nvk_sampler;
 
 struct nvk_descriptor_set_binding_layout {
-  /* The type of the descriptors in this binding */
-  VkDescriptorType type;
+   /* The type of the descriptors in this binding */
+   VkDescriptorType type;
 
-  /* Flags provided when this binding was created */
-  VkDescriptorBindingFlags flags;
+   /* Flags provided when this binding was created */
+   VkDescriptorBindingFlags flags;
 
-  /* Number of array elements in this binding (or size in bytes for inline
+   /* Number of array elements in this binding (or size in bytes for inline
     * uniform data)
     */
-  uint32_t array_size;
+   uint32_t array_size;
 
-  /* Offset into the descriptor buffer where this descriptor lives */
-  uint32_t offset;
+   /* Offset into the descriptor buffer where this descriptor lives */
+   uint32_t offset;
 
-  /* Stride between array elements in the descriptor buffer */
-  uint32_t stride;
+   /* Stride between array elements in the descriptor buffer */
+   uint32_t stride;
 
-  /* Immutable samplers (or NULL if no immutable samplers) */
-  struct nvk_sampler **immutable_samplers;
+   /* Immutable samplers (or NULL if no immutable samplers) */
+   struct nvk_sampler **immutable_samplers;
 };
 
 struct nvk_descriptor_set_layout {
-  struct vk_object_base base;
+   struct vk_object_base base;
 
-  uint32_t ref_cnt;
+   uint32_t ref_cnt;
 
-  unsigned char sha1[20];
+   unsigned char sha1[20];
 
-  /* Size of the descriptor buffer for this descriptor set */
-  uint32_t descriptor_buffer_size;
+   /* Size of the descriptor buffer for this descriptor set */
+   uint32_t descriptor_buffer_size;
 
-  /* Number of bindings in this descriptor set */
-  uint32_t binding_count;
+   /* Number of bindings in this descriptor set */
+   uint32_t binding_count;
 
-  /* Bindings in this descriptor set */
-  struct nvk_descriptor_set_binding_layout binding[0];
+   /* Bindings in this descriptor set */
+   struct nvk_descriptor_set_binding_layout binding[0];
 };
 
 VK_DEFINE_HANDLE_CASTS(nvk_descriptor_set_layout, base, VkDescriptorSetLayout,
                        VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT)
 
-void nvk_descriptor_stride_align_for_type(
-    VkDescriptorType type, const VkMutableDescriptorTypeListVALVE *type_list,
-    uint32_t *stride, uint32_t *align);
+void nvk_descriptor_stride_align_for_type(VkDescriptorType type,
+                                          const VkMutableDescriptorTypeListVALVE *type_list,
+                                          uint32_t *stride, uint32_t *align);
 
-void nvk_descriptor_set_layout_destroy(
-    struct nvk_device *device, struct nvk_descriptor_set_layout *layout);
+void nvk_descriptor_set_layout_destroy(struct nvk_device *device,
+                                       struct nvk_descriptor_set_layout *layout);
 
 static inline struct nvk_descriptor_set_layout *
-nvk_descriptor_set_layout_ref(struct nvk_descriptor_set_layout *layout) {
-  assert(layout && layout->ref_cnt >= 1);
-  p_atomic_inc(&layout->ref_cnt);
-  return layout;
+nvk_descriptor_set_layout_ref(struct nvk_descriptor_set_layout *layout)
+{
+   assert(layout && layout->ref_cnt >= 1);
+   p_atomic_inc(&layout->ref_cnt);
+   return layout;
 }
 
 static inline void
 nvk_descriptor_set_layout_unref(struct nvk_device *device,
-                                struct nvk_descriptor_set_layout *layout) {
-  assert(layout && layout->ref_cnt >= 1);
-  if (p_atomic_dec_zero(&layout->ref_cnt))
-    nvk_descriptor_set_layout_destroy(device, layout);
+                                struct nvk_descriptor_set_layout *layout)
+{
+   assert(layout && layout->ref_cnt >= 1);
+   if (p_atomic_dec_zero(&layout->ref_cnt))
+      nvk_descriptor_set_layout_destroy(device, layout);
 }
 
 #endif
-- 
2.7.4