}
cmd_buf_queue(cmd_buffer, cmd);
}
+
+/* vkCmdPushDescriptorSetWithTemplateKHR: record a push-descriptor update,
+ * described by an update template, into the command buffer.
+ *
+ * The template entries and the packed application data in pData are
+ * flattened into a single LVP_CMD_PUSH_DESCRIPTOR_SET entry so that the
+ * descriptor data remains valid until the command buffer executes (pData
+ * is only guaranteed live for the duration of this call).
+ */
+void lvp_CmdPushDescriptorSetWithTemplateKHR(
+ VkCommandBuffer commandBuffer,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+ VkPipelineLayout _layout,
+ uint32_t set,
+ const void* pData)
+{
+ LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+ LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, descriptorUpdateTemplate);
+ int cmd_size = 0;
+ struct lvp_cmd_buffer_entry *cmd;
+
+ /* Trailing storage: one lvp_write_descriptor per template entry... */
+ cmd_size += templ->entry_count * sizeof(struct lvp_write_descriptor);
+
+ /* ...plus one lvp_descriptor_info per individual descriptor. */
+ int count_descriptors = 0;
+ for (unsigned i = 0; i < templ->entry_count; i++) {
+ VkDescriptorUpdateTemplateEntry *entry = &templ->entry[i];
+ count_descriptors += entry->descriptorCount;
+ }
+ cmd_size += count_descriptors * sizeof(union lvp_descriptor_info);
+
+ cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, LVP_CMD_PUSH_DESCRIPTOR_SET);
+ if (!cmd)
+ return;
+
+ /* NOTE(review): the _layout and set parameters are ignored in favor of
+  * the values baked into the template at creation time — the spec requires
+  * them to match for push-descriptor templates, but confirm. */
+ cmd->u.push_descriptor_set.bind_point = templ->bind_point;
+ cmd->u.push_descriptor_set.layout = templ->pipeline_layout;
+ cmd->u.push_descriptor_set.set = templ->set;
+ cmd->u.push_descriptor_set.descriptor_write_count = templ->entry_count;
+ /* The write array lives immediately after the command entry, and the
+  * info array immediately after the writes — matching cmd_size above. */
+ cmd->u.push_descriptor_set.descriptors = (struct lvp_write_descriptor *)(cmd + 1);
+ cmd->u.push_descriptor_set.infos = (union lvp_descriptor_info *)(cmd->u.push_descriptor_set.descriptors + templ->entry_count);
+
+ /* Running offset into the flat infos array across all entries. */
+ unsigned descriptor_index = 0;
+
+ for (unsigned i = 0; i < templ->entry_count; i++) {
+ struct lvp_write_descriptor *desc = &cmd->u.push_descriptor_set.descriptors[i];
+ struct VkDescriptorUpdateTemplateEntry *entry = &templ->entry[i];
+ const uint8_t *pSrc = ((const uint8_t *) pData) + entry->offset;
+
+ /* dstSet is ignored */
+ desc->dst_binding = entry->dstBinding;
+ desc->dst_array_element = entry->dstArrayElement;
+ desc->descriptor_count = entry->descriptorCount;
+ desc->descriptor_type = entry->descriptorType;
+
+ /* Unpack each descriptor from the application's strided data into the
+  * driver's union form; handle types are resolved to driver pointers now. */
+ for (unsigned j = 0; j < desc->descriptor_count; j++) {
+ union lvp_descriptor_info *info = &cmd->u.push_descriptor_set.infos[descriptor_index + j];
+ switch (desc->descriptor_type) {
+ case VK_DESCRIPTOR_TYPE_SAMPLER:
+ info->sampler = lvp_sampler_from_handle(*(VkSampler *)pSrc);
+ break;
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
+ VkDescriptorImageInfo *image_info = (VkDescriptorImageInfo *)pSrc;
+ info->sampler = lvp_sampler_from_handle(image_info->sampler);
+ info->iview = lvp_image_view_from_handle(image_info->imageView);
+ info->image_layout = image_info->imageLayout;
+ break;
+ }
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+ case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
+ VkDescriptorImageInfo *image_info = (VkDescriptorImageInfo *)pSrc;
+ info->iview = lvp_image_view_from_handle(image_info->imageView);
+ info->image_layout = image_info->imageLayout;
+ break;
+ }
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+ info->buffer_view = lvp_buffer_view_from_handle(*(VkBufferView *)pSrc);
+ break;
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+ /* NOTE(review): unknown descriptor types fall through to the buffer
+  * case via default — presumably unreachable for valid input. */
+ default: {
+ VkDescriptorBufferInfo *buffer_info = (VkDescriptorBufferInfo *)pSrc;
+ info->buffer = lvp_buffer_from_handle(buffer_info->buffer);
+ info->offset = buffer_info->offset;
+ info->range = buffer_info->range;
+ break;
+ }
+ }
+ /* Application data for consecutive array elements is entry->stride apart. */
+ pSrc += entry->stride;
+ }
+ descriptor_index += desc->descriptor_count;
+ }
+ cmd_buf_queue(cmd_buffer, cmd);
+}
{
}
+
+/* vkCreateDescriptorUpdateTemplate: allocate a template object and copy the
+ * application's update entries into trailing storage so the create-info does
+ * not need to outlive this call.
+ *
+ * Returns VK_SUCCESS, or VK_ERROR_OUT_OF_HOST_MEMORY on allocation failure.
+ */
+VkResult lvp_CreateDescriptorUpdateTemplate(VkDevice _device,
+ const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
+ const VkAllocationCallbacks *pAllocator,
+ VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
+{
+ LVP_FROM_HANDLE(lvp_device, device, _device);
+ /* NOTE(review): for push-descriptor templates descriptorSetLayout may be
+  * VK_NULL_HANDLE, in which case set_layout is NULL — confirm consumers
+  * never dereference it for that template type. */
+ LVP_FROM_HANDLE(lvp_descriptor_set_layout, set_layout, pCreateInfo->descriptorSetLayout);
+ const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
+ /* Entries are stored inline after the struct (see entry[0] member). */
+ const size_t size = sizeof(struct lvp_descriptor_update_template) +
+ sizeof(VkDescriptorUpdateTemplateEntry) * entry_count;
+
+ struct lvp_descriptor_update_template *templ;
+
+ templ = vk_alloc2(&device->vk.alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+ if (!templ)
+ return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+
+ vk_object_base_init(&device->vk, &templ->base,
+ VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
+
+ templ->type = pCreateInfo->templateType;
+ templ->descriptor_set_layout = set_layout;
+ /* bind_point/set/pipeline_layout are only meaningful for push-descriptor
+  * templates; they are captured unconditionally here. */
+ templ->bind_point = pCreateInfo->pipelineBindPoint;
+ templ->set = pCreateInfo->set;
+ templ->pipeline_layout = lvp_pipeline_layout_from_handle(pCreateInfo->pipelineLayout);
+ templ->entry_count = entry_count;
+
+ /* Deep-copy the entries into the trailing storage allocated above. */
+ VkDescriptorUpdateTemplateEntry *entries = (VkDescriptorUpdateTemplateEntry *)(templ + 1);
+ for (unsigned i = 0; i < entry_count; i++) {
+ entries[i] = pCreateInfo->pDescriptorUpdateEntries[i];
+ }
+
+ *pDescriptorUpdateTemplate = lvp_descriptor_update_template_to_handle(templ);
+ return VK_SUCCESS;
+}
+
+/* vkDestroyDescriptorUpdateTemplate: tear down and free a template.
+ * Destroying VK_NULL_HANDLE is a no-op, per the Vulkan convention.
+ */
+void lvp_DestroyDescriptorUpdateTemplate(VkDevice _device,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+ const VkAllocationCallbacks *pAllocator)
+{
+ LVP_FROM_HANDLE(lvp_device, device, _device);
+ LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, descriptorUpdateTemplate);
+
+ if (!templ)
+ return;
+
+ vk_object_base_finish(&templ->base);
+ /* Single allocation: the trailing entry[] storage is freed with it. */
+ vk_free2(&device->vk.alloc, pAllocator, templ);
+}
+
+/* vkUpdateDescriptorSetWithTemplate: write descriptors into a host-side
+ * descriptor set, driven by the template's entries and the packed
+ * application data in pData.
+ *
+ * For each template entry, descriptorCount descriptors are read from
+ * pData + offset (stride bytes apart) and stored starting at
+ * dstBinding/dstArrayElement in the set.
+ *
+ * Fix: the original code ignored entry->dstArrayElement and always wrote
+ * from element 0 of the binding, clobbering the wrong array slots for any
+ * entry with dstArrayElement != 0.
+ */
+void lvp_UpdateDescriptorSetWithTemplate(VkDevice _device,
+ VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+ const void *pData)
+{
+ LVP_FROM_HANDLE(lvp_descriptor_set, set, descriptorSet);
+ LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, descriptorUpdateTemplate);
+ uint32_t i, j;
+
+ for (i = 0; i < templ->entry_count; ++i) {
+ VkDescriptorUpdateTemplateEntry *entry = &templ->entry[i];
+ const uint8_t *pSrc = ((const uint8_t *) pData) + entry->offset;
+ const struct lvp_descriptor_set_binding_layout *bind_layout =
+ &set->layout->binding[entry->dstBinding];
+ /* Start at dstArrayElement within the binding, not at element 0.
+  * NOTE(review): descriptorCount overflowing into subsequent bindings
+  * (allowed by the spec's rollover rule) is still not handled here. */
+ struct lvp_descriptor *desc =
+ &set->descriptors[bind_layout->descriptor_index + entry->dstArrayElement];
+ for (j = 0; j < entry->descriptorCount; ++j) {
+ switch (entry->descriptorType) {
+ case VK_DESCRIPTOR_TYPE_SAMPLER: {
+ LVP_FROM_HANDLE(lvp_sampler, sampler,
+ *(VkSampler *)pSrc);
+ desc[j] = (struct lvp_descriptor) {
+ .type = VK_DESCRIPTOR_TYPE_SAMPLER,
+ .info.sampler = sampler,
+ };
+ break;
+ }
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
+ VkDescriptorImageInfo *info = (VkDescriptorImageInfo *)pSrc;
+ desc[j] = (struct lvp_descriptor) {
+ .type = entry->descriptorType,
+ .info.iview = lvp_image_view_from_handle(info->imageView),
+ .info.sampler = lvp_sampler_from_handle(info->sampler),
+ };
+ break;
+ }
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+ case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
+ LVP_FROM_HANDLE(lvp_image_view, iview,
+ ((VkDescriptorImageInfo *)pSrc)->imageView);
+ desc[j] = (struct lvp_descriptor) {
+ .type = entry->descriptorType,
+ .info.iview = iview,
+ };
+ break;
+ }
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
+ LVP_FROM_HANDLE(lvp_buffer_view, bview,
+ *(VkBufferView *)pSrc);
+ desc[j] = (struct lvp_descriptor) {
+ .type = entry->descriptorType,
+ .info.buffer_view = bview,
+ };
+ break;
+ }
+
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+ VkDescriptorBufferInfo *info = (VkDescriptorBufferInfo *)pSrc;
+ desc[j] = (struct lvp_descriptor) {
+ .type = entry->descriptorType,
+ .info.offset = info->offset,
+ .info.buffer = lvp_buffer_from_handle(info->buffer),
+ .info.range = info->range,
+ };
+ break;
+ }
+ default:
+ break;
+ }
+ /* Consecutive array elements are stride bytes apart in pData. */
+ pSrc += entry->stride;
+ }
+ }
+}
Extension('VK_KHR_create_renderpass2', 1, False),
Extension('VK_KHR_dedicated_allocation', 1, True),
Extension('VK_KHR_depth_stencil_resolve', 1, False),
- Extension('VK_KHR_descriptor_update_template', 1, False),
+ Extension('VK_KHR_descriptor_update_template', 1, True),
Extension('VK_KHR_device_group', 1, False),
Extension('VK_KHR_device_group_creation', 1, False),
Extension('VK_KHR_draw_indirect_count', 1, True),
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_pool, VkDescriptorPool)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_set, VkDescriptorSet)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_set_layout, VkDescriptorSetLayout)
+LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_descriptor_update_template, VkDescriptorUpdateTemplate)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_device_memory, VkDeviceMemory)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_event, VkEvent)
LVP_DEFINE_NONDISP_HANDLE_CASTS(lvp_framebuffer, VkFramebuffer)
struct list_head sets;
};
+/* Host-side copy of a VkDescriptorUpdateTemplate. The create-info's entries
+ * are copied into the trailing entry[] array, so the whole object is a
+ * single allocation (see lvp_CreateDescriptorUpdateTemplate).
+ */
+struct lvp_descriptor_update_template {
+ struct vk_object_base base;
+ /* Number of elements in entry[] below. */
+ uint32_t entry_count;
+ /* set/bind_point/pipeline_layout come from the create-info and are
+  * meaningful for push-descriptor templates. */
+ uint32_t set;
+ VkDescriptorUpdateTemplateType type;
+ struct lvp_descriptor_set_layout *descriptor_set_layout;
+ VkPipelineBindPoint bind_point;
+ struct lvp_pipeline_layout *pipeline_layout;
+ /* Trailing variable-length storage (zero-length-array idiom). */
+ VkDescriptorUpdateTemplateEntry entry[0];
+};
+
VkResult
lvp_descriptor_set_create(struct lvp_device *device,
const struct lvp_descriptor_set_layout *layout,