.bo = pool->bo,
.offset = set->desc_mem.offset,
};
+ set->desc_offset = anv_address_physical(set->desc_addr) -
+ device->physical->va.internal_surface_state_pool.addr;
enum isl_format format =
anv_isl_format_for_descriptor_type(device,
}
struct anv_address bind_addr = anv_address_add(buffer->address, offset);
- uint64_t bind_range = vk_buffer_range(&buffer->vk, offset, range);
+ desc->bind_range = vk_buffer_range(&buffer->vk, offset, range);
/* We report a bounds checking alignment of 32B for the sake of block
* messages which read an entire register worth at a time.
*/
if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
- bind_range = align64(bind_range, ANV_UBO_ALIGNMENT);
+ desc->bind_range = align64(desc->bind_range, ANV_UBO_ALIGNMENT);
if (data & ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE) {
struct anv_address_range_descriptor desc_data = {
.address = anv_address_physical(bind_addr),
- .range = bind_range,
+ .range = desc->bind_range,
};
memcpy(desc_map, &desc_data, sizeof(desc_data));
}
.address = anv_address_physical(bind_addr),
.mocs = isl_mocs(&device->isl_dev, usage,
bind_addr.bo && bind_addr.bo->is_external),
- .size_B = desc->range,
+ .size_B = desc->bind_range,
.format = format,
.swizzle = ISL_SWIZZLE_IDENTITY,
.stride_B = 1);
desc->set_buffer_view = bview;
- bview->range = bind_range;
+ bview->range = desc->bind_range;
bview->address = bind_addr;
if (set->is_push)
return surface_state;
}
+/* Build the binding-table entry for a single descriptor when the pipeline
+ * uses indirect descriptor layouts: select (or allocate) a surface state
+ * matching the descriptor's type and return its heap-relative offset.
+ * Null descriptors fall back to a null surface state so the shader reads
+ * zeros instead of faulting.
+ */
+static uint32_t
+emit_indirect_descriptor_binding_table_entry(struct anv_cmd_buffer *cmd_buffer,
+ struct anv_cmd_pipeline_state *pipe_state,
+ struct anv_pipeline_binding *binding,
+ const struct anv_descriptor *desc)
+{
+ struct anv_device *device = cmd_buffer->device;
+ struct anv_state surface_state;
+
+ /* Relative offset in the STATE_BASE_ADDRESS::SurfaceStateBaseAddress heap.
+ * Depending on where the descriptor surface state is allocated, they can
+ * either come from device->internal_surface_state_pool or
+ * device->bindless_surface_state_pool.
+ */
+ switch (desc->type) {
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
+ if (desc->image_view) {
+ /* GENERAL layout uses the general sampler state; any other layout
+ * uses the optimal one.
+ */
+ struct anv_surface_state sstate =
+ (desc->layout == VK_IMAGE_LAYOUT_GENERAL) ?
+ desc->image_view->planes[binding->plane].general_sampler :
+ desc->image_view->planes[binding->plane].optimal_sampler;
+ surface_state =
+ anv_bindless_state_for_binding_table(device, sstate.state);
+ assert(surface_state.alloc_size);
+ } else {
+ surface_state = anv_null_surface_state_for_binding_table(device);
+ }
+ break;
+ }
+
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
+ if (desc->image_view) {
+ struct anv_surface_state sstate =
+ desc->image_view->planes[binding->plane].storage;
+ surface_state = anv_bindless_state_for_binding_table(
+ device, sstate.state);
+ assert(surface_state.alloc_size);
+ } else {
+ surface_state =
+ anv_null_surface_state_for_binding_table(device);
+ }
+ break;
+ }
+
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ /* Plain buffers use the buffer view embedded in the descriptor set. */
+ if (desc->set_buffer_view) {
+ surface_state = desc->set_buffer_view->general.state;
+ assert(surface_state.alloc_size);
+ } else {
+ surface_state = anv_null_surface_state_for_binding_table(device);
+ }
+ break;
+
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ if (desc->buffer_view) {
+ surface_state = anv_bindless_state_for_binding_table(
+ device,
+ desc->buffer_view->general.state);
+ assert(surface_state.alloc_size);
+ } else {
+ surface_state = anv_null_surface_state_for_binding_table(device);
+ }
+ break;
+
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+ /* Dynamic buffers cannot use a pre-baked surface state; emit one now
+ * for the currently bound offset.
+ */
+ if (desc->buffer) {
+ surface_state =
+ emit_dynamic_buffer_binding_table_entry(cmd_buffer, pipe_state,
+ binding, desc);
+ } else {
+ surface_state = anv_null_surface_state_for_binding_table(device);
+ }
+ break;
+ }
+
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+ if (desc->buffer_view) {
+ surface_state = anv_bindless_state_for_binding_table(
+ device, desc->buffer_view->storage.state);
+ assert(surface_state.alloc_size);
+ } else {
+ surface_state = anv_null_surface_state_for_binding_table(device);
+ }
+ break;
+
+ default:
+ unreachable("Invalid descriptor type");
+ }
+
+ assert(surface_state.map);
+ return surface_state.offset;
+}
+
+/* Build the binding-table entry for a single descriptor when the pipeline
+ * uses direct descriptor layouts.  Returns the heap-relative offset of the
+ * descriptor's surface state.
+ */
+static uint32_t
+emit_direct_descriptor_binding_table_entry(struct anv_cmd_buffer *cmd_buffer,
+ struct anv_cmd_pipeline_state *pipe_state,
+ const struct anv_descriptor_set *set,
+ struct anv_pipeline_binding *binding,
+ const struct anv_descriptor *desc)
+{
+ struct anv_device *device = cmd_buffer->device;
+ uint32_t desc_offset;
+
+ /* Relative offset in the STATE_BASE_ADDRESS::SurfaceStateBaseAddress heap.
+ * For most types the surface state already lives inside the descriptor
+ * set's memory, so the entry is just the set's heap offset plus the
+ * binding's offset within the set.  Dynamic buffers are the exception:
+ * their surface state is emitted now via
+ * emit_dynamic_buffer_binding_table_entry().
+ */
+ switch (desc->type) {
+ case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+ case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+ case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+ case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+ case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+ case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+ desc_offset = set->desc_offset + binding->set_offset;
+ break;
+
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+ /* NULL buffer falls back to a null surface state. */
+ struct anv_state state = desc->buffer ?
+ emit_dynamic_buffer_binding_table_entry(cmd_buffer, pipe_state,
+ binding, desc) :
+ anv_null_surface_state_for_binding_table(device);
+ desc_offset = state.offset;
+ break;
+ }
+
+ default:
+ unreachable("Invalid descriptor type");
+ }
+
+ return desc_offset;
+}
+
static VkResult
emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
struct anv_cmd_pipeline_state *pipe_state,
}
const struct anv_descriptor *desc = &set->descriptors[binding->index];
- /* Relative offset in the STATE_BASE_ADDRESS::SurfaceStateBaseAddress
- * heap. Depending on where the descriptor surface state is
- * allocated, they can either come from
- * device->internal_surface_state_pool or
- * device->bindless_surface_state_pool.
- */
- switch (desc->type) {
- case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
- case VK_DESCRIPTOR_TYPE_SAMPLER:
+ if (desc->type == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR ||
+ desc->type == VK_DESCRIPTOR_TYPE_SAMPLER) {
/* Nothing for us to do here */
continue;
-
- case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
- case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
- case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
- if (desc->image_view) {
- struct anv_surface_state sstate =
- (desc->layout == VK_IMAGE_LAYOUT_GENERAL) ?
- desc->image_view->planes[binding->plane].general_sampler :
- desc->image_view->planes[binding->plane].optimal_sampler;
- surface_state =
- anv_bindless_state_for_binding_table(cmd_buffer->device, sstate.state);
- assert(surface_state.alloc_size);
- } else {
- surface_state =
- anv_null_surface_state_for_binding_table(cmd_buffer->device);
- }
- break;
- }
-
- case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
- if (desc->image_view) {
- struct anv_surface_state sstate =
- desc->image_view->planes[binding->plane].storage;
- surface_state = anv_bindless_state_for_binding_table(
- cmd_buffer->device, sstate.state);
- assert(surface_state.alloc_size);
- } else {
- surface_state =
- anv_null_surface_state_for_binding_table(cmd_buffer->device);
- }
- break;
- }
-
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
- if (desc->set_buffer_view) {
- surface_state = desc->set_buffer_view->general.state;
- assert(surface_state.alloc_size);
- } else {
- surface_state =
- anv_null_surface_state_for_binding_table(cmd_buffer->device);
- }
- break;
-
- case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
- if (desc->buffer_view) {
- surface_state = anv_bindless_state_for_binding_table(
- cmd_buffer->device,
- desc->buffer_view->general.state);
- assert(surface_state.alloc_size);
- } else {
- surface_state =
- anv_null_surface_state_for_binding_table(cmd_buffer->device);
- }
- break;
-
- case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
- case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
- if (desc->buffer) {
- surface_state =
- emit_dynamic_buffer_binding_table_entry(cmd_buffer,
- pipe_state,
- binding, desc);
- } else {
- surface_state =
- anv_null_surface_state_for_binding_table(cmd_buffer->device);
- }
- break;
}
- case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
- if (desc->buffer_view) {
- surface_state = anv_bindless_state_for_binding_table(
- cmd_buffer->device,
- desc->buffer_view->storage.state);
- assert(surface_state.alloc_size);
- } else {
- surface_state =
- anv_null_surface_state_for_binding_table(cmd_buffer->device);
- }
- break;
-
- default:
- assert(!"Invalid descriptor type");
- continue;
+ const struct anv_pipeline *pipeline = pipe_state->pipeline;
+ uint32_t surface_state_offset;
+ if (pipeline->layout.type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_INDIRECT) {
+ surface_state_offset =
+ emit_indirect_descriptor_binding_table_entry(cmd_buffer,
+ pipe_state,
+ binding, desc);
+ } else {
+ surface_state_offset =
+ emit_direct_descriptor_binding_table_entry(cmd_buffer, pipe_state,
+ set, binding, desc);
}
- assert(surface_state.map);
- bt_map[s] = surface_state.offset + state_offset;
+ bt_map[s] = surface_state_offset + state_offset;
break;
}
}
&set->descriptors[range->index];
if (desc->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
- if (desc->buffer_view)
- return desc->buffer_view->address;
+ if (desc->buffer) {
+ return anv_address_add(desc->buffer->address,
+ desc->offset);
+ }
} else {
assert(desc->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
if (desc->buffer) {
/* Here we promote a UBO to a binding table entry so that we can avoid a layer of indirection.
* We use the descriptor set's internally allocated surface state to fill the binding table entry.
*/
- if (!desc->set_buffer_view)
+ if (!desc->buffer)
return 0;
- if (range->start * 32 > desc->set_buffer_view->range)
+ if (range->start * 32 > desc->bind_range)
return 0;
- return desc->set_buffer_view->range;
+ return desc->bind_range;
} else {
if (!desc->buffer)
return 0;