mai.pNext = NULL;
mai.allocationSize = reqs.size;
enum zink_heap heap = zink_heap_from_domain_flags(flags, aflags);
- mai.memoryTypeIndex = screen->heap_map[heap];
- if (unlikely(!(reqs.memoryTypeBits & BITFIELD_BIT(mai.memoryTypeIndex)))) {
+ mai.memoryTypeIndex = zink_heap_idx_from_bits(screen, heap, reqs.memoryTypeBits);
+ if (mai.memoryTypeIndex == UINT32_MAX) {
/* not valid based on reqs; demote to more compatible type */
switch (heap) {
case ZINK_HEAP_DEVICE_LOCAL_VISIBLE:
default:
break;
}
- mai.memoryTypeIndex = screen->heap_map[heap];
- assert(reqs.memoryTypeBits & BITFIELD_BIT(mai.memoryTypeIndex));
+ mai.memoryTypeIndex = zink_heap_idx_from_bits(screen, heap, reqs.memoryTypeBits);
+ assert(mai.memoryTypeIndex != UINT32_MAX);
}
+ assert(reqs.memoryTypeBits & BITFIELD_BIT(mai.memoryTypeIndex));
VkMemoryDedicatedAllocateInfo ded_alloc_info = {
.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
};
unsigned size = UINT32_MAX;
for (unsigned i = 0; i < ARRAY_SIZE(heaps); i++) {
- unsigned heap_idx = screen->info.mem_props.memoryTypes[screen->heap_map[i]].heapIndex;
- size = MIN2(screen->info.mem_props.memoryHeaps[heap_idx].size, size);
+ for (unsigned j = 0; j < screen->heap_count[i]; j++) {
+ unsigned heap_idx = screen->info.mem_props.memoryTypes[screen->heap_map[i][j]].heapIndex;
+ size = MIN2(screen->info.mem_props.memoryHeaps[heap_idx].size, size);
+ }
}
return size;
}
for (unsigned j = 0; j < screen->info.mem_props.memoryTypeCount; j++) {
VkMemoryPropertyFlags domains = vk_domain_from_heap(i);
if ((screen->info.mem_props.memoryTypes[j].propertyFlags & domains) == domains) {
- assert(screen->heap_map[i] == UINT8_MAX);
- screen->heap_map[i] = j;
- break;
+ screen->heap_map[i][screen->heap_count[i]++] = j;
}
}
-
+ }
+ /* iterate again to check for missing heaps */
+ for (enum zink_heap i = 0; i < ZINK_HEAP_MAX; i++) {
/* not found: use compatible heap */
- if (screen->heap_map[i] == UINT8_MAX) {
+ if (screen->heap_map[i][0] == UINT8_MAX) {
/* only cached mem has a failure case for now */
assert(i == ZINK_HEAP_HOST_VISIBLE_CACHED || i == ZINK_HEAP_DEVICE_LOCAL_LAZY ||
i == ZINK_HEAP_DEVICE_LOCAL_VISIBLE);
- if (i == ZINK_HEAP_HOST_VISIBLE_CACHED)
- screen->heap_map[i] = screen->heap_map[ZINK_HEAP_HOST_VISIBLE_COHERENT];
- else
- screen->heap_map[i] = screen->heap_map[ZINK_HEAP_DEVICE_LOCAL];
+ if (i == ZINK_HEAP_HOST_VISIBLE_CACHED) {
+ memcpy(screen->heap_map[i], screen->heap_map[ZINK_HEAP_HOST_VISIBLE_COHERENT], screen->heap_count[ZINK_HEAP_HOST_VISIBLE_COHERENT]);
+ screen->heap_count[i] = screen->heap_count[ZINK_HEAP_HOST_VISIBLE_COHERENT];
+ } else {
+ memcpy(screen->heap_map[i], screen->heap_map[ZINK_HEAP_DEVICE_LOCAL], screen->heap_count[ZINK_HEAP_DEVICE_LOCAL]);
+ screen->heap_count[i] = screen->heap_count[ZINK_HEAP_DEVICE_LOCAL];
+ }
}
- screen->heap_flags[i] = screen->info.mem_props.memoryTypes[screen->heap_map[i]].propertyFlags;
}
{
-      unsigned vis_vram = screen->heap_map[ZINK_HEAP_DEVICE_LOCAL_VISIBLE];
-      unsigned vram = screen->heap_map[ZINK_HEAP_DEVICE_LOCAL];
+      /* heap_map[heap] now holds a list of memory-type indices; scan every
+       * mapped type and track the largest backing heap for each domain.
+       * Note: the loop index counts heap_map entries, so it must be
+       * dereferenced through heap_map before indexing memoryTypes.
+       */
+      uint64_t biggest_vis_vram = 0;
+      for (unsigned i = 0; i < screen->heap_count[ZINK_HEAP_DEVICE_LOCAL_VISIBLE]; i++)
+         biggest_vis_vram = MAX2(biggest_vis_vram, screen->info.mem_props.memoryHeaps[screen->info.mem_props.memoryTypes[screen->heap_map[ZINK_HEAP_DEVICE_LOCAL_VISIBLE][i]].heapIndex].size);
+      uint64_t biggest_vram = 0;
+      for (unsigned i = 0; i < screen->heap_count[ZINK_HEAP_DEVICE_LOCAL]; i++)
+         biggest_vram = MAX2(biggest_vram, screen->info.mem_props.memoryHeaps[screen->info.mem_props.memoryTypes[screen->heap_map[ZINK_HEAP_DEVICE_LOCAL][i]].heapIndex].size);
/* determine if vis vram is roughly equal to total vram */
-      if (screen->info.mem_props.memoryHeaps[screen->info.mem_props.memoryTypes[vis_vram].heapIndex].size >
-          screen->info.mem_props.memoryHeaps[screen->info.mem_props.memoryTypes[vram].heapIndex].size * 0.9)
+      if (biggest_vis_vram > biggest_vram * 0.9)
screen->resizable_bar = true;
}