binding->pImmutableSamplers = NULL;
}
-bool
-zink_descriptor_util_push_layouts_get(struct zink_context *ctx, struct zink_descriptor_layout **dsls, struct zink_descriptor_layout_key **layout_keys)
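+/* pick the descriptor set layout type and the VkDescriptorType used for the push set:
+ * lazy mode uses plain UBOs (and the dedicated push type when KHR_push_descriptor is available),
+ * while other modes use dynamic UBOs under the UBO type
+ */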
+static VkDescriptorType
+get_push_types(struct zink_screen *screen, enum zink_descriptor_type *dsl_type)
+{
+ *dsl_type = screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY &&
+ screen->info.have_KHR_push_descriptor ? ZINK_DESCRIPTOR_TYPES : ZINK_DESCRIPTOR_TYPE_UBO;
+ return screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY ?
+ VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER : VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+}
+
+static struct zink_descriptor_layout *
+create_gfx_layout(struct zink_context *ctx, struct zink_descriptor_layout_key **layout_key, bool fbfetch)
{
struct zink_screen *screen = zink_screen(ctx->base.screen);
VkDescriptorSetLayoutBinding bindings[PIPE_SHADER_TYPES];
- VkDescriptorSetLayoutBinding compute_binding;
- VkDescriptorType vktype = screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY ?
- VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER : VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
+ enum zink_descriptor_type dsl_type;
+ VkDescriptorType vktype = get_push_types(screen, &dsl_type);
for (unsigned i = 0; i < ZINK_SHADER_COUNT; i++)
init_push_binding(&bindings[i], i, vktype);
+ if (fbfetch) {
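+ /* fbfetch: append an input attachment binding for the fragment stage */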
+ bindings[ZINK_SHADER_COUNT].binding = ZINK_FBFETCH_BINDING;
+ bindings[ZINK_SHADER_COUNT].descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
+ bindings[ZINK_SHADER_COUNT].descriptorCount = 1;
+ bindings[ZINK_SHADER_COUNT].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
+ bindings[ZINK_SHADER_COUNT].pImmutableSamplers = NULL;
+ }
+ return create_layout(ctx, dsl_type, bindings, fbfetch ? ARRAY_SIZE(bindings) : ARRAY_SIZE(bindings) - 1, layout_key);
+}
+
+bool
+zink_descriptor_util_push_layouts_get(struct zink_context *ctx, struct zink_descriptor_layout **dsls, struct zink_descriptor_layout_key **layout_keys)
+{
+ struct zink_screen *screen = zink_screen(ctx->base.screen);
+ VkDescriptorSetLayoutBinding compute_binding;
+ enum zink_descriptor_type dsl_type;
+ VkDescriptorType vktype = get_push_types(screen, &dsl_type);
init_push_binding(&compute_binding, PIPE_SHADER_COMPUTE, vktype);
- /* fbfetch */
- bindings[ZINK_SHADER_COUNT].binding = ZINK_FBFETCH_BINDING;
- bindings[ZINK_SHADER_COUNT].descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
- bindings[ZINK_SHADER_COUNT].descriptorCount = 1;
- bindings[ZINK_SHADER_COUNT].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
- bindings[ZINK_SHADER_COUNT].pImmutableSamplers = NULL;
- enum zink_descriptor_type dsl_type = screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY &&
- screen->info.have_KHR_push_descriptor ? ZINK_DESCRIPTOR_TYPES : ZINK_DESCRIPTOR_TYPE_UBO;
- dsls[0] = create_layout(ctx, dsl_type, bindings, ARRAY_SIZE(bindings), &layout_keys[0]);
+ dsls[0] = create_gfx_layout(ctx, &layout_keys[0], false);
dsls[1] = create_layout(ctx, dsl_type, &compute_binding, 1, &layout_keys[1]);
return dsls[0] && dsls[1];
}
sizes[0].descriptorCount = ZINK_SHADER_COUNT * ZINK_DEFAULT_MAX_DESCS;
sizes[1].type = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
sizes[1].descriptorCount = ZINK_DEFAULT_MAX_DESCS;
- ctx->dd->push_pool[0] = descriptor_pool_get(ctx, 0, ctx->dd->push_layout_keys[0], sizes, 2);
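+ /* only count the input attachment size once fbfetch has been enabled */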
+ ctx->dd->push_pool[0] = descriptor_pool_get(ctx, 0, ctx->dd->push_layout_keys[0], sizes, ctx->dd->has_fbfetch ? 2 : 1);
sizes[0].descriptorCount = ZINK_DEFAULT_MAX_DESCS;
ctx->dd->push_pool[1] = descriptor_pool_get(ctx, 0, ctx->dd->push_layout_keys[1], sizes, 1);
return ctx->dd->push_pool[0] && ctx->dd->push_pool[1];
}
}
}
+
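+/* switch the gfx push layout to one that includes the fbfetch input attachment binding;
+ * called when a program using fbfetch output is first initialized
+ */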
+void
+zink_descriptor_util_init_fbfetch(struct zink_context *ctx)
+{
+ if (ctx->dd->has_fbfetch)
+ return;
+
+ struct zink_screen *screen = zink_screen(ctx->base.screen);
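+ /* the existing gfx push layout has no fbfetch binding, so destroy it and build a replacement */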
+ vkDestroyDescriptorSetLayout(screen->dev, ctx->dd->push_dsl[0]->layout, NULL);
+ ralloc_free(ctx->dd->push_dsl[0]);
+ ralloc_free(ctx->dd->push_layout_keys[0]);
+ ctx->dd->push_dsl[0] = create_gfx_layout(ctx, &ctx->dd->push_layout_keys[0], true);
+ ctx->dd->has_fbfetch = true;
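+ /* cached (non-lazy) mode sizes its push pools from has_fbfetch, so reinitialize them with the new layout */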
+ if (screen->descriptor_mode != ZINK_DESCRIPTOR_MODE_LAZY)
+ zink_descriptor_pool_init(ctx);
+}
struct zink_program *pg[2]; //gfx, compute
VkDescriptorSetLayout dsl[2][ZINK_DESCRIPTOR_TYPES];
unsigned push_usage[2];
+ bool has_fbfetch;
};
ALWAYS_INLINE static struct zink_descriptor_data_lazy *
stages = &((struct zink_compute_program*)pg)->shader;
else {
stages = ((struct zink_gfx_program*)pg)->shaders;
- if (stages[PIPE_SHADER_FRAGMENT]->nir->info.fs.uses_fbfetch_output)
+ if (stages[PIPE_SHADER_FRAGMENT]->nir->info.fs.uses_fbfetch_output) {
+ zink_descriptor_util_init_fbfetch(ctx);
push_count = 1;
+ }
}
if (!pg->dd)
/* number of descriptors in template */
unsigned wd_count[ZINK_DESCRIPTOR_TYPES + 1];
if (push_count)
- wd_count[0] = pg->is_compute ? 1 : (ZINK_SHADER_COUNT + 1);
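+ /* the gfx push template has one UBO per stage plus the fbfetch input attachment when enabled */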
+ wd_count[0] = pg->is_compute ? 1 : (ZINK_SHADER_COUNT + !!ctx->dd->has_fbfetch);
for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++)
wd_count[i + 1] = pg->dd->layout_key[i] ? pg->dd->layout_key[i]->num_descriptors : 0;
}
static struct zink_descriptor_pool *
-create_push_pool(struct zink_screen *screen, struct zink_batch_descriptor_data_lazy *bdd, bool is_compute)
+create_push_pool(struct zink_screen *screen, struct zink_batch_descriptor_data_lazy *bdd, bool is_compute, bool has_fbfetch)
{
struct zink_descriptor_pool *pool = rzalloc(bdd, struct zink_descriptor_pool);
VkDescriptorPoolSize sizes[2];
sizes[1].type = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
sizes[1].descriptorCount = ZINK_DEFAULT_MAX_DESCS;
}
- pool->pool = create_pool(screen, 1 + !is_compute, sizes, 0);
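+ /* gfx pools only need the input attachment size once fbfetch is enabled; compute pools never do */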
+ pool->pool = create_pool(screen, !is_compute && has_fbfetch ? 2 : 1, sizes, 0);
return pool;
}
{
struct zink_screen *screen = zink_screen(ctx->base.screen);
/* allocate up to $current * 10, e.g., 10 -> 100 or 100 -> 1000 */
- if (pool->set_idx == pool->sets_alloc) {
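+ /* a pool created before fbfetch was enabled lacks input attachment descriptors and must be replaced */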
+ if (pool->set_idx == pool->sets_alloc || unlikely(ctx->dd->has_fbfetch != bdd->has_fbfetch)) {
unsigned sets_to_alloc = MIN2(MAX2(pool->sets_alloc * 10, 10), ZINK_DEFAULT_MAX_DESCS) - pool->sets_alloc;
- if (!sets_to_alloc) {
+ if (!sets_to_alloc || unlikely(ctx->dd->has_fbfetch != bdd->has_fbfetch)) {
/* overflowed pool: queue for deletion on next reset */
util_dynarray_append(&bdd->overflowed_pools, struct zink_descriptor_pool*, pool);
- bdd->push_pool[is_compute] = create_push_pool(screen, bdd, is_compute);
+ bdd->push_pool[is_compute] = create_push_pool(screen, bdd, is_compute, ctx->dd->has_fbfetch);
+ /* record the new pool's fbfetch state so the mismatch check above doesn't retrigger endlessly */
+ bdd->has_fbfetch = ctx->dd->has_fbfetch;
ctx->oom_flush = true;
return check_push_pool_alloc(ctx, bdd->push_pool[is_compute], bdd, is_compute);
}
}
util_dynarray_init(&bdd->overflowed_pools, bs->dd);
if (!screen->info.have_KHR_push_descriptor) {
- bdd->push_pool[0] = create_push_pool(screen, bdd, false);
- bdd->push_pool[1] = create_push_pool(screen, bdd, true);
+ bdd->push_pool[0] = create_push_pool(screen, bdd, false, false);
+ bdd->push_pool[1] = create_push_pool(screen, bdd, true, false);
}
return true;
}