ctx->gfx_pipeline_state.patch_vertices = patch_vertices;
}
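+/* Refresh the fbfetch descriptor info: if the bound fragment shader reads the
+ * framebuffer, point it at color attachment 0 in GENERAL layout; otherwise
+ * reset it to a null handle (or the dummy surface when nullDescriptor is
+ * unsupported).
+ */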
+void
+zink_update_fbfetch(struct zink_context *ctx)
+{
+ const bool had_fbfetch = ctx->di.fbfetch.imageLayout == VK_IMAGE_LAYOUT_GENERAL;
+ if (!ctx->gfx_stages[PIPE_SHADER_FRAGMENT] ||
+ !ctx->gfx_stages[PIPE_SHADER_FRAGMENT]->nir->info.fs.uses_fbfetch_output) {
+ if (!had_fbfetch)
+ return;
+ ctx->di.fbfetch.imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+ ctx->di.fbfetch.imageView = zink_screen(ctx->base.screen)->info.rb2_feats.nullDescriptor ?
+ VK_NULL_HANDLE :
+ zink_surface(ctx->dummy_surface)->image_view;
+ return;
+ }
+
+ if (ctx->fb_state.cbufs[0]) {
+ VkImageView fbfetch = zink_surface(ctx->fb_state.cbufs[0])->image_view;
+ ctx->di.fbfetch.imageView = fbfetch;
+ }
+ ctx->di.fbfetch.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
+}
+
static uint32_t
hash_render_pass_state(const void *key)
{
unsigned h = ctx->fb_state.height;
util_copy_framebuffer_state(&ctx->fb_state, state);
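+ /* cbuf 0 may have changed, so the fbfetch descriptor needs a refresh */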
+ zink_update_fbfetch(ctx);
unsigned prev_void_alpha_attachments = ctx->gfx_pipeline_state.void_alpha_attachments;
ctx->gfx_pipeline_state.void_alpha_attachments = 0;
for (int i = 0; i < ctx->fb_state.nr_cbufs; i++) {
update_descriptor_state_image(ctx, i, j);
}
}
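+ /* without nullDescriptor, the fbfetch slot must always reference a valid view */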
+ if (!screen->info.rb2_feats.nullDescriptor)
+ ctx->di.fbfetch.imageView = zink_surface(ctx->dummy_surface)->image_view;
p_atomic_inc(&screen->base.num_contexts);
zink_select_draw_vbo(ctx);
struct zink_framebuffer_clear fb_clears[PIPE_MAX_COLOR_BUFS + 1];
uint16_t clears_enabled;
uint16_t rp_clears_enabled;
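+ /* bitmask of color outputs (relative to FRAG_RESULT_DATA0) that the bound fs reads back */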
+ uint16_t fbfetch_outputs;
VkBuffer vbufs[PIPE_MAX_ATTRIBS];
unsigned vbuf_offsets[PIPE_MAX_ATTRIBS];
VkBufferView texel_images[PIPE_SHADER_TYPES][PIPE_MAX_SHADER_IMAGES];
uint8_t num_images[PIPE_SHADER_TYPES];
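+ /* image descriptor used for framebuffer-fetch reads of color attachment 0 */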
+ VkDescriptorImageInfo fbfetch;
+
struct zink_resource *descriptor_res[ZINK_DESCRIPTOR_TYPES][PIPE_SHADER_TYPES][PIPE_MAX_SAMPLERS];
struct zink_descriptor_surface sampler_surfaces[PIPE_SHADER_TYPES][PIPE_MAX_SAMPLERS];
struct zink_descriptor_surface image_surfaces[PIPE_SHADER_TYPES][PIPE_MAX_SHADER_IMAGES];
void
zink_flush_queue(struct zink_context *ctx);

+void
+zink_update_fbfetch(struct zink_context *ctx);
+
bool
zink_resource_access_is_write(VkAccessFlags flags);
zink_bind_fs_state(struct pipe_context *pctx,
void *cso)
{
- bind_stage(zink_context(pctx), PIPE_SHADER_FRAGMENT, cso);
+ struct zink_context *ctx = zink_context(pctx);
+ bind_stage(ctx, PIPE_SHADER_FRAGMENT, cso);
+ ctx->fbfetch_outputs = 0;
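+ /* collect the color outputs that this shader declares as fb_fetch_output */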
+ if (cso) {
+ nir_shader *nir = ctx->gfx_stages[PIPE_SHADER_FRAGMENT]->nir;
+ if (nir->info.fs.uses_fbfetch_output) {
+ nir_foreach_shader_out_variable(var, nir) {
+ if (var->data.fb_fetch_output)
+ ctx->fbfetch_outputs |= BITFIELD_BIT(var->data.location - FRAG_RESULT_DATA0);
+ }
+ }
+ }
+ zink_update_fbfetch(ctx);
}
static void