.logic_op_enable = 0u,
.stippled_line_enable = 0u,
.alpha_to_coverage_enable = 0u,
+ .sample_mask = 0u,
};
static void
RADV_CMP_COPY(alpha_to_coverage_enable, RADV_DYNAMIC_ALPHA_TO_COVERAGE_ENABLE);
+ RADV_CMP_COPY(sample_mask, RADV_DYNAMIC_SAMPLE_MASK);
+
#undef RADV_CMP_COPY
cmd_buffer->state.dirty |= dest_mask;
}
static void
+/* Emit the currently bound dynamic sample mask into the context registers.
+ *
+ * PA_SC_AA_MASK_X0Y0_X1Y0 / _X0Y1_X1Y1 each pack two 16-bit per-pixel
+ * coverage masks into one 32-bit register, so the 16-bit dynamic mask is
+ * replicated into both the low and high halves, and written identically
+ * to both registers so every pixel of the 2x2 quad uses the same mask.
+ */
+radv_emit_sample_mask(struct radv_cmd_buffer *cmd_buffer)
+{
+ struct radv_dynamic_state *d = &cmd_buffer->state.dynamic;
+
+ radeon_set_context_reg_seq(cmd_buffer->cs, R_028C38_PA_SC_AA_MASK_X0Y0_X1Y0, 2);
+ radeon_emit(cmd_buffer->cs, d->sample_mask | ((uint32_t)d->sample_mask << 16));
+ radeon_emit(cmd_buffer->cs, d->sample_mask | ((uint32_t)d->sample_mask << 16));
+}
+
+static void
radv_cmd_buffer_flush_dynamic_state(struct radv_cmd_buffer *cmd_buffer, bool pipeline_is_dirty)
{
uint64_t states =
if (states & RADV_CMD_DIRTY_DYNAMIC_ALPHA_TO_COVERAGE_ENABLE)
radv_emit_alpha_to_coverage_enable(cmd_buffer);
+ if (states & RADV_CMD_DIRTY_DYNAMIC_SAMPLE_MASK)
+ radv_emit_sample_mask(cmd_buffer);
+
cmd_buffer->state.dirty &= ~states;
}
}
VKAPI_ATTR void VKAPI_CALL
+/* Entry point for vkCmdSetSampleMaskEXT (VK_EXT_extended_dynamic_state3).
+ *
+ * Records the application-provided sample mask as dynamic command-buffer
+ * state and marks it dirty; the register write itself is deferred to the
+ * next draw via the dirty-state flush path.  The 'samples' parameter is
+ * not needed here: the hardware mask is at most 16 bits wide, so only the
+ * low 16 bits of the first VkSampleMask word are ever consumed.
+ */
+radv_CmdSetSampleMaskEXT(VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples,
+ const VkSampleMask *pSampleMask)
+{
+ RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
+ struct radv_cmd_state *state = &cmd_buffer->state;
+
+ state->dynamic.sample_mask = pSampleMask[0] & 0xffff;
+
+ state->dirty |= RADV_CMD_DIRTY_DYNAMIC_SAMPLE_MASK;
+}
+
+VKAPI_ATTR void VKAPI_CALL
radv_CmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount,
const VkCommandBuffer *pCmdBuffers)
{
unsigned num_tile_pipes = pdevice->rad_info.num_tile_pipes;
const VkConservativeRasterizationModeEXT mode = state->rs->conservative_mode;
bool out_of_order_rast = false;
- uint32_t sample_mask = 0xffff;
int ps_iter_samples = 1;
ms->num_samples = state->ms ? state->ms->rasterization_samples : 1;
if (ps_iter_samples > 1)
pipeline->spi_baryc_cntl |= S_0286E0_POS_FLOAT_LOCATION(2);
}
-
- if (state->ms) {
- sample_mask = state->ms->sample_mask & 0xffff;
- }
-
- ms->pa_sc_aa_mask[0] = sample_mask | ((uint32_t)sample_mask << 16);
- ms->pa_sc_aa_mask[1] = sample_mask | ((uint32_t)sample_mask << 16);
}
static void
dynamic->alpha_to_coverage_enable = state->ms->alpha_to_coverage_enable;
}
+ if (states & RADV_DYNAMIC_SAMPLE_MASK) {
+ dynamic->sample_mask = state->ms->sample_mask & 0xffff;
+ }
+
pipeline->dynamic_state.mask = states;
}
const struct radv_physical_device *pdevice = pipeline->base.device->physical_device;
const struct radv_multisample_state *ms = &pipeline->ms;
- radeon_set_context_reg_seq(ctx_cs, R_028C38_PA_SC_AA_MASK_X0Y0_X1Y0, 2);
- radeon_emit(ctx_cs, ms->pa_sc_aa_mask[0]);
- radeon_emit(ctx_cs, ms->pa_sc_aa_mask[1]);
-
radeon_set_context_reg(ctx_cs, R_028804_DB_EQAA, ms->db_eqaa);
radeon_set_context_reg(ctx_cs, R_028BE0_PA_SC_AA_CONFIG, ms->pa_sc_aa_config);
bool stippled_line_enable;
bool alpha_to_coverage_enable;
+
+ uint16_t sample_mask;
};
extern const struct radv_dynamic_state default_dynamic_state;
uint32_t pa_sc_mode_cntl_0;
uint32_t pa_sc_mode_cntl_1;
uint32_t pa_sc_aa_config;
- uint32_t pa_sc_aa_mask[2];
unsigned num_samples;
};