}
cmd->u.vertex_buffers.buffers = buffers;
cmd->u.vertex_buffers.offsets = offsets;
+ cmd->u.vertex_buffers.strides = NULL;
cmd_buf_queue(cmd_buffer, cmd);
}
return;
cmd_buf_queue(cmd_buffer, cmd);
}
+
+/* VK_EXT_extended_dynamic_state: record a dynamic cull-mode change.
+ * Applied at execute time by handle_set_cull_mode().
+ */
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetCullModeEXT(
+   VkCommandBuffer commandBuffer,
+   VkCullModeFlags cullMode)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_CULL_MODE);
+   if (!cmd)
+      return;
+
+   cmd->u.set_cull_mode.cull_mode = cullMode;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+/* VK_EXT_extended_dynamic_state: record a dynamic front-face change.
+ * Applied at execute time by handle_set_front_face().
+ */
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetFrontFaceEXT(
+   VkCommandBuffer commandBuffer,
+   VkFrontFace frontFace)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_FRONT_FACE);
+   if (!cmd)
+      return;
+
+   cmd->u.set_front_face.front_face = frontFace;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+/* VK_EXT_extended_dynamic_state: record a dynamic primitive-topology
+ * change.  Applied at execute time by handle_set_primitive_topology().
+ */
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetPrimitiveTopologyEXT(
+   VkCommandBuffer commandBuffer,
+   VkPrimitiveTopology primitiveTopology)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_PRIMITIVE_TOPOLOGY);
+   if (!cmd)
+      return;
+
+   cmd->u.set_primitive_topology.prim = primitiveTopology;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+/* VK_EXT_extended_dynamic_state: set the viewports AND the viewport
+ * count dynamically.  Reuses LVP_CMD_SET_VIEWPORT; first_viewport is
+ * set to the UINT32_MAX sentinel so handle_set_viewport() knows to also
+ * update state->num_viewports (with-count semantics always start at
+ * viewport 0).
+ */
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetViewportWithCountEXT(
+   VkCommandBuffer commandBuffer,
+   uint32_t viewportCount,
+   const VkViewport* pViewports)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+   int i;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_VIEWPORT);
+   if (!cmd)
+      return;
+
+   /* UINT32_MAX marks this as the "with count" variant */
+   cmd->u.set_viewport.first_viewport = UINT32_MAX;
+   cmd->u.set_viewport.viewport_count = viewportCount;
+   for (i = 0; i < viewportCount; i++)
+      cmd->u.set_viewport.viewports[i] = pViewports[i];
+
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+/* VK_EXT_extended_dynamic_state: set the scissors AND the scissor count
+ * dynamically.  Reuses LVP_CMD_SET_SCISSOR; first_scissor is set to the
+ * UINT32_MAX sentinel so handle_set_scissor() knows to also update
+ * state->num_scissors (with-count semantics always start at scissor 0).
+ */
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetScissorWithCountEXT(
+   VkCommandBuffer commandBuffer,
+   uint32_t scissorCount,
+   const VkRect2D* pScissors)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+   int i;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_SCISSOR);
+   if (!cmd)
+      return;
+
+   /* UINT32_MAX marks this as the "with count" variant */
+   cmd->u.set_scissor.first_scissor = UINT32_MAX;
+   cmd->u.set_scissor.scissor_count = scissorCount;
+   for (i = 0; i < scissorCount; i++)
+      cmd->u.set_scissor.scissors[i] = pScissors[i];
+
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+/* VK_EXT_extended_dynamic_state: bind vertex buffers with optional
+ * per-binding sizes and strides.  Reuses LVP_CMD_BIND_VERTEX_BUFFERS.
+ *
+ * Per the spec both pSizes and pStrides may be NULL; the original code
+ * dereferenced pStrides unconditionally.  When pStrides is NULL we
+ * record strides = NULL so handle_vertex_buffers() keeps the strides
+ * from the bound pipeline (it already checks vcb->strides).
+ */
+VKAPI_ATTR void VKAPI_CALL lvp_CmdBindVertexBuffers2EXT(
+   VkCommandBuffer commandBuffer,
+   uint32_t firstBinding,
+   uint32_t bindingCount,
+   const VkBuffer* pBuffers,
+   const VkDeviceSize* pOffsets,
+   const VkDeviceSize* pSizes,
+   const VkDeviceSize* pStrides)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+   struct lvp_buffer **buffers;
+   VkDeviceSize *offsets;
+   VkDeviceSize *sizes;
+   VkDeviceSize *strides;
+   uint32_t i;
+   /* trailing storage: buffer pointers, then offsets, sizes, strides */
+   uint32_t cmd_size = bindingCount * sizeof(struct lvp_buffer *) + bindingCount * 3 * sizeof(VkDeviceSize);
+
+   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, LVP_CMD_BIND_VERTEX_BUFFERS);
+   if (!cmd)
+      return;
+
+   cmd->u.vertex_buffers.first = firstBinding;
+   cmd->u.vertex_buffers.binding_count = bindingCount;
+
+   buffers = (struct lvp_buffer **)(cmd + 1);
+   offsets = (VkDeviceSize *)(buffers + bindingCount);
+   sizes = (VkDeviceSize *)(offsets + bindingCount);
+   strides = (VkDeviceSize *)(sizes + bindingCount);
+   for (i = 0; i < bindingCount; i++) {
+      buffers[i] = lvp_buffer_from_handle(pBuffers[i]);
+      offsets[i] = pOffsets[i];
+      /* pSizes and pStrides are both optional */
+      sizes[i] = pSizes ? pSizes[i] : 0;
+      if (pStrides)
+         strides[i] = pStrides[i];
+   }
+   cmd->u.vertex_buffers.buffers = buffers;
+   cmd->u.vertex_buffers.offsets = offsets;
+   cmd->u.vertex_buffers.sizes = sizes;
+   /* NULL => keep the strides from the pipeline's vertex input state */
+   cmd->u.vertex_buffers.strides = pStrides ? strides : NULL;
+
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+/* VK_EXT_extended_dynamic_state: record a dynamic depth-test toggle.
+ * Applied at execute time by handle_set_depth_test_enable().
+ */
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetDepthTestEnableEXT(
+   VkCommandBuffer commandBuffer,
+   VkBool32 depthTestEnable)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_DEPTH_TEST_ENABLE);
+   if (!cmd)
+      return;
+
+   cmd->u.set_depth_test_enable.depth_test_enable = depthTestEnable;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+/* VK_EXT_extended_dynamic_state: record a dynamic depth-write toggle.
+ * Applied at execute time by handle_set_depth_write_enable().
+ */
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetDepthWriteEnableEXT(
+   VkCommandBuffer commandBuffer,
+   VkBool32 depthWriteEnable)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_DEPTH_WRITE_ENABLE);
+   if (!cmd)
+      return;
+
+   cmd->u.set_depth_write_enable.depth_write_enable = depthWriteEnable;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+/* VK_EXT_extended_dynamic_state: record a dynamic depth compare op.
+ * Applied at execute time by handle_set_depth_compare_op().
+ */
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetDepthCompareOpEXT(
+   VkCommandBuffer commandBuffer,
+   VkCompareOp depthCompareOp)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_DEPTH_COMPARE_OP);
+   if (!cmd)
+      return;
+
+   cmd->u.set_depth_compare_op.depth_op = depthCompareOp;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+/* VK_EXT_extended_dynamic_state: record a dynamic depth-bounds-test
+ * toggle.  Applied at execute time by handle_set_depth_bounds_test_enable().
+ */
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetDepthBoundsTestEnableEXT(
+   VkCommandBuffer commandBuffer,
+   VkBool32 depthBoundsTestEnable)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_DEPTH_BOUNDS_TEST_ENABLE);
+   if (!cmd)
+      return;
+
+   cmd->u.set_depth_bounds_test_enable.depth_bounds_test_enable = depthBoundsTestEnable;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+/* VK_EXT_extended_dynamic_state: record a dynamic stencil-test toggle.
+ * Applied at execute time by handle_set_stencil_test_enable() (affects
+ * both front and back faces).
+ */
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetStencilTestEnableEXT(
+   VkCommandBuffer commandBuffer,
+   VkBool32 stencilTestEnable)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_STENCIL_TEST_ENABLE);
+   if (!cmd)
+      return;
+
+   cmd->u.set_stencil_test_enable.stencil_test_enable = stencilTestEnable;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+/* VK_EXT_extended_dynamic_state: record dynamic stencil ops for the
+ * faces selected by faceMask.  Applied at execute time by
+ * handle_set_stencil_op().
+ */
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetStencilOpEXT(
+   VkCommandBuffer commandBuffer,
+   VkStencilFaceFlags faceMask,
+   VkStencilOp failOp,
+   VkStencilOp passOp,
+   VkStencilOp depthFailOp,
+   VkCompareOp compareOp)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_STENCIL_OP);
+   if (!cmd)
+      return;
+
+   cmd->u.set_stencil_op.face_mask = faceMask;
+   cmd->u.set_stencil_op.fail_op = failOp;
+   cmd->u.set_stencil_op.pass_op = passOp;
+   cmd->u.set_stencil_op.depth_fail_op = depthFailOp;
+   cmd->u.set_stencil_op.compare_op = compareOp;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
#endif
.EXT_calibrated_timestamps = true,
.EXT_conditional_rendering = true,
+ .EXT_extended_dynamic_state = true,
.EXT_index_type_uint8 = true,
.EXT_post_depth_coverage = true,
.EXT_private_data = true,
features->inheritedConditionalRendering = false;
break;
}
+ case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT: {
+ VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *features =
+ (VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*)ext;
+ features->extendedDynamicState = true;
+ break;
+ }
default:
break;
}
translate[2] = n;
}
+/* Map a VkDynamicState to a dense index into the dynamic_states[]
+ * bitmap used by handle_graphics_pipeline().  Core states
+ * (<= VK_DYNAMIC_STATE_STENCIL_REFERENCE) map to themselves; the
+ * VK_EXT_extended_dynamic_state values, whose enum values are large and
+ * non-contiguous, are compacted to follow immediately after the core
+ * range.  Returns -1 (after asserting) for an unhandled state.
+ */
+static int conv_dynamic_state_idx(VkDynamicState dyn_state)
+{
+   if (dyn_state <= VK_DYNAMIC_STATE_STENCIL_REFERENCE)
+      return dyn_state;
+
+   if (dyn_state >= VK_DYNAMIC_STATE_CULL_MODE_EXT &&
+       dyn_state <= VK_DYNAMIC_STATE_STENCIL_OP_EXT)
+      /* compact the EXT range to start right after the core states */
+      return dyn_state - VK_DYNAMIC_STATE_CULL_MODE_EXT + VK_DYNAMIC_STATE_STENCIL_REFERENCE + 1;
+   assert(0);
+   return -1;
+}
+
static void handle_graphics_pipeline(struct lvp_cmd_buffer_entry *cmd,
struct rendering_state *state)
{
struct lvp_pipeline *pipeline = cmd->u.pipeline.pipeline;
- bool dynamic_states[VK_DYNAMIC_STATE_STENCIL_REFERENCE+1];
+ bool dynamic_states[VK_DYNAMIC_STATE_STENCIL_REFERENCE+13];
unsigned fb_samples = 0;
memset(dynamic_states, 0, sizeof(dynamic_states));
const VkPipelineDynamicStateCreateInfo *dyn = pipeline->graphics_create_info.pDynamicState;
int i;
for (i = 0; i < dyn->dynamicStateCount; i++) {
- if (dyn->pDynamicStates[i] > VK_DYNAMIC_STATE_STENCIL_REFERENCE)
+ int idx = conv_dynamic_state_idx(dyn->pDynamicStates[i]);
+ if (idx == -1)
continue;
- dynamic_states[dyn->pDynamicStates[i]] = true;
+ dynamic_states[idx] = true;
}
}
const VkPipelineRasterizationStateCreateInfo *rsc = pipeline->graphics_create_info.pRasterizationState;
state->rs_state.depth_clip_near = state->rs_state.depth_clip_far = !rsc->depthClampEnable;
state->rs_state.rasterizer_discard = rsc->rasterizerDiscardEnable;
- state->rs_state.front_ccw = (rsc->frontFace == VK_FRONT_FACE_COUNTER_CLOCKWISE);
- state->rs_state.cull_face = vk_cull_to_pipe(rsc->cullMode);
+
+
state->rs_state.fill_front = vk_polygon_mode_to_pipe(rsc->polygonMode);
state->rs_state.fill_back = vk_polygon_mode_to_pipe(rsc->polygonMode);
state->rs_state.point_size_per_vertex = true;
state->rs_state.offset_scale = rsc->depthBiasSlopeFactor;
state->rs_state.offset_clamp = rsc->depthBiasClamp;
}
+
+ if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_CULL_MODE_EXT)])
+ state->rs_state.cull_face = vk_cull_to_pipe(rsc->cullMode);
+
+ if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_FRONT_FACE_EXT)])
+ state->rs_state.front_ccw = (rsc->frontFace == VK_FRONT_FACE_COUNTER_CLOCKWISE);
state->rs_dirty = true;
}
if (pipeline->graphics_create_info.pDepthStencilState) {
const VkPipelineDepthStencilStateCreateInfo *dsa = pipeline->graphics_create_info.pDepthStencilState;
- state->dsa_state.depth_enabled = dsa->depthTestEnable;
- state->dsa_state.depth_writemask = dsa->depthWriteEnable;
- state->dsa_state.depth_func = dsa->depthCompareOp;
- state->dsa_state.depth_bounds_test = dsa->depthBoundsTestEnable;
+ if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT)])
+ state->dsa_state.depth_enabled = dsa->depthTestEnable;
+ if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT)])
+ state->dsa_state.depth_writemask = dsa->depthWriteEnable;
+ if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT)])
+ state->dsa_state.depth_func = dsa->depthCompareOp;
+ if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT)])
+ state->dsa_state.depth_bounds_test = dsa->depthBoundsTestEnable;
if (!dynamic_states[VK_DYNAMIC_STATE_DEPTH_BOUNDS]) {
state->dsa_state.depth_bounds_min = dsa->minDepthBounds;
state->dsa_state.depth_bounds_max = dsa->maxDepthBounds;
}
- state->dsa_state.stencil[0].enabled = dsa->stencilTestEnable;
- state->dsa_state.stencil[0].func = dsa->front.compareOp;
- state->dsa_state.stencil[0].fail_op = vk_conv_stencil_op(dsa->front.failOp);
- state->dsa_state.stencil[0].zpass_op = vk_conv_stencil_op(dsa->front.passOp);
- state->dsa_state.stencil[0].zfail_op = vk_conv_stencil_op(dsa->front.depthFailOp);
+ if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT)]) {
+ state->dsa_state.stencil[0].enabled = dsa->stencilTestEnable;
+ state->dsa_state.stencil[1].enabled = dsa->stencilTestEnable;
+ }
- state->dsa_state.stencil[1].enabled = dsa->stencilTestEnable;
- state->dsa_state.stencil[1].func = dsa->back.compareOp;
- state->dsa_state.stencil[1].fail_op = vk_conv_stencil_op(dsa->back.failOp);
- state->dsa_state.stencil[1].zpass_op = vk_conv_stencil_op(dsa->back.passOp);
- state->dsa_state.stencil[1].zfail_op = vk_conv_stencil_op(dsa->back.depthFailOp);
+ if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_STENCIL_OP_EXT)]) {
+ state->dsa_state.stencil[0].func = dsa->front.compareOp;
+ state->dsa_state.stencil[0].fail_op = vk_conv_stencil_op(dsa->front.failOp);
+ state->dsa_state.stencil[0].zpass_op = vk_conv_stencil_op(dsa->front.passOp);
+ state->dsa_state.stencil[0].zfail_op = vk_conv_stencil_op(dsa->front.depthFailOp);
+
+ state->dsa_state.stencil[1].func = dsa->back.compareOp;
+ state->dsa_state.stencil[1].fail_op = vk_conv_stencil_op(dsa->back.failOp);
+ state->dsa_state.stencil[1].zpass_op = vk_conv_stencil_op(dsa->back.passOp);
+ state->dsa_state.stencil[1].zfail_op = vk_conv_stencil_op(dsa->back.depthFailOp);
+ }
if (!dynamic_states[VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK]) {
state->dsa_state.stencil[0].valuemask = dsa->front.compareMask;
vk_find_struct_const(vi->pNext,
PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT);
- for (i = 0; i < vi->vertexBindingDescriptionCount; i++) {
- state->vb[i].stride = vi->pVertexBindingDescriptions[i].stride;
+ if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT)]) {
+ for (i = 0; i < vi->vertexBindingDescriptionCount; i++) {
+ state->vb[i].stride = vi->pVertexBindingDescriptions[i].stride;
+ }
}
int max_location = -1;
{
const VkPipelineInputAssemblyStateCreateInfo *ia = pipeline->graphics_create_info.pInputAssemblyState;
- state->info.mode = vk_conv_topology(ia->topology);
+ if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT)])
+ state->info.mode = vk_conv_topology(ia->topology);
state->info.primitive_restart = ia->primitiveRestartEnable;
}
const VkPipelineViewportStateCreateInfo *vpi= pipeline->graphics_create_info.pViewportState;
int i;
- state->num_viewports = vpi->viewportCount;
- state->num_scissors = vpi->scissorCount;
- state->vp_dirty = true;
+ if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT)]) {
+ state->num_viewports = vpi->viewportCount;
+ state->vp_dirty = true;
+ }
+ if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT)]) {
+ state->num_scissors = vpi->scissorCount;
+ state->scissor_dirty = true;
+ }
+
if (!dynamic_states[VK_DYNAMIC_STATE_VIEWPORT]) {
for (i = 0; i < vpi->viewportCount; i++)
get_viewport_xform(&vpi->pViewports[i], state->viewports[i].scale, state->viewports[i].translate);
state->vb[idx].buffer_offset = vcb->offsets[i];
state->vb[idx].buffer.resource = vcb->buffers[i]->bo;
+
+ if (vcb->strides) {
+ state->vb[idx].stride = vcb->strides[i];
+ }
}
if (vcb->first < state->start_vb)
state->start_vb = vcb->first;
struct rendering_state *state)
{
int i;
+ unsigned base = 0;
+ if (cmd->u.set_viewport.first_viewport == UINT32_MAX)
+ state->num_viewports = cmd->u.set_viewport.viewport_count;
+ else
+ base = cmd->u.set_viewport.first_viewport;
for (i = 0; i < cmd->u.set_viewport.viewport_count; i++) {
- int idx = i + cmd->u.set_viewport.first_viewport;
+ int idx = i + base;
const VkViewport *vp = &cmd->u.set_viewport.viewports[i];
get_viewport_xform(vp, state->viewports[idx].scale, state->viewports[idx].translate);
}
struct rendering_state *state)
{
int i;
+ unsigned base = 0;
+ if (cmd->u.set_scissor.first_scissor == UINT32_MAX)
+ state->num_scissors = cmd->u.set_scissor.scissor_count;
+ else
+ base = cmd->u.set_scissor.first_scissor;
for (i = 0; i < cmd->u.set_scissor.scissor_count; i++) {
- int idx = i + cmd->u.set_scissor.first_scissor;
+ int idx = i + base;
const VkRect2D *ss = &cmd->u.set_scissor.scissors[i];
state->scissors[idx].minx = ss->offset.x;
state->scissors[idx].miny = ss->offset.y;
state->pctx->render_condition_mem(state->pctx, NULL, 0, false);
}
+/* Execute LVP_CMD_SET_CULL_MODE: apply the recorded cull mode to the
+ * gallium rasterizer state and mark it dirty.
+ */
+static void handle_set_cull_mode(struct lvp_cmd_buffer_entry *cmd,
+                                 struct rendering_state *state)
+{
+   state->rs_state.cull_face = vk_cull_to_pipe(cmd->u.set_cull_mode.cull_mode);
+   state->rs_dirty = true;
+}
+
+/* Execute LVP_CMD_SET_FRONT_FACE: update the rasterizer winding order
+ * and mark the rasterizer state dirty.
+ */
+static void handle_set_front_face(struct lvp_cmd_buffer_entry *cmd,
+                                  struct rendering_state *state)
+{
+   state->rs_state.front_ccw = (cmd->u.set_front_face.front_face == VK_FRONT_FACE_COUNTER_CLOCKWISE);
+   state->rs_dirty = true;
+}
+
+/* Execute LVP_CMD_SET_PRIMITIVE_TOPOLOGY: translate the Vulkan topology
+ * to the gallium primitive mode.
+ * NOTE(review): no dirty flag is set here — presumably state->info is
+ * read directly when draws are emitted; confirm against the draw path.
+ */
+static void handle_set_primitive_topology(struct lvp_cmd_buffer_entry *cmd,
+                                          struct rendering_state *state)
+{
+   state->info.mode = vk_conv_topology(cmd->u.set_primitive_topology.prim);
+}
+
+
+/* Execute LVP_CMD_SET_DEPTH_TEST_ENABLE: toggle depth testing in the
+ * depth/stencil/alpha state and mark it dirty.
+ */
+static void handle_set_depth_test_enable(struct lvp_cmd_buffer_entry *cmd,
+                                         struct rendering_state *state)
+{
+   state->dsa_state.depth_enabled = cmd->u.set_depth_test_enable.depth_test_enable;
+   state->dsa_dirty = true;
+}
+
+/* Execute LVP_CMD_SET_DEPTH_WRITE_ENABLE: toggle depth writes and mark
+ * the depth/stencil/alpha state dirty.
+ */
+static void handle_set_depth_write_enable(struct lvp_cmd_buffer_entry *cmd,
+                                          struct rendering_state *state)
+{
+   state->dsa_state.depth_writemask = cmd->u.set_depth_write_enable.depth_write_enable;
+   state->dsa_dirty = true;
+}
+
+/* Execute LVP_CMD_SET_DEPTH_COMPARE_OP: set the depth comparison
+ * function and mark the depth/stencil/alpha state dirty.
+ */
+static void handle_set_depth_compare_op(struct lvp_cmd_buffer_entry *cmd,
+                                        struct rendering_state *state)
+{
+   state->dsa_state.depth_func = cmd->u.set_depth_compare_op.depth_op;
+   state->dsa_dirty = true;
+}
+
+/* Execute LVP_CMD_SET_DEPTH_BOUNDS_TEST_ENABLE: toggle the depth-bounds
+ * test and mark the depth/stencil/alpha state dirty.
+ */
+static void handle_set_depth_bounds_test_enable(struct lvp_cmd_buffer_entry *cmd,
+                                                struct rendering_state *state)
+{
+   state->dsa_state.depth_bounds_test = cmd->u.set_depth_bounds_test_enable.depth_bounds_test_enable;
+   state->dsa_dirty = true;
+}
+
+/* Execute LVP_CMD_SET_STENCIL_TEST_ENABLE: a single Vulkan toggle
+ * controls both faces, so enable/disable front (0) and back (1)
+ * together and mark the depth/stencil/alpha state dirty.
+ */
+static void handle_set_stencil_test_enable(struct lvp_cmd_buffer_entry *cmd,
+                                           struct rendering_state *state)
+{
+   state->dsa_state.stencil[0].enabled = cmd->u.set_stencil_test_enable.stencil_test_enable;
+   state->dsa_state.stencil[1].enabled = cmd->u.set_stencil_test_enable.stencil_test_enable;
+   state->dsa_dirty = true;
+}
+
+/* Execute LVP_CMD_SET_STENCIL_OP: apply the recorded stencil ops to the
+ * front face (stencil[0]) and/or back face (stencil[1]) as selected by
+ * face_mask, then mark the depth/stencil/alpha state dirty.
+ */
+static void handle_set_stencil_op(struct lvp_cmd_buffer_entry *cmd,
+                                  struct rendering_state *state)
+{
+   if (cmd->u.set_stencil_op.face_mask & VK_STENCIL_FACE_FRONT_BIT) {
+      state->dsa_state.stencil[0].func = cmd->u.set_stencil_op.compare_op;
+      state->dsa_state.stencil[0].fail_op = vk_conv_stencil_op(cmd->u.set_stencil_op.fail_op);
+      state->dsa_state.stencil[0].zpass_op = vk_conv_stencil_op(cmd->u.set_stencil_op.pass_op);
+      state->dsa_state.stencil[0].zfail_op = vk_conv_stencil_op(cmd->u.set_stencil_op.depth_fail_op);
+   }
+
+   if (cmd->u.set_stencil_op.face_mask & VK_STENCIL_FACE_BACK_BIT) {
+      state->dsa_state.stencil[1].func = cmd->u.set_stencil_op.compare_op;
+      state->dsa_state.stencil[1].fail_op = vk_conv_stencil_op(cmd->u.set_stencil_op.fail_op);
+      state->dsa_state.stencil[1].zpass_op = vk_conv_stencil_op(cmd->u.set_stencil_op.pass_op);
+      state->dsa_state.stencil[1].zfail_op = vk_conv_stencil_op(cmd->u.set_stencil_op.depth_fail_op);
+   }
+   state->dsa_dirty = true;
+}
+
static void lvp_execute_cmd_buffer(struct lvp_cmd_buffer *cmd_buffer,
struct rendering_state *state)
{
case LVP_CMD_END_CONDITIONAL_RENDERING:
handle_end_conditional_rendering(state);
break;
+ case LVP_CMD_SET_CULL_MODE:
+ handle_set_cull_mode(cmd, state);
+ break;
+ case LVP_CMD_SET_FRONT_FACE:
+ handle_set_front_face(cmd, state);
+ break;
+ case LVP_CMD_SET_PRIMITIVE_TOPOLOGY:
+ handle_set_primitive_topology(cmd, state);
+ break;
+ case LVP_CMD_SET_DEPTH_TEST_ENABLE:
+ handle_set_depth_test_enable(cmd, state);
+ break;
+ case LVP_CMD_SET_DEPTH_WRITE_ENABLE:
+ handle_set_depth_write_enable(cmd, state);
+ break;
+ case LVP_CMD_SET_DEPTH_COMPARE_OP:
+ handle_set_depth_compare_op(cmd, state);
+ break;
+ case LVP_CMD_SET_DEPTH_BOUNDS_TEST_ENABLE:
+ handle_set_depth_bounds_test_enable(cmd, state);
+ break;
+ case LVP_CMD_SET_STENCIL_TEST_ENABLE:
+ handle_set_stencil_test_enable(cmd, state);
+ break;
+ case LVP_CMD_SET_STENCIL_OP:
+ handle_set_stencil_op(cmd, state);
+ break;
}
}
}
LVP_CMD_DRAW_INDIRECT_BYTE_COUNT,
LVP_CMD_BEGIN_CONDITIONAL_RENDERING,
LVP_CMD_END_CONDITIONAL_RENDERING,
+ LVP_CMD_SET_CULL_MODE,
+ LVP_CMD_SET_FRONT_FACE,
+ LVP_CMD_SET_PRIMITIVE_TOPOLOGY,
+ LVP_CMD_SET_DEPTH_TEST_ENABLE,
+ LVP_CMD_SET_DEPTH_WRITE_ENABLE,
+ LVP_CMD_SET_DEPTH_COMPARE_OP,
+ LVP_CMD_SET_DEPTH_BOUNDS_TEST_ENABLE,
+ LVP_CMD_SET_STENCIL_TEST_ENABLE,
+ LVP_CMD_SET_STENCIL_OP,
};
struct lvp_cmd_bind_pipeline {
uint32_t binding_count;
struct lvp_buffer **buffers;
const VkDeviceSize *offsets;
+ const VkDeviceSize *sizes;
+ const VkDeviceSize *strides;
};
struct lvp_cmd_draw {
bool inverted;
};
+/* Payload structs for the VK_EXT_extended_dynamic_state commands, one
+ * per LVP_CMD_SET_* enum value; each is stored in the union inside
+ * struct lvp_cmd_buffer_entry and consumed by the matching handle_set_*
+ * function at execute time.
+ */
+struct lvp_cmd_set_cull_mode {
+   VkCullModeFlags cull_mode;
+};
+
+struct lvp_cmd_set_front_face {
+   VkFrontFace front_face;
+};
+
+struct lvp_cmd_set_primitive_topology {
+   VkPrimitiveTopology prim;
+};
+
+struct lvp_cmd_set_depth_test_enable {
+   VkBool32 depth_test_enable;
+};
+
+struct lvp_cmd_set_depth_write_enable {
+   VkBool32 depth_write_enable;
+};
+
+struct lvp_cmd_set_depth_bounds_test_enable {
+   VkBool32 depth_bounds_test_enable;
+};
+
+struct lvp_cmd_set_depth_compare_op {
+   VkCompareOp depth_op;
+};
+
+struct lvp_cmd_set_stencil_test_enable {
+   VkBool32 stencil_test_enable;
+};
+
+/* face_mask selects which of front/back faces the ops apply to */
+struct lvp_cmd_set_stencil_op {
+   VkStencilFaceFlags face_mask;
+   VkStencilOp fail_op;
+   VkStencilOp pass_op;
+   VkStencilOp depth_fail_op;
+   VkCompareOp compare_op;
+};
+
+
struct lvp_cmd_buffer_entry {
struct list_head cmd_link;
uint32_t cmd_type;
struct lvp_cmd_end_transform_feedback end_transform_feedback;
struct lvp_cmd_draw_indirect_byte_count draw_indirect_byte_count;
struct lvp_cmd_begin_conditional_rendering begin_conditional_rendering;
+ struct lvp_cmd_set_cull_mode set_cull_mode;
+ struct lvp_cmd_set_front_face set_front_face;
+ struct lvp_cmd_set_primitive_topology set_primitive_topology;
+ struct lvp_cmd_set_depth_test_enable set_depth_test_enable;
+ struct lvp_cmd_set_depth_write_enable set_depth_write_enable;
+ struct lvp_cmd_set_depth_compare_op set_depth_compare_op;
+ struct lvp_cmd_set_depth_bounds_test_enable set_depth_bounds_test_enable;
+ struct lvp_cmd_set_stencil_test_enable set_stencil_test_enable;
+ struct lvp_cmd_set_stencil_op set_stencil_op;
} u;
};