lavapipe: VK_EXT_extended_dynamic_state support
author	Dave Airlie <airlied@redhat.com>
Mon, 1 Mar 2021 05:52:07 +0000 (15:52 +1000)
committer	Marge Bot <eric+marge@anholt.net>
Tue, 2 Mar 2021 01:30:19 +0000 (01:30 +0000)
zink can use this in some paths, and it's not a crazy thing to support
for lavapipe.

Reviewed-by: Mike Blumenkrantz <michael.blumenkrantz@gmail.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/9328>
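
For context, a minimal sketch of how a client such as zink would drive these new entry points once the extension is enabled. The function-pointer loading and the draw loop below are illustrative, not taken from zink; they assume a VkDevice created with VK_EXT_extended_dynamic_state and a pipeline whose pDynamicStates lists the matching VK_DYNAMIC_STATE_*_EXT values.

/* Illustrative only: extension device commands are fetched through
 * vkGetDeviceProcAddr and then used to change rasterizer/depth state
 * per draw without switching pipelines. */
#include <vulkan/vulkan.h>

static PFN_vkCmdSetCullModeEXT        p_vkCmdSetCullModeEXT;
static PFN_vkCmdSetFrontFaceEXT       p_vkCmdSetFrontFaceEXT;
static PFN_vkCmdSetDepthTestEnableEXT p_vkCmdSetDepthTestEnableEXT;

static void load_ext_entrypoints(VkDevice dev)
{
   p_vkCmdSetCullModeEXT = (PFN_vkCmdSetCullModeEXT)
      vkGetDeviceProcAddr(dev, "vkCmdSetCullModeEXT");
   p_vkCmdSetFrontFaceEXT = (PFN_vkCmdSetFrontFaceEXT)
      vkGetDeviceProcAddr(dev, "vkCmdSetFrontFaceEXT");
   p_vkCmdSetDepthTestEnableEXT = (PFN_vkCmdSetDepthTestEnableEXT)
      vkGetDeviceProcAddr(dev, "vkCmdSetDepthTestEnableEXT");
}

static void record_draw(VkCommandBuffer cb)
{
   /* These states are now dynamic, so no pipeline switch is needed. */
   p_vkCmdSetCullModeEXT(cb, VK_CULL_MODE_BACK_BIT);
   p_vkCmdSetFrontFaceEXT(cb, VK_FRONT_FACE_COUNTER_CLOCKWISE);
   p_vkCmdSetDepthTestEnableEXT(cb, VK_TRUE);
   vkCmdDraw(cb, 3, 1, 0, 0);
}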

src/gallium/frontends/lavapipe/lvp_cmd_buffer.c
src/gallium/frontends/lavapipe/lvp_device.c
src/gallium/frontends/lavapipe/lvp_execute.c
src/gallium/frontends/lavapipe/lvp_private.h

diff --git a/src/gallium/frontends/lavapipe/lvp_cmd_buffer.c b/src/gallium/frontends/lavapipe/lvp_cmd_buffer.c
index ae5349c..aa85d27 100644
--- a/src/gallium/frontends/lavapipe/lvp_cmd_buffer.c
+++ b/src/gallium/frontends/lavapipe/lvp_cmd_buffer.c
@@ -395,6 +395,7 @@ VKAPI_ATTR void VKAPI_CALL lvp_CmdBindVertexBuffers(
    }
    cmd->u.vertex_buffers.buffers = buffers;
    cmd->u.vertex_buffers.offsets = offsets;
+   cmd->u.vertex_buffers.strides = NULL;
 
    cmd_buf_queue(cmd_buffer, cmd);
 }
@@ -1829,3 +1830,234 @@ VKAPI_ATTR void VKAPI_CALL lvp_CmdEndConditionalRenderingEXT(
       return;
    cmd_buf_queue(cmd_buffer, cmd);
 }
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetCullModeEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkCullModeFlags                             cullMode)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_CULL_MODE);
+   if (!cmd)
+      return;
+
+   cmd->u.set_cull_mode.cull_mode = cullMode;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetFrontFaceEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkFrontFace                                 frontFace)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_FRONT_FACE);
+   if (!cmd)
+      return;
+
+   cmd->u.set_front_face.front_face = frontFace;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetPrimitiveTopologyEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkPrimitiveTopology                         primitiveTopology)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_PRIMITIVE_TOPOLOGY);
+   if (!cmd)
+      return;
+
+   cmd->u.set_primitive_topology.prim = primitiveTopology;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetViewportWithCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+   int i;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_VIEWPORT);
+   if (!cmd)
+      return;
+
+   cmd->u.set_viewport.first_viewport = UINT32_MAX;
+   cmd->u.set_viewport.viewport_count = viewportCount;
+   for (i = 0; i < viewportCount; i++)
+      cmd->u.set_viewport.viewports[i] = pViewports[i];
+
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetScissorWithCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+   int i;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_SCISSOR);
+   if (!cmd)
+      return;
+
+   cmd->u.set_scissor.first_scissor = UINT32_MAX;
+   cmd->u.set_scissor.scissor_count = scissorCount;
+   for (i = 0; i < scissorCount; i++)
+      cmd->u.set_scissor.scissors[i] = pScissors[i];
+
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdBindVertexBuffers2EXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes,
+    const VkDeviceSize*                         pStrides)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+   struct lvp_buffer **buffers;
+   VkDeviceSize *offsets;
+   VkDeviceSize *sizes;
+   VkDeviceSize *strides;
+   int i;
+   uint32_t cmd_size = bindingCount * sizeof(struct lvp_buffer *) + bindingCount * 3 * sizeof(VkDeviceSize);
+
+   cmd = cmd_buf_entry_alloc_size(cmd_buffer, cmd_size, LVP_CMD_BIND_VERTEX_BUFFERS);
+   if (!cmd)
+      return;
+
+   cmd->u.vertex_buffers.first = firstBinding;
+   cmd->u.vertex_buffers.binding_count = bindingCount;
+
+   buffers = (struct lvp_buffer **)(cmd + 1);
+   offsets = (VkDeviceSize *)(buffers + bindingCount);
+   sizes = (VkDeviceSize *)(offsets + bindingCount);
+   strides = (VkDeviceSize *)(sizes + bindingCount);
+   for (i = 0; i < bindingCount; i++) {
+      buffers[i] = lvp_buffer_from_handle(pBuffers[i]);
+      offsets[i] = pOffsets[i];
+      if (pSizes)
+         sizes[i] = pSizes[i];
+      else
+         sizes[i] = 0;
+      strides[i] = pStrides[i];
+   }
+   cmd->u.vertex_buffers.buffers = buffers;
+   cmd->u.vertex_buffers.offsets = offsets;
+   cmd->u.vertex_buffers.sizes = sizes;
+   cmd->u.vertex_buffers.strides = strides;
+
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetDepthTestEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthTestEnable)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_DEPTH_TEST_ENABLE);
+   if (!cmd)
+      return;
+
+   cmd->u.set_depth_test_enable.depth_test_enable = depthTestEnable;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetDepthWriteEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthWriteEnable)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_DEPTH_WRITE_ENABLE);
+   if (!cmd)
+      return;
+
+   cmd->u.set_depth_write_enable.depth_write_enable = depthWriteEnable;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetDepthCompareOpEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkCompareOp                                 depthCompareOp)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_DEPTH_COMPARE_OP);
+   if (!cmd)
+      return;
+
+   cmd->u.set_depth_compare_op.depth_op = depthCompareOp;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetDepthBoundsTestEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthBoundsTestEnable)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_DEPTH_BOUNDS_TEST_ENABLE);
+   if (!cmd)
+      return;
+
+   cmd->u.set_depth_bounds_test_enable.depth_bounds_test_enable = depthBoundsTestEnable;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetStencilTestEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    stencilTestEnable)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_STENCIL_TEST_ENABLE);
+   if (!cmd)
+      return;
+
+   cmd->u.set_stencil_test_enable.stencil_test_enable = stencilTestEnable;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
+
+VKAPI_ATTR void VKAPI_CALL lvp_CmdSetStencilOpEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    VkStencilOp                                 failOp,
+    VkStencilOp                                 passOp,
+    VkStencilOp                                 depthFailOp,
+    VkCompareOp                                 compareOp)
+{
+   LVP_FROM_HANDLE(lvp_cmd_buffer, cmd_buffer, commandBuffer);
+   struct lvp_cmd_buffer_entry *cmd;
+
+   cmd = cmd_buf_entry_alloc(cmd_buffer, LVP_CMD_SET_STENCIL_OP);
+   if (!cmd)
+      return;
+
+   cmd->u.set_stencil_op.face_mask = faceMask;
+   cmd->u.set_stencil_op.fail_op = failOp;
+   cmd->u.set_stencil_op.pass_op = passOp;
+   cmd->u.set_stencil_op.depth_fail_op = depthFailOp;
+   cmd->u.set_stencil_op.compare_op = compareOp;
+   cmd_buf_queue(cmd_buffer, cmd);
+}
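
lvp_CmdBindVertexBuffers2EXT above packs its four per-binding arrays into the same allocation as the command entry, which is what the pointer arithmetic after cmd_buf_entry_alloc_size is doing. A self-contained sketch of that layout, using simplified stand-in types rather than the actual lvp structures:

/* Simplified stand-in for the trailing-array layout: one allocation holds
 * the entry followed by buffers[], offsets[], sizes[] and strides[]. */
#include <stdint.h>
#include <stdlib.h>

struct entry {
   uint32_t first;
   uint32_t count;
   void    **buffers;     /* all four pointers alias the same allocation */
   uint64_t *offsets;
   uint64_t *sizes;
   uint64_t *strides;
};

static struct entry *alloc_bind_vertex_buffers(uint32_t count)
{
   size_t extra = count * sizeof(void *) + count * 3 * sizeof(uint64_t);
   struct entry *e = calloc(1, sizeof(*e) + extra);
   if (!e)
      return NULL;
   e->count   = count;
   e->buffers = (void **)(e + 1);                  /* first trailing array */
   e->offsets = (uint64_t *)(e->buffers + count);  /* then offsets */
   e->sizes   = e->offsets + count;                /* then sizes */
   e->strides = e->sizes + count;                  /* finally strides */
   return e;
}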
diff --git a/src/gallium/frontends/lavapipe/lvp_device.c b/src/gallium/frontends/lavapipe/lvp_device.c
index 2158f97..160bb33 100644
--- a/src/gallium/frontends/lavapipe/lvp_device.c
+++ b/src/gallium/frontends/lavapipe/lvp_device.c
@@ -115,6 +115,7 @@ static const struct vk_device_extension_table lvp_device_extensions_supported =
 #endif
    .EXT_calibrated_timestamps             = true,
    .EXT_conditional_rendering             = true,
+   .EXT_extended_dynamic_state            = true,
    .EXT_index_type_uint8                  = true,
    .EXT_post_depth_coverage               = true,
    .EXT_private_data                      = true,
@@ -484,6 +485,12 @@ VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceFeatures2(
          features->inheritedConditionalRendering = false;
          break;
       }
+      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT: {
+         VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *features =
+            (VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*)ext;
+         features->extendedDynamicState = true;
+         break;
+      }
       default:
          break;
       }
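
On the device side the patch only advertises the extension and fills in the feature bit; applications discover it through the standard pNext chain query. A minimal sketch of that query, assuming an already-enumerated VkPhysicalDevice:

/* Minimal feature query: chain the EXT feature struct into
 * VkPhysicalDeviceFeatures2 and read back extendedDynamicState. */
#include <stdbool.h>
#include <vulkan/vulkan.h>

static bool has_extended_dynamic_state(VkPhysicalDevice phys)
{
   VkPhysicalDeviceExtendedDynamicStateFeaturesEXT eds = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT,
   };
   VkPhysicalDeviceFeatures2 features = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
      .pNext = &eds,
   };
   vkGetPhysicalDeviceFeatures2(phys, &features);
   return eds.extendedDynamicState == VK_TRUE;
}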
diff --git a/src/gallium/frontends/lavapipe/lvp_execute.c b/src/gallium/frontends/lavapipe/lvp_execute.c
index 37aea2e..5535137 100644
--- a/src/gallium/frontends/lavapipe/lvp_execute.c
+++ b/src/gallium/frontends/lavapipe/lvp_execute.c
@@ -352,11 +352,23 @@ get_viewport_xform(const VkViewport *viewport,
    translate[2] = n;
 }
 
+static int conv_dynamic_state_idx(VkDynamicState dyn_state)
+{
+   if (dyn_state <= VK_DYNAMIC_STATE_STENCIL_REFERENCE)
+      return dyn_state;
+
+   if (dyn_state >= VK_DYNAMIC_STATE_CULL_MODE_EXT &&
+       dyn_state <= VK_DYNAMIC_STATE_STENCIL_OP_EXT)
+      return dyn_state - VK_DYNAMIC_STATE_CULL_MODE_EXT + VK_DYNAMIC_STATE_STENCIL_REFERENCE + 1;
+   assert(0);
+   return -1;
+}
+
 static void handle_graphics_pipeline(struct lvp_cmd_buffer_entry *cmd,
                                      struct rendering_state *state)
 {
    struct lvp_pipeline *pipeline = cmd->u.pipeline.pipeline;
-   bool dynamic_states[VK_DYNAMIC_STATE_STENCIL_REFERENCE+1];
+   bool dynamic_states[VK_DYNAMIC_STATE_STENCIL_REFERENCE+13];
    unsigned fb_samples = 0;
 
    memset(dynamic_states, 0, sizeof(dynamic_states));
@@ -365,9 +377,10 @@ static void handle_graphics_pipeline(struct lvp_cmd_buffer_entry *cmd,
       const VkPipelineDynamicStateCreateInfo *dyn = pipeline->graphics_create_info.pDynamicState;
       int i;
       for (i = 0; i < dyn->dynamicStateCount; i++) {
-         if (dyn->pDynamicStates[i] > VK_DYNAMIC_STATE_STENCIL_REFERENCE)
+         int idx = conv_dynamic_state_idx(dyn->pDynamicStates[i]);
+         if (idx == -1)
             continue;
-         dynamic_states[dyn->pDynamicStates[i]] = true;
+         dynamic_states[idx] = true;
       }
    }
 
@@ -425,8 +438,8 @@ static void handle_graphics_pipeline(struct lvp_cmd_buffer_entry *cmd,
       const VkPipelineRasterizationStateCreateInfo *rsc = pipeline->graphics_create_info.pRasterizationState;
       state->rs_state.depth_clip_near = state->rs_state.depth_clip_far = !rsc->depthClampEnable;
       state->rs_state.rasterizer_discard = rsc->rasterizerDiscardEnable;
-      state->rs_state.front_ccw = (rsc->frontFace == VK_FRONT_FACE_COUNTER_CLOCKWISE);
-      state->rs_state.cull_face = vk_cull_to_pipe(rsc->cullMode);
+
+
       state->rs_state.fill_front = vk_polygon_mode_to_pipe(rsc->polygonMode);
       state->rs_state.fill_back = vk_polygon_mode_to_pipe(rsc->polygonMode);
       state->rs_state.point_size_per_vertex = true;
@@ -445,6 +458,12 @@ static void handle_graphics_pipeline(struct lvp_cmd_buffer_entry *cmd,
          state->rs_state.offset_scale = rsc->depthBiasSlopeFactor;
          state->rs_state.offset_clamp = rsc->depthBiasClamp;
       }
+
+      if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_CULL_MODE_EXT)])
+         state->rs_state.cull_face = vk_cull_to_pipe(rsc->cullMode);
+
+      if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_FRONT_FACE_EXT)])
+         state->rs_state.front_ccw = (rsc->frontFace == VK_FRONT_FACE_COUNTER_CLOCKWISE);
       state->rs_dirty = true;
    }
 
@@ -479,27 +498,36 @@ static void handle_graphics_pipeline(struct lvp_cmd_buffer_entry *cmd,
    if (pipeline->graphics_create_info.pDepthStencilState) {
       const VkPipelineDepthStencilStateCreateInfo *dsa = pipeline->graphics_create_info.pDepthStencilState;
 
-      state->dsa_state.depth_enabled = dsa->depthTestEnable;
-      state->dsa_state.depth_writemask = dsa->depthWriteEnable;
-      state->dsa_state.depth_func = dsa->depthCompareOp;
-      state->dsa_state.depth_bounds_test = dsa->depthBoundsTestEnable;
+      if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT)])
+         state->dsa_state.depth_enabled = dsa->depthTestEnable;
+      if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT)])
+         state->dsa_state.depth_writemask = dsa->depthWriteEnable;
+      if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT)])
+         state->dsa_state.depth_func = dsa->depthCompareOp;
+      if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT)])
+         state->dsa_state.depth_bounds_test = dsa->depthBoundsTestEnable;
 
       if (!dynamic_states[VK_DYNAMIC_STATE_DEPTH_BOUNDS]) {
          state->dsa_state.depth_bounds_min = dsa->minDepthBounds;
          state->dsa_state.depth_bounds_max = dsa->maxDepthBounds;
       }
 
-      state->dsa_state.stencil[0].enabled = dsa->stencilTestEnable;
-      state->dsa_state.stencil[0].func = dsa->front.compareOp;
-      state->dsa_state.stencil[0].fail_op = vk_conv_stencil_op(dsa->front.failOp);
-      state->dsa_state.stencil[0].zpass_op = vk_conv_stencil_op(dsa->front.passOp);
-      state->dsa_state.stencil[0].zfail_op = vk_conv_stencil_op(dsa->front.depthFailOp);
+      if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT)]) {
+         state->dsa_state.stencil[0].enabled = dsa->stencilTestEnable;
+         state->dsa_state.stencil[1].enabled = dsa->stencilTestEnable;
+      }
 
-      state->dsa_state.stencil[1].enabled = dsa->stencilTestEnable;
-      state->dsa_state.stencil[1].func = dsa->back.compareOp;
-      state->dsa_state.stencil[1].fail_op = vk_conv_stencil_op(dsa->back.failOp);
-      state->dsa_state.stencil[1].zpass_op = vk_conv_stencil_op(dsa->back.passOp);
-      state->dsa_state.stencil[1].zfail_op = vk_conv_stencil_op(dsa->back.depthFailOp);
+      if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_STENCIL_OP_EXT)]) {
+         state->dsa_state.stencil[0].func = dsa->front.compareOp;
+         state->dsa_state.stencil[0].fail_op = vk_conv_stencil_op(dsa->front.failOp);
+         state->dsa_state.stencil[0].zpass_op = vk_conv_stencil_op(dsa->front.passOp);
+         state->dsa_state.stencil[0].zfail_op = vk_conv_stencil_op(dsa->front.depthFailOp);
+
+         state->dsa_state.stencil[1].func = dsa->back.compareOp;
+         state->dsa_state.stencil[1].fail_op = vk_conv_stencil_op(dsa->back.failOp);
+         state->dsa_state.stencil[1].zpass_op = vk_conv_stencil_op(dsa->back.passOp);
+         state->dsa_state.stencil[1].zfail_op = vk_conv_stencil_op(dsa->back.depthFailOp);
+      }
 
       if (!dynamic_states[VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK]) {
          state->dsa_state.stencil[0].valuemask = dsa->front.compareMask;
@@ -573,8 +601,10 @@ static void handle_graphics_pipeline(struct lvp_cmd_buffer_entry *cmd,
          vk_find_struct_const(vi->pNext,
                               PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT);
 
-      for (i = 0; i < vi->vertexBindingDescriptionCount; i++) {
-         state->vb[i].stride = vi->pVertexBindingDescriptions[i].stride;
+      if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT)]) {
+         for (i = 0; i < vi->vertexBindingDescriptionCount; i++) {
+            state->vb[i].stride = vi->pVertexBindingDescriptions[i].stride;
+         }
       }
 
       int max_location = -1;
@@ -617,7 +647,8 @@ static void handle_graphics_pipeline(struct lvp_cmd_buffer_entry *cmd,
    {
       const VkPipelineInputAssemblyStateCreateInfo *ia = pipeline->graphics_create_info.pInputAssemblyState;
 
-      state->info.mode = vk_conv_topology(ia->topology);
+      if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT)])
+         state->info.mode = vk_conv_topology(ia->topology);
       state->info.primitive_restart = ia->primitiveRestartEnable;
    }
 
@@ -631,9 +662,15 @@ static void handle_graphics_pipeline(struct lvp_cmd_buffer_entry *cmd,
       const VkPipelineViewportStateCreateInfo *vpi= pipeline->graphics_create_info.pViewportState;
       int i;
 
-      state->num_viewports = vpi->viewportCount;
-      state->num_scissors = vpi->scissorCount;
-      state->vp_dirty = true;
+      if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT)]) {
+         state->num_viewports = vpi->viewportCount;
+         state->vp_dirty = true;
+      }
+      if (!dynamic_states[conv_dynamic_state_idx(VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT)]) {
+         state->num_scissors = vpi->scissorCount;
+         state->scissor_dirty = true;
+      }
+
       if (!dynamic_states[VK_DYNAMIC_STATE_VIEWPORT]) {
          for (i = 0; i < vpi->viewportCount; i++)
             get_viewport_xform(&vpi->pViewports[i], state->viewports[i].scale, state->viewports[i].translate);
@@ -678,6 +715,10 @@ static void handle_vertex_buffers(struct lvp_cmd_buffer_entry *cmd,
 
       state->vb[idx].buffer_offset = vcb->offsets[i];
       state->vb[idx].buffer.resource = vcb->buffers[i]->bo;
+
+      if (vcb->strides) {
+         state->vb[idx].stride = vcb->strides[i];
+      }
    }
    if (vcb->first < state->start_vb)
       state->start_vb = vcb->first;
@@ -1461,9 +1502,14 @@ static void handle_set_viewport(struct lvp_cmd_buffer_entry *cmd,
                                 struct rendering_state *state)
 {
    int i;
+   unsigned base = 0;
+   if (cmd->u.set_viewport.first_viewport == UINT32_MAX)
+      state->num_viewports = cmd->u.set_viewport.viewport_count;
+   else
+      base = cmd->u.set_viewport.first_viewport;
 
    for (i = 0; i < cmd->u.set_viewport.viewport_count; i++) {
-      int idx = i + cmd->u.set_viewport.first_viewport;
+      int idx = i + base;
       const VkViewport *vp = &cmd->u.set_viewport.viewports[i];
       get_viewport_xform(vp, state->viewports[idx].scale, state->viewports[idx].translate);
    }
@@ -1474,9 +1520,14 @@ static void handle_set_scissor(struct lvp_cmd_buffer_entry *cmd,
                                struct rendering_state *state)
 {
    int i;
+   unsigned base = 0;
+   if (cmd->u.set_scissor.first_scissor == UINT32_MAX)
+      state->num_scissors = cmd->u.set_scissor.scissor_count;
+   else
+      base = cmd->u.set_scissor.first_scissor;
 
    for (i = 0; i < cmd->u.set_scissor.scissor_count; i++) {
-      int idx = i + cmd->u.set_scissor.first_scissor;
+      int idx = i + base;
       const VkRect2D *ss = &cmd->u.set_scissor.scissors[i];
       state->scissors[idx].minx = ss->offset.x;
       state->scissors[idx].miny = ss->offset.y;
@@ -2660,6 +2711,82 @@ static void handle_end_conditional_rendering(struct rendering_state *state)
    state->pctx->render_condition_mem(state->pctx, NULL, 0, false);
 }
 
+static void handle_set_cull_mode(struct lvp_cmd_buffer_entry *cmd,
+                                 struct rendering_state *state)
+{
+   state->rs_state.cull_face = vk_cull_to_pipe(cmd->u.set_cull_mode.cull_mode);
+   state->rs_dirty = true;
+}
+
+static void handle_set_front_face(struct lvp_cmd_buffer_entry *cmd,
+                                  struct rendering_state *state)
+{
+   state->rs_state.front_ccw = (cmd->u.set_front_face.front_face == VK_FRONT_FACE_COUNTER_CLOCKWISE);
+   state->rs_dirty = true;
+}
+
+static void handle_set_primitive_topology(struct lvp_cmd_buffer_entry *cmd,
+                                          struct rendering_state *state)
+{
+   state->info.mode = vk_conv_topology(cmd->u.set_primitive_topology.prim);
+}
+
+
+static void handle_set_depth_test_enable(struct lvp_cmd_buffer_entry *cmd,
+                                         struct rendering_state *state)
+{
+   state->dsa_state.depth_enabled = cmd->u.set_depth_test_enable.depth_test_enable;
+   state->dsa_dirty = true;
+}
+
+static void handle_set_depth_write_enable(struct lvp_cmd_buffer_entry *cmd,
+                                          struct rendering_state *state)
+{
+   state->dsa_state.depth_writemask = cmd->u.set_depth_write_enable.depth_write_enable;
+   state->dsa_dirty = true;
+}
+
+static void handle_set_depth_compare_op(struct lvp_cmd_buffer_entry *cmd,
+                                        struct rendering_state *state)
+{
+   state->dsa_state.depth_func = cmd->u.set_depth_compare_op.depth_op;
+   state->dsa_dirty = true;
+}
+
+static void handle_set_depth_bounds_test_enable(struct lvp_cmd_buffer_entry *cmd,
+                                                struct rendering_state *state)
+{
+   state->dsa_state.depth_bounds_test = cmd->u.set_depth_bounds_test_enable.depth_bounds_test_enable;
+   state->dsa_dirty = true;
+}
+
+static void handle_set_stencil_test_enable(struct lvp_cmd_buffer_entry *cmd,
+                                           struct rendering_state *state)
+{
+   state->dsa_state.stencil[0].enabled = cmd->u.set_stencil_test_enable.stencil_test_enable;
+   state->dsa_state.stencil[1].enabled = cmd->u.set_stencil_test_enable.stencil_test_enable;
+   state->dsa_dirty = true;
+}
+
+static void handle_set_stencil_op(struct lvp_cmd_buffer_entry *cmd,
+                                  struct rendering_state *state)
+{
+   if (cmd->u.set_stencil_op.face_mask & VK_STENCIL_FACE_FRONT_BIT) {
+      state->dsa_state.stencil[0].func = cmd->u.set_stencil_op.compare_op;
+      state->dsa_state.stencil[0].fail_op = vk_conv_stencil_op(cmd->u.set_stencil_op.fail_op);
+      state->dsa_state.stencil[0].zpass_op = vk_conv_stencil_op(cmd->u.set_stencil_op.pass_op);
+      state->dsa_state.stencil[0].zfail_op = vk_conv_stencil_op(cmd->u.set_stencil_op.depth_fail_op);
+   }
+
+   if (cmd->u.set_stencil_op.face_mask & VK_STENCIL_FACE_BACK_BIT) {
+      state->dsa_state.stencil[1].func = cmd->u.set_stencil_op.compare_op;
+      state->dsa_state.stencil[1].fail_op = vk_conv_stencil_op(cmd->u.set_stencil_op.fail_op);
+      state->dsa_state.stencil[1].zpass_op = vk_conv_stencil_op(cmd->u.set_stencil_op.pass_op);
+      state->dsa_state.stencil[1].zfail_op = vk_conv_stencil_op(cmd->u.set_stencil_op.depth_fail_op);
+   }
+   state->dsa_dirty = true;
+}
+
 static void lvp_execute_cmd_buffer(struct lvp_cmd_buffer *cmd_buffer,
                                    struct rendering_state *state)
 {
@@ -2833,6 +2960,33 @@ static void lvp_execute_cmd_buffer(struct lvp_cmd_buffer *cmd_buffer,
       case LVP_CMD_END_CONDITIONAL_RENDERING:
          handle_end_conditional_rendering(state);
          break;
+      case LVP_CMD_SET_CULL_MODE:
+         handle_set_cull_mode(cmd, state);
+         break;
+      case LVP_CMD_SET_FRONT_FACE:
+         handle_set_front_face(cmd, state);
+         break;
+      case LVP_CMD_SET_PRIMITIVE_TOPOLOGY:
+         handle_set_primitive_topology(cmd, state);
+         break;
+      case LVP_CMD_SET_DEPTH_TEST_ENABLE:
+         handle_set_depth_test_enable(cmd, state);
+         break;
+      case LVP_CMD_SET_DEPTH_WRITE_ENABLE:
+         handle_set_depth_write_enable(cmd, state);
+         break;
+      case LVP_CMD_SET_DEPTH_COMPARE_OP:
+         handle_set_depth_compare_op(cmd, state);
+         break;
+      case LVP_CMD_SET_DEPTH_BOUNDS_TEST_ENABLE:
+         handle_set_depth_bounds_test_enable(cmd, state);
+         break;
+      case LVP_CMD_SET_STENCIL_TEST_ENABLE:
+         handle_set_stencil_test_enable(cmd, state);
+         break;
+      case LVP_CMD_SET_STENCIL_OP:
+         handle_set_stencil_op(cmd, state);
+         break;
       }
    }
 }
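
conv_dynamic_state_idx exists because the extension's VkDynamicState values start at 1000267000, far above VK_DYNAMIC_STATE_STENCIL_REFERENCE (8), so indexing dynamic_states[] directly would need a huge sparse array. The helper compresses the twelve extended states into the slots right after the core ones, which is why the array grows to STENCIL_REFERENCE + 13 entries. A standalone restatement of that mapping (the enum values are the real Vulkan ones, the function name is illustrative):

/* Core dynamic states occupy 0..8 (VK_DYNAMIC_STATE_VIEWPORT through
 * VK_DYNAMIC_STATE_STENCIL_REFERENCE).  The twelve extended states are
 * 1000267000..1000267011 and get remapped to 9..20, so a 21-entry bool
 * table covers everything. */
#include <vulkan/vulkan.h>

#define NUM_DYN_STATES (VK_DYNAMIC_STATE_STENCIL_REFERENCE + 13) /* 9 + 12 */

static int dyn_state_index(VkDynamicState s)
{
   if (s <= VK_DYNAMIC_STATE_STENCIL_REFERENCE)
      return s;                                       /* 0..8 map directly */
   if (s >= VK_DYNAMIC_STATE_CULL_MODE_EXT &&
       s <= VK_DYNAMIC_STATE_STENCIL_OP_EXT)
      return s - VK_DYNAMIC_STATE_CULL_MODE_EXT +
             VK_DYNAMIC_STATE_STENCIL_REFERENCE + 1;  /* 9..20 */
   return -1;                                         /* not tracked */
}

/* Example: VK_DYNAMIC_STATE_STENCIL_OP_EXT (1000267011) lands in slot 20,
 * the last entry of a bool table sized NUM_DYN_STATES. */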
diff --git a/src/gallium/frontends/lavapipe/lvp_private.h b/src/gallium/frontends/lavapipe/lvp_private.h
index 57e5af3..cb9e9d3 100644
--- a/src/gallium/frontends/lavapipe/lvp_private.h
+++ b/src/gallium/frontends/lavapipe/lvp_private.h
@@ -678,6 +678,15 @@ enum lvp_cmds {
    LVP_CMD_DRAW_INDIRECT_BYTE_COUNT,
    LVP_CMD_BEGIN_CONDITIONAL_RENDERING,
    LVP_CMD_END_CONDITIONAL_RENDERING,
+   LVP_CMD_SET_CULL_MODE,
+   LVP_CMD_SET_FRONT_FACE,
+   LVP_CMD_SET_PRIMITIVE_TOPOLOGY,
+   LVP_CMD_SET_DEPTH_TEST_ENABLE,
+   LVP_CMD_SET_DEPTH_WRITE_ENABLE,
+   LVP_CMD_SET_DEPTH_COMPARE_OP,
+   LVP_CMD_SET_DEPTH_BOUNDS_TEST_ENABLE,
+   LVP_CMD_SET_STENCIL_TEST_ENABLE,
+   LVP_CMD_SET_STENCIL_OP,
 };
 
 struct lvp_cmd_bind_pipeline {
@@ -742,6 +751,8 @@ struct lvp_cmd_bind_vertex_buffers {
    uint32_t binding_count;
    struct lvp_buffer **buffers;
    const VkDeviceSize *offsets;
+   const VkDeviceSize *sizes;
+   const VkDeviceSize *strides;
 };
 
 struct lvp_cmd_draw {
@@ -1007,6 +1018,46 @@ struct lvp_cmd_begin_conditional_rendering {
    bool inverted;
 };
 
+struct lvp_cmd_set_cull_mode {
+   VkCullModeFlags cull_mode;
+};
+
+struct lvp_cmd_set_front_face {
+   VkFrontFace front_face;
+};
+
+struct lvp_cmd_set_primitive_topology {
+   VkPrimitiveTopology prim;
+};
+
+struct lvp_cmd_set_depth_test_enable {
+   VkBool32 depth_test_enable;
+};
+
+struct lvp_cmd_set_depth_write_enable {
+   VkBool32 depth_write_enable;
+};
+
+struct lvp_cmd_set_depth_bounds_test_enable {
+   VkBool32 depth_bounds_test_enable;
+};
+
+struct lvp_cmd_set_depth_compare_op {
+   VkCompareOp depth_op;
+};
+
+struct lvp_cmd_set_stencil_test_enable {
+   VkBool32 stencil_test_enable;
+};
+
+struct lvp_cmd_set_stencil_op {
+   VkStencilFaceFlags face_mask;
+   VkStencilOp fail_op;
+   VkStencilOp pass_op;
+   VkStencilOp depth_fail_op;
+   VkCompareOp compare_op;
+};
+
 struct lvp_cmd_buffer_entry {
    struct list_head cmd_link;
    uint32_t cmd_type;
@@ -1054,6 +1105,15 @@ struct lvp_cmd_buffer_entry {
       struct lvp_cmd_end_transform_feedback end_transform_feedback;
       struct lvp_cmd_draw_indirect_byte_count draw_indirect_byte_count;
       struct lvp_cmd_begin_conditional_rendering begin_conditional_rendering;
+      struct lvp_cmd_set_cull_mode set_cull_mode;
+      struct lvp_cmd_set_front_face set_front_face;
+      struct lvp_cmd_set_primitive_topology set_primitive_topology;
+      struct lvp_cmd_set_depth_test_enable set_depth_test_enable;
+      struct lvp_cmd_set_depth_write_enable set_depth_write_enable;
+      struct lvp_cmd_set_depth_compare_op set_depth_compare_op;
+      struct lvp_cmd_set_depth_bounds_test_enable set_depth_bounds_test_enable;
+      struct lvp_cmd_set_stencil_test_enable set_stencil_test_enable;
+      struct lvp_cmd_set_stencil_op set_stencil_op;
    } u;
 };
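
One detail that is easy to miss in the lvp_execute.c changes: the WithCount entry points reuse the existing viewport/scissor commands and mark themselves by recording UINT32_MAX as the first index, which replay then treats as "start at slot 0 and also take over the count". A condensed restatement of that decision, with illustrative struct and function names rather than the lvp ones:

/* Condensed from lvp_CmdSetViewportWithCountEXT + handle_set_viewport:
 * a sentinel first index of UINT32_MAX marks the WithCount variant. */
#include <stdint.h>

struct set_viewport_cmd {
   uint32_t first_viewport;   /* UINT32_MAX => vkCmdSetViewportWithCountEXT */
   uint32_t viewport_count;
};

static uint32_t replay_set_viewport_base(const struct set_viewport_cmd *cmd,
                                         uint32_t *active_viewport_count)
{
   if (cmd->first_viewport == UINT32_MAX) {
      /* WithCount variant: the viewport count itself becomes dynamic. */
      *active_viewport_count = cmd->viewport_count;
      return 0;
   }
   /* Classic vkCmdSetViewport: keep the pipeline's count, offset writes. */
   return cmd->first_viewport;
}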