agx_batch_add_bo(batch, batch->encoder);
agx_batch_add_bo(batch, batch->scissor.bo);
+ agx_batch_add_bo(batch, batch->depth_bias.bo);
agx_batch_add_bo(batch, dev->internal.bo);
agx_batch_add_bo(batch, dev->reload.bo);
ctx->batch->encoder->ptr.gpu,
encoder_id,
ctx->batch->scissor.bo->ptr.gpu,
+ ctx->batch->depth_bias.bo->ptr.gpu,
pipeline_null.gpu,
pipeline_clear,
pipeline_store,
ctx->batch->encoder = agx_bo_create(agx_device(screen), 0x80000, AGX_MEMORY_TYPE_FRAMEBUFFER);
ctx->batch->encoder_current = ctx->batch->encoder->ptr.cpu;
ctx->batch->scissor.bo = agx_bo_create(agx_device(screen), 0x80000, AGX_MEMORY_TYPE_FRAMEBUFFER);
+ ctx->batch->depth_bias.bo = agx_bo_create(agx_device(screen), 0x80000, AGX_MEMORY_TYPE_FRAMEBUFFER);
/* Upload fixed shaders (TODO: compile them?) */
struct agx_context *ctx = agx_context(pctx);
struct agx_rasterizer *so = cso;
- /* Check if scissor state has changed, since scissor enable is part of the
- * rasterizer state but everything else needed for scissors is part of
- * viewport/scissor states */
- bool scissor_changed = (cso == NULL) || (ctx->rast == NULL) ||
- (ctx->rast->base.scissor != so->base.scissor);
+ /* Check if scissor or depth bias state has changed, since scissor/depth bias
+ * enable is part of the rasterizer state but everything else needed for
+ * scissors and depth bias is part of the scissor/depth bias arrays */
+ bool scissor_zbias_changed = (cso == NULL) || (ctx->rast == NULL) ||
+ (ctx->rast->base.scissor != so->base.scissor) ||
+ (ctx->rast->base.offset_tri != so->base.offset_tri);
ctx->rast = so;
- if (scissor_changed)
- ctx->dirty |= AGX_DIRTY_SCISSOR;
+ if (scissor_zbias_changed)
+ ctx->dirty |= AGX_DIRTY_SCISSOR_ZBIAS;
}
static enum agx_wrap
assert(num_scissors == 1 && "no geometry shaders");
ctx->scissor = *scissor;
- ctx->dirty |= AGX_DIRTY_SCISSOR;
+ ctx->dirty |= AGX_DIRTY_SCISSOR_ZBIAS;
}
static void
};
}
+/* Pack one hardware DEPTH_BIAS record (polygon offset state) into the
+ * batch's depth-bias array and return its index, so the draw-time
+ * SET_INDEX record can reference it alongside the scissor index.
+ *
+ * NOTE(review): index is computed as `unsigned` but returned as uint16_t;
+ * presumably fewer than 65536 depth-bias records per batch — confirm.
+ * NOTE(review): no bounds check against the depth_bias BO size (allocated
+ * as 0x80000 elsewhere in this patch) — assumes the batch is flushed
+ * before the array can overflow; verify count is reset per batch.
+ */
+static uint16_t
+agx_upload_depth_bias(struct agx_batch *batch,
+ const struct pipe_rasterizer_state *rast)
+{
+ /* CPU-visible base of the packed depth-bias array in the batch BO. */
+ struct agx_depth_bias_packed *ptr = batch->depth_bias.bo->ptr.cpu;
+ /* Append: claim the next free slot in the array. */
+ unsigned index = (batch->depth_bias.count++);
+
+ /* Map Gallium polygon-offset state onto the hardware record fields. */
+ agx_pack(ptr + index, DEPTH_BIAS, cfg) {
+ cfg.depth_bias = rast->offset_units;
+ cfg.slope_scale = rast->offset_scale;
+ cfg.clamp = rast->offset_clamp;
+ }
+
+ return index;
+}
+
/* A framebuffer state can be reused across batches, so it doesn't make sense
* to add surfaces to the BO list here. Instead we added them when flushing.
*/
* optimize this out if the viewport is the default and the app does not
* use the scissor test) */
cfg.scissor_enable = true;
+
+ cfg.depth_bias_enable = rast->base.offset_tri;
};
/* Words 2-3: front */
}
static uint64_t
-agx_set_index(struct agx_pool *pool, unsigned scissor)
+agx_set_index(struct agx_pool *pool, uint16_t scissor, uint16_t zbias)
{
struct agx_ptr T = agx_pool_alloc_aligned(pool, AGX_SET_INDEX_LENGTH, 64);
agx_pack(T.cpu, SET_INDEX, cfg) {
cfg.scissor = scissor;
+ cfg.depth_bias = zbias;
};
return T.gpu;
agx_push_record(&out, 7, demo_rasterizer(ctx, pool, is_points));
agx_push_record(&out, 5, demo_unk11(pool, is_lines, is_points, reads_tib, sample_mask_from_shader));
- if (ctx->dirty & (AGX_DIRTY_VIEWPORT | AGX_DIRTY_SCISSOR)) {
+ unsigned zbias = 0;
+
+ if (ctx->rast->base.offset_tri) {
+ zbias = agx_upload_depth_bias(ctx->batch, &ctx->rast->base);
+ ctx->dirty |= AGX_DIRTY_SCISSOR_ZBIAS;
+ }
+
+ if (ctx->dirty & (AGX_DIRTY_VIEWPORT | AGX_DIRTY_SCISSOR_ZBIAS)) {
struct agx_viewport_scissor vps = agx_upload_viewport_scissor(pool,
ctx->batch, &ctx->viewport,
ctx->rast->base.scissor ? &ctx->scissor : NULL);
agx_push_record(&out, 10, vps.viewport);
- agx_push_record(&out, 2, agx_set_index(pool, vps.scissor));
+ agx_push_record(&out, 2, agx_set_index(pool, vps.scissor, zbias));
}
agx_push_record(&out, 3, demo_unk12(pool));