      struct intel_renderbuffer *irb =
         intel_renderbuffer(fb->_ColorDrawBuffers[i]);
-      if (irb) {
-         brw_render_cache_set_add_bo(brw, irb->mt->bo);
-         intel_miptree_used_for_rendering(brw, irb->mt);
-      }
+      if (!irb)
+         continue;
+
+      brw_render_cache_set_add_bo(brw, irb->mt->bo);
+      intel_miptree_used_for_rendering(
+         brw, irb->mt, irb->mt_level, irb->mt_layer, irb->layer_count);
   }
}
BLORP_HIZ_OP_DEPTH_RESOLVE);
}
+enum intel_fast_clear_state
+intel_miptree_get_fast_clear_state(const struct intel_mipmap_tree *mt,
+                                   unsigned level, unsigned layer)
+{
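+   /* Fast clear state is still tracked once for the miptree as a whole;
+    * level and layer belong to the per-slice interface but are not
+    * consulted yet.
+    */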
+   return mt->fast_clear_state;
+}
+
static void
intel_miptree_check_color_resolve(const struct intel_mipmap_tree *mt,
                                  unsigned level, unsigned layer)
   (void)layer;
}
+void
+intel_miptree_set_fast_clear_state(struct intel_mipmap_tree *mt,
+                                   unsigned level,
+                                   unsigned first_layer,
+                                   unsigned num_layers,
+                                   enum intel_fast_clear_state new_state)
+{
+   intel_miptree_check_color_resolve(mt, level, first_layer);
+
+   assert(first_layer + num_layers <= mt->physical_depth0);
+
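+   /* Only one fast clear state is kept per miptree for now, so the new
+    * state effectively applies to the whole surface rather than just the
+    * requested layer range.
+    */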
+   mt->fast_clear_state = new_state;
+}
+
+void
+intel_miptree_used_for_rendering(const struct brw_context *brw,
+                                 struct intel_mipmap_tree *mt, unsigned level,
+                                 unsigned start_layer, unsigned num_layers)
+{
+   const bool is_lossless_compressed =
+      intel_miptree_is_lossless_compressed(brw, mt);
+
+   for (unsigned i = 0; i < num_layers; ++i) {
+      const enum intel_fast_clear_state fast_clear_state =
+         intel_miptree_get_fast_clear_state(mt, level, start_layer + i);
+
+      /* If the buffer was previously in fast clear state, change it to
+       * unresolved state, since it won't be guaranteed to be clear after
+       * rendering occurs. Lossless-compressed buffers are left compressed
+       * by rendering, so they are flagged unresolved unconditionally.
+       */
+      if (is_lossless_compressed ||
+          fast_clear_state == INTEL_FAST_CLEAR_STATE_CLEAR) {
+         intel_miptree_set_fast_clear_state(
+            mt, level, start_layer + i, 1,
+            INTEL_FAST_CLEAR_STATE_UNRESOLVED);
+      }
+   }
+}
+
bool
intel_miptree_resolve_color(struct brw_context *brw,
                            struct intel_mipmap_tree *mt, unsigned level,
/**\}*/
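+/**
+ * Query the fast clear state of a single slice (level/layer) of a miptree.
+ */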
+enum intel_fast_clear_state
+intel_miptree_get_fast_clear_state(const struct intel_mipmap_tree *mt,
+                                   unsigned level, unsigned layer);
+
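+/**
+ * Set the fast clear state for a range of layers within one level of a
+ * miptree.
+ */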
+void
+intel_miptree_set_fast_clear_state(struct intel_mipmap_tree *mt,
+                                   unsigned level,
+                                   unsigned first_layer,
+                                   unsigned num_layers,
+                                   enum intel_fast_clear_state new_state);
+
/**
 * Update the fast clear state for a miptree to indicate that it has been used
 * for rendering.
 */
-static inline void
+void
intel_miptree_used_for_rendering(const struct brw_context *brw,
-                                 struct intel_mipmap_tree *mt)
-{
-   /* If the buffer was previously in fast clear state, change it to
-    * unresolved state, since it won't be guaranteed to be clear after
-    * rendering occurs.
-    */
-   if (mt->fast_clear_state == INTEL_FAST_CLEAR_STATE_CLEAR ||
-       intel_miptree_is_lossless_compressed(brw, mt))
-      mt->fast_clear_state = INTEL_FAST_CLEAR_STATE_UNRESOLVED;
-}
+                                 struct intel_mipmap_tree *mt, unsigned level,
+                                 unsigned start_layer, unsigned num_layers);
/**
 * Flag values telling color resolve pass which special types of buffers