return true;
}
/* Lower the nir_intrinsic_load_ray_query_global_intel intrinsic (see the
 * dispatch switch in apply_pipeline_layout) to a push-constant load.
 *
 * The intrinsic is removed and every use of its result is rewritten to a
 * 1-component, 64-bit load taken from the ray_query_globals field of
 * struct anv_push_constants.  Presumably this value is a GPU address of
 * the ray-query globals buffer — confirm against the intrinsic's definition.
 *
 * Always returns true to report that the instruction was rewritten.
 */
static bool
lower_ray_query_globals(nir_builder *b, nir_intrinsic_instr *intrin,
                        struct apply_pipeline_layout_state *state)
{
   /* Delete the intrinsic and leave the builder cursor at its old position
    * so the replacement load is emitted in the same place. */
   b->cursor = nir_instr_remove(&intrin->instr);

   /* Emit a 64-bit push-constant load at constant offset 0 within the
    * [base, base + range) window covering exactly the ray_query_globals
    * field of the push-constant block. */
   nir_ssa_def *rq_globals =
      nir_load_push_constant(b, 1, 64, nir_imm_int(b, 0),
                             .base = offsetof(struct anv_push_constants, ray_query_globals),
                             .range = sizeof_field(struct anv_push_constants, ray_query_globals));
   nir_ssa_def_rewrite_uses(&intrin->dest.ssa, rq_globals);

   return true;
}
-
static bool
apply_pipeline_layout(nir_builder *b, nir_instr *instr, void *_state)
{
return lower_load_constant(b, intrin, state);
case nir_intrinsic_load_base_workgroup_id:
return lower_base_workgroup_id(b, intrin, state);
- case nir_intrinsic_load_ray_query_global_intel:
- return lower_ray_query_globals(b, intrin, state);
default:
return false;
}
NIR_PASS(_, nir, nir_lower_explicit_io, nir_var_mem_push_const,
nir_address_format_32bit_offset);
- NIR_PASS(_, nir, brw_nir_lower_ray_queries, &pdevice->info);
-
/* Apply the actual pipeline layout to UBOs, SSBOs, and textures */
NIR_PASS_V(nir, anv_nir_apply_pipeline_layout,
pdevice, pipeline->device->robust_buffer_access,
} else {
anv_pipeline_add_executable(pipeline, stage, bin->stats, 0);
}
-
- pipeline->ray_queries = MAX2(pipeline->ray_queries, bin->prog_data->ray_queries);
}
static void
* genX(cmd_buffer_emit_hashing_mode)().
*/
unsigned current_hash_scale;
-
- /**
- * A buffer used for spill/fill of ray queries.
- */
- struct anv_bo * ray_query_shadow_bo;
};
#define ANV_MIN_CMD_BUFFER_BATCH_SIZE 8192
enum anv_pipeline_type type;
VkPipelineCreateFlags flags;
- uint32_t ray_queries;
-
struct util_dynarray executables;
const struct intel_l3_config * l3_config;