/* nir_lower_io_to_scalar is required before this */
assert(intr->src[0].ssa->num_components == 1);
/* No intrinsic should store undef. */
- assert(intr->src[0].ssa->parent_instr->type != nir_instr_type_ssa_undef);
+ assert(intr->src[0].ssa->parent_instr->type != nir_instr_type_undef);
/* Gather the output. */
struct ac_out_info *out_info = &outputs[sem.location];
get_phi_operand(isel_context* ctx, nir_def* ssa, RegClass rc, bool logical)
{
Temp tmp = get_ssa_temp(ctx, ssa);
- if (ssa->parent_instr->type == nir_instr_type_ssa_undef) {
+ if (ssa->parent_instr->type == nir_instr_type_undef) {
return Operand(rc);
} else if (logical && ssa->bit_size == 1 &&
ssa->parent_instr->type == nir_instr_type_load_const) {
case nir_instr_type_intrinsic: visit_intrinsic(ctx, nir_instr_as_intrinsic(instr)); break;
case nir_instr_type_tex: visit_tex(ctx, nir_instr_as_tex(instr)); break;
case nir_instr_type_phi: visit_phi(ctx, nir_instr_as_phi(instr)); break;
- case nir_instr_type_ssa_undef: visit_undef(ctx, nir_instr_as_ssa_undef(instr)); break;
+ case nir_instr_type_undef: visit_undef(ctx, nir_instr_as_undef(instr)); break;
case nir_instr_type_deref: break;
case nir_instr_type_jump: visit_jump(ctx, nir_instr_as_jump(instr)); break;
default: isel_err(instr, "Unknown NIR instr type");
regclasses[tex->def.index] = rc;
break;
}
- case nir_instr_type_ssa_undef: {
- unsigned num_components = nir_instr_as_ssa_undef(instr)->def.num_components;
- unsigned bit_size = nir_instr_as_ssa_undef(instr)->def.bit_size;
+ case nir_instr_type_undef: {
+ unsigned num_components = nir_instr_as_undef(instr)->def.num_components;
+ unsigned bit_size = nir_instr_as_undef(instr)->def.bit_size;
RegClass rc = get_reg_class(ctx, RegType::sgpr, num_components, bit_size);
- regclasses[nir_instr_as_ssa_undef(instr)->def.index] = rc;
+ regclasses[nir_instr_as_undef(instr)->def.index] = rc;
break;
}
case nir_instr_type_phi: {
break;
case nir_instr_type_phi:
break;
- case nir_instr_type_ssa_undef:
- visit_ssa_undef(ctx, nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ visit_ssa_undef(ctx, nir_instr_as_undef(instr));
break;
case nir_instr_type_jump:
if (!visit_jump(&ctx->ac, nir_instr_as_jump(instr)))
agx_emit_phi(b, nir_instr_as_phi(instr));
break;
- case nir_instr_type_ssa_undef:
- agx_emit_undef(b, nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ agx_emit_undef(b, nir_instr_as_undef(instr));
break;
default:
nir_foreach_instr(instr, block) {
switch (instr->type) {
case nir_instr_type_alu:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_load_const:
if (--cost <= 0)
return false;
ntq_emit_load_const(c, nir_instr_as_load_const(instr));
break;
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
unreachable("Should've been lowered by nir_lower_undef_to_zero");
break;
struct v3d_compile *c = (struct v3d_compile *) data;
switch (instr->type) {
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_load_const:
case nir_instr_type_alu:
case nir_instr_type_deref:
unsigned bit_size)
{
nir_undef_instr *instr = gc_alloc(shader->gctx, nir_undef_instr, 1);
- instr_init(&instr->instr, nir_instr_type_ssa_undef);
+ instr_init(&instr->instr, nir_instr_type_undef);
nir_def_init(&instr->instr, &instr->def, num_components, bit_size);
case nir_instr_type_load_const:
return &nir_instr_as_load_const(instr)->def;
- case nir_instr_type_ssa_undef:
- return &nir_instr_as_ssa_undef(instr)->def;
+ case nir_instr_type_undef:
+ return &nir_instr_as_undef(instr)->def;
case nir_instr_type_call:
case nir_instr_type_jump:
nir_instr_type_intrinsic,
nir_instr_type_load_const,
nir_instr_type_jump,
- nir_instr_type_ssa_undef,
+ nir_instr_type_undef,
nir_instr_type_phi,
nir_instr_type_parallel_copy,
} nir_instr_type;
/* Returns true if the value feeding this source was produced by an
 * SSA-undef instruction (i.e. it carries no defined value).
 */
static inline bool
nir_src_is_undef(nir_src src)
{
   return src.ssa->parent_instr->type == nir_instr_type_undef;
}
static inline bool
type, nir_instr_type_intrinsic)
NIR_DEFINE_CAST(nir_instr_as_load_const, nir_instr, nir_load_const_instr, instr,
type, nir_instr_type_load_const)
-NIR_DEFINE_CAST(nir_instr_as_ssa_undef, nir_instr, nir_undef_instr, instr,
- type, nir_instr_type_ssa_undef)
+NIR_DEFINE_CAST(nir_instr_as_undef, nir_instr, nir_undef_instr, instr,
+ type, nir_instr_type_undef)
NIR_DEFINE_CAST(nir_instr_as_phi, nir_instr, nir_phi_instr, instr,
type, nir_instr_type_phi)
NIR_DEFINE_CAST(nir_instr_as_parallel_copy, nir_instr,
/* Returns true if the scalar's defining instruction is an SSA-undef,
 * meaning the component holds no defined value.
 */
static inline bool
nir_scalar_is_undef(nir_scalar s)
{
   return s.def->parent_instr->type == nir_instr_type_undef;
}
static inline nir_const_value
return &clone_intrinsic(state, nir_instr_as_intrinsic(instr))->instr;
case nir_instr_type_load_const:
return &clone_load_const(state, nir_instr_as_load_const(instr))->instr;
- case nir_instr_type_ssa_undef:
- return &clone_ssa_undef(state, nir_instr_as_ssa_undef(instr))->instr;
+ case nir_instr_type_undef:
+ return &clone_ssa_undef(state, nir_instr_as_undef(instr))->instr;
case nir_instr_type_tex:
return &clone_tex(state, nir_instr_as_tex(instr))->instr;
case nir_instr_type_phi:
return visit_tex(nir_instr_as_tex(instr));
case nir_instr_type_load_const:
return visit_load_const(nir_instr_as_load_const(instr));
- case nir_instr_type_ssa_undef:
- return visit_ssa_undef(nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ return visit_ssa_undef(nir_instr_as_undef(instr));
case nir_instr_type_deref:
return visit_deref(shader, nir_instr_as_deref(instr));
case nir_instr_type_jump:
phi->def.divergent = true;
return true;
}
- if (src->src.ssa->parent_instr->type != nir_instr_type_ssa_undef) {
+ if (src->src.ssa->parent_instr->type != nir_instr_type_undef) {
defined_srcs++;
}
}
static bool
def_after(nir_def *a, nir_def *b)
{
- if (a->parent_instr->type == nir_instr_type_ssa_undef)
+ if (a->parent_instr->type == nir_instr_type_undef)
return false;
- if (b->parent_instr->type == nir_instr_type_ssa_undef)
+ if (b->parent_instr->type == nir_instr_type_undef)
return true;
/* If they're in the same block, we can rely on whichever instruction
static bool
ssa_def_dominates(nir_def *a, nir_def *b)
{
- if (a->parent_instr->type == nir_instr_type_ssa_undef) {
+ if (a->parent_instr->type == nir_instr_type_undef) {
/* SSA undefs always dominate */
return true;
}
def_replace_with_reg(nir_def *def, nir_function_impl *impl)
{
/* These are handled elsewhere */
- assert(def->parent_instr->type != nir_instr_type_ssa_undef &&
+ assert(def->parent_instr->type != nir_instr_type_undef &&
def->parent_instr->type != nir_instr_type_load_const);
nir_builder b = nir_builder_create(impl);
const unsigned num_ssa = impl->ssa_alloc;
nir_foreach_instr_safe(instr, block) {
- if (instr->type == nir_instr_type_ssa_undef) {
+ if (instr->type == nir_instr_type_undef) {
/* Undefs are just a read of something never written. */
- nir_undef_instr *undef = nir_instr_as_ssa_undef(instr);
+ nir_undef_instr *undef = nir_instr_as_undef(instr);
nir_def *reg = decl_reg_for_ssa_def(&b, &undef->def);
nir_rewrite_uses_to_load_reg(&b, &undef->def, reg);
} else if (instr->type == nir_instr_type_load_const) {
instr->type == nir_instr_type_deref ||
instr->type == nir_instr_type_tex ||
instr->type == nir_instr_type_load_const ||
- instr->type == nir_instr_type_ssa_undef)
+ instr->type == nir_instr_type_undef)
return true;
if (instr->type == nir_instr_type_intrinsic &&
case nir_instr_type_load_const:
return cb(&nir_instr_as_load_const(instr)->def, state);
- case nir_instr_type_ssa_undef:
- return cb(&nir_instr_as_ssa_undef(instr)->def, state);
+ case nir_instr_type_undef:
+ return cb(&nir_instr_as_undef(instr)->def, state);
case nir_instr_type_call:
case nir_instr_type_jump:
}
case nir_instr_type_load_const:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
return true;
default:
return nir_intrinsic_can_reorder(nir_instr_as_intrinsic(instr));
case nir_instr_type_call:
case nir_instr_type_jump:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
return false;
case nir_instr_type_parallel_copy:
default:
}
case nir_instr_type_call:
case nir_instr_type_jump:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_parallel_copy:
default:
unreachable("Invalid instruction type");
* least one isn't dead.
*/
return true;
- } else if (a->parent_instr->type == nir_instr_type_ssa_undef ||
- b->parent_instr->type == nir_instr_type_ssa_undef) {
+ } else if (a->parent_instr->type == nir_instr_type_undef ||
+ b->parent_instr->type == nir_instr_type_undef) {
/* If either variable is an ssa_undef, then there's no interference */
return false;
} else if (a->parent_instr->index < b->parent_instr->index) {
nir_def *index = nir_ssa_for_src(&b, deref->arr.index, 1);
nir_def *scalar =
nir_vector_extract(&b, &intrin->def, index);
- if (scalar->parent_instr->type == nir_instr_type_ssa_undef) {
+ if (scalar->parent_instr->type == nir_instr_type_undef) {
nir_def_rewrite_uses(&intrin->def,
scalar);
nir_instr_remove(&intrin->instr);
case nir_instr_type_phi:
return lower_phi_instr(b, nir_instr_as_phi(instr));
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_intrinsic: {
bool progress = false;
nir_foreach_def(instr, rewrite_1bit_ssa_def_to_32bit, &progress);
}
case nir_instr_type_intrinsic:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_phi: {
bool progress = false;
nir_foreach_def(instr, rewrite_1bit_ssa_def_to_32bit, &progress);
}
case nir_instr_type_intrinsic:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_phi: {
bool progress = false;
nir_foreach_def(instr, rewrite_1bit_ssa_def_to_32bit, &progress);
}
case nir_instr_type_intrinsic:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_phi:
case nir_instr_type_tex:
break;
/* These are trivially scalarizable */
return true;
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
/* The caller of this function is going to OR the results and we don't
* want undefs to count so we return false.
*/
}
}
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_load_const:
return true;
static bool
lower_undef_instr_to_zero(nir_builder *b, nir_instr *instr, UNUSED void *_state)
{
- if (instr->type != nir_instr_type_ssa_undef)
+ if (instr->type != nir_instr_type_undef)
return false;
- nir_undef_instr *und = nir_instr_as_ssa_undef(instr);
+ nir_undef_instr *und = nir_instr_as_undef(instr);
b->cursor = nir_instr_remove(&und->instr);
nir_def *zero = nir_imm_zero(b, und->def.num_components,
und->def.bit_size);
}
/* No sense storing from undef, just return the write mask */
- if (src->parent_instr->type == nir_instr_type_ssa_undef)
+ if (src->parent_instr->type == nir_instr_type_undef)
return write_mask;
b->cursor = nir_before_instr(&vec->instr);
nir_load_const_instr *lc = nir_instr_as_load_const(instr);
return is_def_live(&lc->def, defs_live);
}
- case nir_instr_type_ssa_undef: {
- nir_undef_instr *undef = nir_instr_as_ssa_undef(instr);
+ case nir_instr_type_undef: {
+ nir_undef_instr *undef = nir_instr_as_undef(instr);
return is_def_live(&undef->def, defs_live);
}
case nir_instr_type_parallel_copy: {
/* These are trivially scalarizable */
return true;
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
return true;
case nir_instr_type_intrinsic: {
break;
case nir_instr_type_jump:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_phi:
instr->pass_flags = GCM_INSTR_PLACED;
break;
nir_foreach_phi_src(src_of_phi, phi) {
if (src_of_phi->pred == prev_block) {
if (src_of_phi->src.ssa->parent_instr->type !=
- nir_instr_type_ssa_undef) {
+ nir_instr_type_undef) {
is_prev_result_undef = false;
}
case nir_instr_type_deref:
case nir_instr_type_load_const:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_phi:
/* These are all safe */
continue;
case nir_instr_type_deref:
case nir_instr_type_load_const:
case nir_instr_type_phi:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_tex:
break;
case nir_instr_type_deref:
case nir_instr_type_load_const:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
break;
case nir_instr_type_alu: {
* this for them.
*/
if (instr->type == nir_instr_type_load_const ||
- instr->type == nir_instr_type_ssa_undef)
+ instr->type == nir_instr_type_undef)
return 0;
return options->instr_cost_cb(instr, options->cb_data);
return can_move_intrinsic(nir_instr_as_intrinsic(instr), ctx);
case nir_instr_type_load_const:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
return true;
case nir_instr_type_deref: {
case nir_instr_type_load_const:
return opt_shrink_vectors_load_const(nir_instr_as_load_const(instr));
- case nir_instr_type_ssa_undef:
- return opt_shrink_vectors_ssa_undef(nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ return opt_shrink_vectors_ssa_undef(nir_instr_as_undef(instr));
case nir_instr_type_phi:
return opt_shrink_vectors_phi(b, nir_instr_as_phi(instr));
{
switch (instr->type) {
case nir_instr_type_load_const:
- case nir_instr_type_ssa_undef: {
+ case nir_instr_type_undef: {
return options & nir_move_const_undef;
}
case nir_instr_type_alu: {
for (int i = 1; i <= 2; i++) {
nir_instr *parent = instr->src[i].src.ssa->parent_instr;
- if (parent->type != nir_instr_type_ssa_undef)
+ if (parent->type != nir_instr_type_undef)
continue;
/* We can't just use nir_alu_src_copy, because we need the def/use
return false;
for (unsigned i = 0; i < nir_op_infos[alu->op].num_inputs; i++) {
- if (alu->src[i].src.ssa->parent_instr->type != nir_instr_type_ssa_undef)
+ if (alu->src[i].src.ssa->parent_instr->type != nir_instr_type_undef)
return false;
}
{
nir_instr *instr = def->parent_instr;
- if (instr->type == nir_instr_type_ssa_undef)
+ if (instr->type == nir_instr_type_undef)
return BITSET_MASK(def->num_components);
if (instr->type != nir_instr_type_alu)
if (nir_op_is_vec(alu->op)) {
for (int i = 0; i < nir_op_infos[alu->op].num_inputs; i++) {
if (alu->src[i].src.ssa->parent_instr->type ==
- nir_instr_type_ssa_undef) {
+ nir_instr_type_undef) {
undef |= BITSET_MASK(nir_ssa_alu_instr_src_components(alu, i)) << i;
}
}
print_jump_instr(nir_instr_as_jump(instr), state);
break;
- case nir_instr_type_ssa_undef:
- print_ssa_undef_instr(nir_instr_as_ssa_undef(instr), state);
+ case nir_instr_type_undef:
+ print_ssa_undef_instr(nir_instr_as_undef(instr), state);
break;
case nir_instr_type_phi:
case nir_instr_type_deref:
case nir_instr_type_alu:
case nir_instr_type_tex:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_phi:
case nir_instr_type_parallel_copy:
return true;
case nir_instr_type_deref:
case nir_instr_type_jump:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_load_const:
break; /* Nothing to do */
add_read_dep(state, state->jump, n);
switch (instr->type) {
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_load_const:
case nir_instr_type_alu:
case nir_instr_type_deref:
}
switch (instr->type) {
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_load_const:
case nir_instr_type_alu:
case nir_instr_type_deref:
case nir_instr_type_load_const:
write_load_const(ctx, nir_instr_as_load_const(instr));
break;
- case nir_instr_type_ssa_undef:
- write_ssa_undef(ctx, nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ write_ssa_undef(ctx, nir_instr_as_undef(instr));
break;
case nir_instr_type_tex:
write_tex(ctx, nir_instr_as_tex(instr));
case nir_instr_type_load_const:
instr = &read_load_const(ctx, header)->instr;
break;
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
instr = &read_ssa_undef(ctx, header)->instr;
break;
case nir_instr_type_tex:
switch (instr->type) {
case nir_instr_type_load_const:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
return invariant;
case nir_instr_type_call:
return not_invariant;
/* SSA-only instruction types */
nir_instr *parent = value->parent_instr;
nontrivial |= (parent->type == nir_instr_type_load_const) ||
- (parent->type == nir_instr_type_ssa_undef);
+ (parent->type == nir_instr_type_undef);
/* Must be written in the same block */
nontrivial |= (parent->block != block);
validate_phi_instr(nir_instr_as_phi(instr), state);
break;
- case nir_instr_type_ssa_undef:
- validate_ssa_undef_instr(nir_instr_as_ssa_undef(instr), state);
+ case nir_instr_type_undef:
+ validate_ssa_undef_instr(nir_instr_as_undef(instr), state);
break;
case nir_instr_type_jump:
nir_foreach_phi_src (nsrc, nphi) {
if (blk->nblock == nsrc->pred) {
- if (nsrc->src.ssa->parent_instr->type == nir_instr_type_ssa_undef) {
+ if (nsrc->src.ssa->parent_instr->type == nir_instr_type_undef) {
/* Create an ir3 undef */
return NULL;
} else {
case nir_instr_type_load_const:
emit_load_const(ctx, nir_instr_as_load_const(instr));
break;
- case nir_instr_type_ssa_undef:
- emit_undef(ctx, nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ emit_undef(ctx, nir_instr_as_undef(instr));
break;
case nir_instr_type_tex: {
nir_tex_instr *tex = nir_instr_as_tex(instr);
{
(void)unused;
- nir_undef_instr *undef = nir_instr_as_ssa_undef(instr);
+ nir_undef_instr *undef = nir_instr_as_undef(instr);
unsigned num_comp = undef->def.num_components;
nir_def *components[num_comp];
{
(void)unused;
- return instr->type == nir_instr_type_ssa_undef &&
- nir_instr_as_ssa_undef(instr)->def.bit_size == 64;
+ return instr->type == nir_instr_type_undef &&
+ nir_instr_as_undef(instr)->def.bit_size == 64;
}
bool
case nir_instr_type_alu:
case nir_instr_type_deref:
case nir_instr_type_load_const:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
/* These could be safely moved around */
break;
case nir_instr_type_intrinsic: {
case nir_instr_type_phi:
assert(0);
break;
- case nir_instr_type_ssa_undef:
- visit_ssa_undef(bld_base, nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ visit_ssa_undef(bld_base, nir_instr_as_undef(instr));
break;
case nir_instr_type_jump:
visit_jump(bld_base, nir_instr_as_jump(instr));
* the specific swizzles from an undef don't matter)
*/
if (nir_src_bit_size(instr->src[i].src) == 64 &&
- !(src.src.is_ssa && src.src.ssa->parent_instr->type == nir_instr_type_ssa_undef)) {
+ !(src.src.is_ssa && src.src.ssa->parent_instr->type == nir_instr_type_undef)) {
int chan1 = 1;
if (nir_op_infos[instr->op].input_sizes[i] == 0) {
chan1 = instr->def.num_components > 1 ? 1 : 0;
ntt_emit_jump(c, nir_instr_as_jump(instr));
break;
- case nir_instr_type_ssa_undef:
- ntt_emit_ssa_undef(c, nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ ntt_emit_ssa_undef(c, nir_instr_as_undef(instr));
break;
default:
case nir_instr_type_alu:
case nir_instr_type_tex:
return ra_src(c, src);
- case nir_instr_type_ssa_undef: {
+ case nir_instr_type_undef: {
/* return zero to deal with broken Blur demo */
nir_const_value value = CONST(0);
return src_swizzle(const_src(c, &value, 1), SWIZZLE(X,X,X,X));
assert(nir_instr_is_last(instr));
break;
case nir_instr_type_load_const:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
case nir_instr_type_deref:
break;
default:
switch (instr->type) {
case nir_instr_type_load_const:
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
return true;
case nir_instr_type_alu: {
/* alu op bypass */
case nir_instr_type_jump:
ctx->block_has_jump[ctx->block_idx] = true;
break;
- case nir_instr_type_ssa_undef:
- emit_undef(ctx, nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ emit_undef(ctx, nir_instr_as_undef(instr));
break;
default:
break;
[nir_instr_type_alu] = gpir_emit_alu,
[nir_instr_type_intrinsic] = gpir_emit_intrinsic,
[nir_instr_type_load_const] = gpir_emit_load_const,
- [nir_instr_type_ssa_undef] = gpir_emit_ssa_undef,
+ [nir_instr_type_undef] = gpir_emit_ssa_undef,
[nir_instr_type_tex] = gpir_emit_tex,
[nir_instr_type_jump] = gpir_emit_jump,
};
static bool ppir_emit_ssa_undef(ppir_block *block, nir_instr *ni)
{
- nir_undef_instr *undef = nir_instr_as_ssa_undef(ni);
+ nir_undef_instr *undef = nir_instr_as_undef(ni);
ppir_node *node = ppir_node_create_ssa(block, ppir_op_undef, &undef->def);
if (!node)
return false;
[nir_instr_type_alu] = ppir_emit_alu,
[nir_instr_type_intrinsic] = ppir_emit_intrinsic,
[nir_instr_type_load_const] = ppir_emit_load_const,
- [nir_instr_type_ssa_undef] = ppir_emit_ssa_undef,
+ [nir_instr_type_undef] = ppir_emit_ssa_undef,
[nir_instr_type_tex] = ppir_emit_tex,
[nir_instr_type_jump] = ppir_emit_jump,
};
return TexInstr::from_nir(nir_instr_as_tex(instr), shader);
case nir_instr_type_jump:
return process_jump(nir_instr_as_jump(instr), shader);
- case nir_instr_type_ssa_undef:
- return process_undef(nir_instr_as_ssa_undef(instr), shader);
+ case nir_instr_type_undef:
+ return process_undef(nir_instr_as_undef(instr), shader);
default:
fprintf(stderr, "Instruction type %d not supported\n", instr->type);
return false;
auto lc = nir_instr_as_load_const(instr);
return lc->def.bit_size == 64;
}
- case nir_instr_type_ssa_undef: {
- auto undef = nir_instr_as_ssa_undef(instr);
+ case nir_instr_type_undef: {
+ auto undef = nir_instr_as_undef(instr);
return undef->def.bit_size == 64;
}
default:
return nir_build_imm(b, 2 * lc->def.num_components, 32, val);
}
- case nir_instr_type_ssa_undef: {
- auto undef = nir_instr_as_ssa_undef(instr);
+ case nir_instr_type_undef: {
+ auto undef = nir_instr_as_undef(instr);
undef->def.num_components *= 2;
undef->def.bit_size = 32;
return NIR_LOWER_INSTR_PROGRESS;
ntq_emit_load_const(c, nir_instr_as_load_const(instr));
break;
- case nir_instr_type_ssa_undef:
- ntq_emit_ssa_undef(c, nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ ntq_emit_ssa_undef(c, nir_instr_as_undef(instr));
break;
case nir_instr_type_tex:
case nir_instr_type_load_const:
emit_load_const(ctx, nir_instr_as_load_const(instr));
break;
- case nir_instr_type_ssa_undef:
- emit_undef(ctx, nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ emit_undef(ctx, nir_instr_as_undef(instr));
break;
case nir_instr_type_tex:
emit_tex(ctx, nir_instr_as_tex(instr));
nir_emit_load_const(abld, nir_instr_as_load_const(instr));
break;
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
/* We create a new VGRF for undefs on every use (by handling
* them in get_nir_src()), rather than for each definition.
* This helps register coalescing eliminate MOVs from undef.
nir_emit_texture(nir_instr_as_tex(instr));
break;
- case nir_instr_type_ssa_undef:
- nir_emit_undef(nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ nir_emit_undef(nir_instr_as_undef(instr));
break;
default:
return emit_phi(ctx, nir_instr_as_phi(instr));
case nir_instr_type_tex:
return emit_tex(ctx, nir_instr_as_tex(instr));
- case nir_instr_type_ssa_undef:
- return emit_undefined(ctx, nir_instr_as_ssa_undef(instr));
+ case nir_instr_type_undef:
+ return emit_undefined(ctx, nir_instr_as_undef(instr));
default:
log_nir_instr_unsupported(ctx->logger, "Unimplemented instruction type",
instr);
return visit(nir_instr_as_jump(insn));
case nir_instr_type_load_const:
return visit(nir_instr_as_load_const(insn));
- case nir_instr_type_ssa_undef:
- return visit(nir_instr_as_ssa_undef(insn));
+ case nir_instr_type_undef:
+ return visit(nir_instr_as_undef(insn));
case nir_instr_type_tex:
return visit(nir_instr_as_tex(insn));
default:
emit_jump(ctx, nir_instr_as_jump(instr));
break;
- case nir_instr_type_ssa_undef:
+ case nir_instr_type_undef:
/* Spurious */
break;