-Wed Jan 14 19:36:08 1998 Gavin Kock (gavin@cygnus.com)
+Wed Jan 14 22:49:17 1998 Richard Henderson <rth@cygnus.com>
+
+ * alias.c: Change all uses of gen_rtx(FOO...) to gen_rtx_FOO;
+ change gen_rtx(expr...) to gen_rtx_fmt_foo(expr...).
+ * caller-save.c, calls.c, combine.c, cse.c: Likewise.
+ * dwarf2out.c, except.c, explow.c, expmed.c, expr.c: Likewise.
+ * final.c, flow.c, function.c, genpeep.c, haifa-sched.c: Likewise.
+ * halfpic.c, integrate.c, jump.c, local-alloc.c, loop.c: Likewise.
+ * profile.c, recog.c, reg-stack.c, regclass.c, regmove.c: Likewise.
+ * reload.c, reload1.c, reorg.c, sched.c, stmt.c, stupid.c: Likewise.
+ * unroll.c, varasm.c: Likewise.
+ * config/alpha/alpha.c, config/alpha/alpha.md: Likewise.
+
+Wed Jan 14 19:36:08 1998 Gavin Koch (gavin@cygnus.com)
* mips.h: Fix some type-o's from a previous change.
&& (XEXP (src, 0) == arg_pointer_rtx
|| (GET_CODE (XEXP (src, 0)) == PLUS
&& XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
- return gen_rtx (ADDRESS, VOIDmode, src);
+ return gen_rtx_ADDRESS (VOIDmode, src);
return 0;
case CONST:
return;
}
reg_seen[regno] = 1;
- new_reg_base_value[regno] = gen_rtx (ADDRESS, Pmode,
- GEN_INT (unique_id++));
+ new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
+ GEN_INT (unique_id++));
return;
}
return plus_constant_for_output (x1, INTVAL (x0));
else if (GET_CODE (x1) == CONST_INT)
return plus_constant_for_output (x0, INTVAL (x1));
- return gen_rtx (PLUS, GET_MODE (x), x0, x1);
+ return gen_rtx_PLUS (GET_MODE (x), x0, x1);
}
}
/* This gives us much better alias analysis when called from
rtx addr = canon_rtx (XEXP (x, 0));
if (addr != XEXP (x, 0))
{
- rtx new = gen_rtx (MEM, GET_MODE (x), addr);
+ rtx new = gen_rtx_MEM (GET_MODE (x), addr);
MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (TEST_HARD_REG_BIT (argument_registers, i))
- new_reg_base_value[i] = gen_rtx (ADDRESS, VOIDmode,
- gen_rtx (REG, Pmode, i));
+ new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
+ gen_rtx_REG (Pmode, i));
new_reg_base_value[STACK_POINTER_REGNUM]
- = gen_rtx (ADDRESS, Pmode, stack_pointer_rtx);
+ = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
new_reg_base_value[ARG_POINTER_REGNUM]
- = gen_rtx (ADDRESS, Pmode, arg_pointer_rtx);
+ = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
new_reg_base_value[FRAME_POINTER_REGNUM]
- = gen_rtx (ADDRESS, Pmode, frame_pointer_rtx);
+ = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
- = gen_rtx (ADDRESS, Pmode, hard_frame_pointer_rtx);
+ = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
#endif
if (struct_value_incoming_rtx
&& GET_CODE (struct_value_incoming_rtx) == REG)
new_reg_base_value[REGNO (struct_value_incoming_rtx)]
- = gen_rtx (ADDRESS, Pmode, struct_value_incoming_rtx);
+ = gen_rtx_ADDRESS (Pmode, struct_value_incoming_rtx);
if (static_chain_rtx
&& GET_CODE (static_chain_rtx) == REG)
new_reg_base_value[REGNO (static_chain_rtx)]
- = gen_rtx (ADDRESS, Pmode, static_chain_rtx);
+ = gen_rtx_ADDRESS (Pmode, static_chain_rtx);
/* Walk the insns adding values to the new_reg_base_value array. */
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (i == FIRST_PSEUDO_REGISTER)
abort ();
- addr_reg = gen_rtx (REG, Pmode, i);
+ addr_reg = gen_rtx_REG (Pmode, i);
for (offset = 1 << (HOST_BITS_PER_INT / 2); offset; offset >>= 1)
{
- address = gen_rtx (PLUS, Pmode, addr_reg, GEN_INT (offset));
+ address = gen_rtx_PLUS (Pmode, addr_reg, GEN_INT (offset));
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (regno_save_mode[i][1] != VOIDmode
for (j = 1; j <= MOVE_MAX / UNITS_PER_WORD; j++)
if (regno_save_mode[i][j] != VOIDmode)
{
- rtx mem = gen_rtx (MEM, regno_save_mode[i][j], address);
- rtx reg = gen_rtx (REG, regno_save_mode[i][j], i);
- rtx savepat = gen_rtx (SET, VOIDmode, mem, reg);
- rtx restpat = gen_rtx (SET, VOIDmode, reg, mem);
+ rtx mem = gen_rtx_MEM (regno_save_mode[i][j], address);
+ rtx reg = gen_rtx_REG (regno_save_mode[i][j], i);
+ rtx savepat = gen_rtx_SET (VOIDmode, mem, reg);
+ rtx restpat = gen_rtx_SET (VOIDmode, reg, mem);
rtx saveinsn = emit_insn (savepat);
rtx restinsn = emit_insn (restpat);
int ok;
{
/* This should not depend on WORDS_BIG_ENDIAN.
The order of words in regs is the same as in memory. */
- rtx temp = gen_rtx (MEM, regno_save_mode[i+k][1],
- XEXP (regno_save_mem[i][j], 0));
+ rtx temp = gen_rtx_MEM (regno_save_mode[i+k][1],
+ XEXP (regno_save_mem[i][j], 0));
regno_save_mem[i+k][1]
= adj_offsettable_operand (temp, k * UNITS_PER_WORD);
if (! ok)
continue;
- pat = gen_rtx (SET, VOIDmode, regno_save_mem[regno][i],
- gen_rtx (REG, GET_MODE (regno_save_mem[regno][i]), regno));
+ pat = gen_rtx_SET (VOIDmode, regno_save_mem[regno][i],
+ gen_rtx_REG (GET_MODE (regno_save_mem[regno][i]),
+ regno));
code = reg_save_code[regno][i];
/* Set hard_regs_saved for all the registers we saved. */
if (! ok)
continue;
- pat = gen_rtx (SET, VOIDmode,
- gen_rtx (REG, GET_MODE (regno_save_mem[regno][i]),
- regno),
+ pat = gen_rtx_SET (VOIDmode,
+ gen_rtx_REG (GET_MODE (regno_save_mem[regno][i]),
+ regno),
regno_save_mem[regno][i]);
code = reg_restore_code[regno][i];
if (valreg)
pat = gen_call_value_pop (valreg,
- gen_rtx (MEM, FUNCTION_MODE, funexp),
+ gen_rtx_MEM (FUNCTION_MODE, funexp),
stack_size_rtx, next_arg_reg, n_pop);
else
- pat = gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, funexp),
+ pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
stack_size_rtx, next_arg_reg, n_pop);
emit_call_insn (pat);
{
if (valreg)
emit_call_insn (gen_call_value (valreg,
- gen_rtx (MEM, FUNCTION_MODE, funexp),
+ gen_rtx_MEM (FUNCTION_MODE, funexp),
stack_size_rtx, next_arg_reg,
NULL_RTX));
else
- emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp),
+ emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
stack_size_rtx, next_arg_reg,
struct_value_size_rtx));
}
{
if (!already_popped)
CALL_INSN_FUNCTION_USAGE (call_insn)
- = gen_rtx (EXPR_LIST, VOIDmode,
- gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx),
- CALL_INSN_FUNCTION_USAGE (call_insn));
+ = gen_rtx_EXPR_LIST (VOIDmode,
+ gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
+ CALL_INSN_FUNCTION_USAGE (call_insn));
stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
stack_size_rtx = GEN_INT (stack_size);
}
pending_stack_adjust = 0;
}
- copy = gen_rtx (MEM, BLKmode,
- allocate_dynamic_stack_space (size_rtx,
- NULL_RTX,
- TYPE_ALIGN (type)));
+ copy = gen_rtx_MEM (BLKmode,
+ allocate_dynamic_stack_space (size_rtx,
+ NULL_RTX,
+ TYPE_ALIGN (type)));
}
else
{
if (GET_CODE (offset) == CONST_INT)
addr = plus_constant (arg_reg, INTVAL (offset));
else
- addr = gen_rtx (PLUS, Pmode, arg_reg, offset);
+ addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
addr = plus_constant (addr, arg_offset);
- args[i].stack = gen_rtx (MEM, args[i].mode, addr);
+ args[i].stack = gen_rtx_MEM (args[i].mode, addr);
MEM_IN_STRUCT_P (args[i].stack)
= AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value));
if (GET_CODE (slot_offset) == CONST_INT)
addr = plus_constant (arg_reg, INTVAL (slot_offset));
else
- addr = gen_rtx (PLUS, Pmode, arg_reg, slot_offset);
+ addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
addr = plus_constant (addr, arg_offset);
- args[i].stack_slot = gen_rtx (MEM, args[i].mode, addr);
+ args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
}
}
BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
save_mode = BLKmode;
- stack_area = gen_rtx (MEM, save_mode,
- memory_address (save_mode,
+ stack_area = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
#ifdef ARGS_GROW_DOWNWARD
- plus_constant (argblock,
- - high_to_save)
+ plus_constant (argblock,
+ - high_to_save)
#else
- plus_constant (argblock,
- low_to_save)
+ plus_constant (argblock,
+ low_to_save)
#endif
- ));
+ ));
if (save_mode == BLKmode)
{
save_area = assign_stack_temp (BLKmode, num_to_save, 0);
else if (args[i].n_aligned_regs != 0)
for (j = 0; j < args[i].n_aligned_regs; j++)
- emit_move_insn (gen_rtx (REG, word_mode, REGNO (reg) + j),
+ emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
args[i].aligned_regs[j]);
else if (partial == 0 || args[i].pass_on_stack)
arguments in order as well as the function name. */
#ifdef PUSH_ARGS_REVERSED
for (i = 0; i < num_actuals; i++)
- note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
+ note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
#else
for (i = num_actuals - 1; i >= 0; i--)
- note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
+ note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
#endif
- note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note);
+ note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
insns = get_insns ();
end_sequence ();
anything else. */
last = get_last_insn ();
REG_NOTES (last) =
- gen_rtx (EXPR_LIST, REG_NOALIAS, temp, REG_NOTES (last));
+ gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
/* Write out the sequence. */
insns = get_insns ();
{
if (target == 0 || GET_CODE (target) != MEM)
{
- target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
- memory_address (TYPE_MODE (TREE_TYPE (exp)),
- structure_value_addr));
+ target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
+ memory_address (TYPE_MODE (TREE_TYPE (exp)),
+ structure_value_addr));
MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
}
}
/* This is the special C++ case where we need to
know what the true target was. We take care to
never use this value more than once in one expression. */
- target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
- copy_to_reg (valreg));
+ target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
+ copy_to_reg (valreg));
MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (TREE_TYPE (exp));
}
/* Handle calls that return values in multiple non-contiguous locations.
!= promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
abort ();
- target = gen_rtx (SUBREG, TYPE_MODE (type), target, 0);
+ target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
SUBREG_PROMOTED_VAR_P (target) = 1;
SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
}
{
enum machine_mode save_mode = GET_MODE (save_area);
rtx stack_area
- = gen_rtx (MEM, save_mode,
- memory_address (save_mode,
+ = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
#ifdef ARGS_GROW_DOWNWARD
- plus_constant (argblock, - high_to_save)
+ plus_constant (argblock,
+ - high_to_save)
#else
- plus_constant (argblock, low_to_save)
+ plus_constant (argblock,
+ low_to_save)
#endif
- ));
+ ));
if (save_mode != BLKmode)
emit_move_insn (stack_area, save_area);
{
enum machine_mode save_mode = GET_MODE (args[i].save_area);
rtx stack_area
- = gen_rtx (MEM, save_mode,
- memory_address (save_mode,
- XEXP (args[i].stack_slot, 0)));
+ = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
+ XEXP (args[i].stack_slot, 0)));
if (save_mode != BLKmode)
emit_move_insn (stack_area, args[i].save_area);
BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
save_mode = BLKmode;
- stack_area = gen_rtx (MEM, save_mode,
- memory_address (save_mode,
+ stack_area = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
#ifdef ARGS_GROW_DOWNWARD
- plus_constant (argblock,
- - high_to_save)
+ plus_constant (argblock,
+ - high_to_save)
#else
- plus_constant (argblock,
- low_to_save)
+ plus_constant (argblock,
+ low_to_save)
#endif
- ));
+ ));
if (save_mode == BLKmode)
{
save_area = assign_stack_temp (BLKmode, num_to_save, 0);
= mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
MODE_INT, 1);
rtx stack_area
- = gen_rtx (MEM, save_mode,
- memory_address (save_mode, plus_constant (argblock,
- argvec[argnum].offset.constant)));
+ = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
+ plus_constant (argblock, argvec[argnum].offset.constant)));
argvec[argnum].save_area = gen_reg_rtx (save_mode);
emit_move_insn (argvec[argnum].save_area, stack_area);
}
{
enum machine_mode save_mode = GET_MODE (save_area);
rtx stack_area
- = gen_rtx (MEM, save_mode,
- memory_address (save_mode,
+ = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
#ifdef ARGS_GROW_DOWNWARD
- plus_constant (argblock, - high_to_save)
+ plus_constant (argblock, - high_to_save)
#else
- plus_constant (argblock, low_to_save)
+ plus_constant (argblock, low_to_save)
#endif
- ));
+ ));
if (save_mode != BLKmode)
emit_move_insn (stack_area, save_area);
{
enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
rtx stack_area
- = gen_rtx (MEM, save_mode,
- memory_address (save_mode, plus_constant (argblock,
- argvec[count].offset.constant)));
+ = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
+ plus_constant (argblock, argvec[count].offset.constant)));
emit_move_insn (stack_area, argvec[count].save_area);
}
rtx pointer_reg
= hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
0);
- mem_value = gen_rtx (MEM, outmode, pointer_reg);
+ mem_value = gen_rtx_MEM (outmode, pointer_reg);
pcc_struct_value = 1;
if (value == 0)
value = gen_reg_rtx (outmode);
BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
save_mode = BLKmode;
- stack_area = gen_rtx (MEM, save_mode,
- memory_address (save_mode,
+ stack_area = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
#ifdef ARGS_GROW_DOWNWARD
- plus_constant (argblock,
- - high_to_save)
+ plus_constant (argblock,
+ - high_to_save)
#else
- plus_constant (argblock,
- low_to_save)
+ plus_constant (argblock,
+ low_to_save)
#endif
- ));
+ ));
if (save_mode == BLKmode)
{
save_area = assign_stack_temp (BLKmode, num_to_save, 0);
= mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
MODE_INT, 1);
rtx stack_area
- = gen_rtx (MEM, save_mode,
- memory_address (save_mode, plus_constant (argblock,
- argvec[argnum].offset.constant)));
+ = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
+ plus_constant (argblock,
+ argvec[argnum].offset.constant)));
argvec[argnum].save_area = gen_reg_rtx (save_mode);
emit_move_insn (argvec[argnum].save_area, stack_area);
}
{
enum machine_mode save_mode = GET_MODE (save_area);
rtx stack_area
- = gen_rtx (MEM, save_mode,
- memory_address (save_mode,
+ = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
#ifdef ARGS_GROW_DOWNWARD
- plus_constant (argblock, - high_to_save)
+ plus_constant (argblock, - high_to_save)
#else
- plus_constant (argblock, low_to_save)
+ plus_constant (argblock, low_to_save)
#endif
- ));
-
+ ));
if (save_mode != BLKmode)
emit_move_insn (stack_area, save_area);
else
{
enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
rtx stack_area
- = gen_rtx (MEM, save_mode,
+ = gen_rtx_MEM (save_mode,
memory_address (save_mode, plus_constant (argblock,
argvec[count].offset.constant)));
{
/* I have no idea how to guarantee that this
will work in the presence of register parameters. */
- target = gen_rtx (PLUS, Pmode, args_addr, offset_rtx);
+ target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
target = memory_address (QImode, target);
}
- return gen_rtx (MEM, BLKmode, target);
+ return gen_rtx_MEM (BLKmode, target);
}
#endif
\f
enum machine_mode save_mode
= mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
rtx stack_area
- = gen_rtx (MEM, save_mode,
- memory_address (save_mode, XEXP (arg->stack_slot, 0)));
+ = gen_rtx_MEM (save_mode,
+ memory_address (save_mode,
+ XEXP (arg->stack_slot, 0)));
if (save_mode == BLKmode)
{
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if (FUNCTION_ARG_REGNO_P (regno)
&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
- record_value_for_reg (reg, first,
- gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
- GET_MODE (reg),
- gen_rtx (CLOBBER, mode, const0_rtx)));
+ {
+ record_value_for_reg
+ (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
+ : SIGN_EXTEND),
+ GET_MODE (reg),
+ gen_rtx_CLOBBER (mode, const0_rtx)));
+ }
#endif
}
\f
as I2 will not cause a problem. */
subst_prev_insn = i1
- = gen_rtx (INSN, VOIDmode, INSN_UID (i2), NULL_RTX, i2,
- XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX, NULL_RTX);
+ = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
+ XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
+ NULL_RTX);
SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
I2DEST. */
i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
- ? gen_rtx (SET, VOIDmode, i2dest, i2src)
+ ? gen_rtx_SET (VOIDmode, i2dest, i2src)
: PATTERN (i2));
if (added_sets_2)
!= GET_MODE (SET_DEST (newpat))))
{
int regno = REGNO (SET_DEST (newpat));
- rtx new_dest = gen_rtx (REG, compare_mode, regno);
+ rtx new_dest = gen_rtx_REG (compare_mode, regno);
if (regno < FIRST_PSEUDO_REGISTER
|| (REG_N_SETS (regno) == 1 && ! added_sets_2
{
rtvec old = XVEC (newpat, 0);
total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
- newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
+ newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
sizeof (old->elem[0]) * old->num_elem);
}
{
rtx old = newpat;
total_sets = 1 + added_sets_1 + added_sets_2;
- newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
+ newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
XVECEXP (newpat, 0, 0) = old;
}
if (added_sets_1)
XVECEXP (newpat, 0, --total_sets)
= (GET_CODE (PATTERN (i1)) == PARALLEL
- ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
+ ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
if (added_sets_2)
{
&& (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
|| (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
&& ! REG_USERVAR_P (i2dest))))
- ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
+ ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
REGNO (i2dest));
- m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
- gen_rtvec (2, newpat,
- gen_rtx (CLOBBER,
- VOIDmode,
- ni2dest))),
- i3);
+ m_split = split_insns
+ (gen_rtx_PARALLEL (VOIDmode,
+ gen_rtvec (2, newpat,
+ gen_rtx_CLOBBER (VOIDmode,
+ ni2dest))),
+ i3);
}
if (m_split && GET_CODE (m_split) == SEQUENCE
validated that we can do this. */
if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
{
- newdest = gen_rtx (REG, split_mode, REGNO (i2dest));
+ newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
that destination. */
PATTERN (i3) = newpat;
- distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
+ distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
/* I3 now uses what used to be its destination and which is
now I2's destination. That means we need a LOG_LINK from
REG_N_DEATHS (REGNO (i3dest_killed))++;
if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
- distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
- NULL_RTX),
+ distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
+ NULL_RTX),
NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
else
- distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
- NULL_RTX),
+ distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
+ NULL_RTX),
NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
elim_i2, elim_i1);
}
REG_N_DEATHS (REGNO (i2dest))++;
if (newi2pat && reg_set_p (i2dest, newi2pat))
- distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
+ distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
else
- distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
+ distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
NULL_RTX, NULL_RTX);
}
REG_N_DEATHS (REGNO (i1dest))++;
if (newi2pat && reg_set_p (i1dest, newi2pat))
- distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
+ distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
else
- distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
+ distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
NULL_RTX, NULL_RTX);
}
&& ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
{
rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
- rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
+ rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
subst_insn);
/* This should have produced two insns, each of which sets our
So force this insn not to match in this (rare) case. */
if (! in_dest && code == REG && GET_CODE (from) == REG
&& REGNO (x) == REGNO (from))
- return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
+ return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
/* If this is an object, we are done unless it is a MEM or LO_SUM, both
of which may contain things that can be combined. */
&& ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
)
- return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
+ return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
n_occurrences++;
gen_binary (reverse_condition (cond_code),
mode, cond, cop1));
else
- return gen_rtx (IF_THEN_ELSE, mode,
- gen_binary (cond_code, VOIDmode, cond, cop1),
- true, false);
+ return gen_rtx_IF_THEN_ELSE (mode,
+ gen_binary (cond_code, VOIDmode,
+ cond, cop1),
+ true, false);
code = GET_CODE (x);
op0_mode = VOIDmode;
if that would change the meaning of the address. */
if (MEM_VOLATILE_P (SUBREG_REG (x))
|| mode_dependent_address_p (XEXP (inner, 0)))
- return gen_rtx (CLOBBER, mode, const0_rtx);
+ return gen_rtx_CLOBBER (mode, const0_rtx);
if (BYTES_BIG_ENDIAN)
{
}
/* Note if the plus_constant doesn't make a valid address
then this combination won't be accepted. */
- x = gen_rtx (MEM, mode,
- plus_constant (XEXP (inner, 0),
- (SUBREG_WORD (x) * UNITS_PER_WORD
- + endian_offset)));
+ x = gen_rtx_MEM (mode,
+ plus_constant (XEXP (inner, 0),
+ (SUBREG_WORD (x) * UNITS_PER_WORD
+ + endian_offset)));
MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
{
if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
mode))
- return gen_rtx (REG, mode,
- REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
+ return gen_rtx_REG (mode,
+ REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
else
- return gen_rtx (CLOBBER, mode, const0_rtx);
+ return gen_rtx_CLOBBER (mode, const0_rtx);
}
/* For a constant, try to pick up the part we want. Handle a full
but this doesn't seem common enough to bother with. */
if (GET_CODE (XEXP (x, 0)) == ASHIFT
&& XEXP (XEXP (x, 0), 0) == const1_rtx)
- return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx),
- XEXP (XEXP (x, 0), 1));
+ return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
+ XEXP (XEXP (x, 0), 1));
if (GET_CODE (XEXP (x, 0)) == SUBREG
&& subreg_lowpart_p (XEXP (x, 0))
{
enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
- x = gen_rtx (ROTATE, inner_mode,
- gen_unary (NOT, inner_mode, inner_mode, const1_rtx),
- XEXP (SUBREG_REG (XEXP (x, 0)), 1));
+ x = gen_rtx_ROTATE (inner_mode,
+ gen_unary (NOT, inner_mode, inner_mode,
+ const1_rtx),
+ XEXP (SUBREG_REG (XEXP (x, 0)), 1));
return gen_lowpart_for_combine (mode, x);
}
if (compare_mode != GET_MODE (dest))
{
int regno = REGNO (dest);
- rtx new_dest = gen_rtx (REG, compare_mode, regno);
+ rtx new_dest = gen_rtx_REG (compare_mode, regno);
if (regno < FIRST_PSEUDO_REGISTER
|| (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
&& GET_CODE (XEXP (op1, 1)) == CONST_INT
&& (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
== GET_MODE_BITSIZE (mode)))
- return gen_rtx (ROTATE, mode, XEXP (op0, 0),
- (GET_CODE (op0) == ASHIFT
- ? XEXP (op0, 1) : XEXP (op1, 1)));
+ return gen_rtx_ROTATE (mode, XEXP (op0, 0),
+ (GET_CODE (op0) == ASHIFT
+ ? XEXP (op0, 1) : XEXP (op1, 1)));
/* If OP0 is (ashiftrt (plus ...) C), it might actually be
a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
with a (use (mem ...)) construct that only combine understands
and is used only for this purpose. */
if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
- SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
+ SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
if (BITS_BIG_ENDIAN)
pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
>> 1))
== 0))))
{
- rtx temp = gen_rtx (SIGN_EXTEND, GET_MODE (x), XEXP (x, 0));
+ rtx temp = gen_rtx_SIGN_EXTEND (GET_MODE (x), XEXP (x, 0));
if (rtx_cost (temp, SET) < rtx_cost (x, SET))
return expand_compound_operation (temp);
surround INNER with a USE to indicate this. */
if (GET_CODE (pos) == CONST_INT
&& INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
- inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
+ inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);
if (BITS_BIG_ENDIAN)
{
== ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
{
- x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
- gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
- SET_SRC (x)));
+ x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
+ gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
+ SET_SRC (x)));
continue;
}
else
/* Now compute the equivalent expression. Make a copy of INNER
for the SET_DEST in case it is a MEM into which we will substitute;
we don't want shared RTL in that case. */
- x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
- gen_binary (IOR, compute_mode,
- gen_binary (AND, compute_mode,
- gen_unary (NOT, compute_mode,
- compute_mode,
- gen_binary (ASHIFT,
- compute_mode,
- mask, pos)),
- inner),
- gen_binary (ASHIFT, compute_mode,
- gen_binary (AND, compute_mode,
- gen_lowpart_for_combine
- (compute_mode,
- SET_SRC (x)),
- mask),
- pos)));
+ x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
+ gen_binary (IOR, compute_mode,
+ gen_binary (AND, compute_mode,
+ gen_unary (NOT, compute_mode,
+ compute_mode,
+ gen_binary (ASHIFT,
+ compute_mode,
+ mask, pos)),
+ inner),
+ gen_binary (ASHIFT, compute_mode,
+ gen_binary (AND, compute_mode,
+ gen_lowpart_for_combine
+ (compute_mode,
+ SET_SRC (x)),
+ mask),
+ pos)));
}
return x;
else
offset = pos / BITS_PER_UNIT;
- new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
+ new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
/* We can't call gen_lowpart_for_combine here since we always want
a SUBREG and it would sometimes return a new hard register. */
if (tmode != inner_mode)
- new = gen_rtx (SUBREG, tmode, inner,
- (WORDS_BIG_ENDIAN
- && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
- ? (((GET_MODE_SIZE (inner_mode)
- - GET_MODE_SIZE (tmode))
- / UNITS_PER_WORD)
- - pos / BITS_PER_WORD)
- : pos / BITS_PER_WORD));
+ new = gen_rtx_SUBREG (tmode, inner,
+ (WORDS_BIG_ENDIAN
+ && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
+ ? (((GET_MODE_SIZE (inner_mode)
+ - GET_MODE_SIZE (tmode))
+ / UNITS_PER_WORD)
+ - pos / BITS_PER_WORD)
+ : pos / BITS_PER_WORD));
else
new = inner;
}
if (in_dest)
return (GET_CODE (new) == MEM ? new
: (GET_CODE (new) != SUBREG
- ? gen_rtx (CLOBBER, tmode, const0_rtx)
+ ? gen_rtx_CLOBBER (tmode, const0_rtx)
: gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
/* Otherwise, sign- or zero-extend unless we already are in the
if (offset != 0 || inner_mode != wanted_inner_mode)
{
- rtx newmem = gen_rtx (MEM, wanted_inner_mode,
- plus_constant (XEXP (inner, 0), offset));
+ rtx newmem = gen_rtx_MEM (wanted_inner_mode,
+ plus_constant (XEXP (inner, 0), offset));
RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
{
/* Apply the distributive law, and then try to make extractions. */
new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
- gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
- XEXP (x, 1)),
- gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
- XEXP (x, 1)));
+ gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
+ XEXP (x, 1)),
+ gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
+ XEXP (x, 1)));
new = make_compound_operation (new, in_code);
}
assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
1, 1, 1, 0);
if (assign != 0)
- return gen_rtx (SET, VOIDmode, assign, const0_rtx);
+ return gen_rtx_SET (VOIDmode, assign, const0_rtx);
return x;
}
XEXP (SUBREG_REG (XEXP (src, 0)), 1),
1, 1, 1, 0);
if (assign != 0)
- return gen_rtx (SET, VOIDmode, assign, const0_rtx);
+ return gen_rtx_SET (VOIDmode, assign, const0_rtx);
return x;
}
assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
1, 1, 1, 0);
if (assign != 0)
- return gen_rtx (SET, VOIDmode, assign, const1_rtx);
+ return gen_rtx_SET (VOIDmode, assign, const1_rtx);
return x;
}
if (x)
return x;
- return gen_rtx (code, mode, varop, GEN_INT (count));
+ return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (count));
}
/* Unless one of the branches of the `if' in this loop does a `continue',
MODE_INT, 1)) != BLKmode)
{
if (BYTES_BIG_ENDIAN)
- new = gen_rtx (MEM, tmode, XEXP (varop, 0));
+ new = gen_rtx_MEM (tmode, XEXP (varop, 0));
else
- new = gen_rtx (MEM, tmode,
- plus_constant (XEXP (varop, 0),
- count / BITS_PER_UNIT));
+ new = gen_rtx_MEM (tmode,
+ plus_constant (XEXP (varop, 0),
+ count / BITS_PER_UNIT));
RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
them. Then check to make sure that all of them are dead. */
if (num_clobbers_to_add)
{
- rtx newpat = gen_rtx (PARALLEL, VOIDmode,
- gen_rtvec (GET_CODE (pat) == PARALLEL
- ? XVECLEN (pat, 0) + num_clobbers_to_add
- : num_clobbers_to_add + 1));
+ rtx newpat = gen_rtx_PARALLEL (VOIDmode,
+ gen_rtvec (GET_CODE (pat) == PARALLEL
+ ? XVECLEN (pat, 0) + num_clobbers_to_add
+ : num_clobbers_to_add + 1));
if (GET_CODE (pat) == PARALLEL)
for (i = 0; i < XVECLEN (pat, 0); i++)
return -1;
else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH)
(*padded_scratches)++;
- notes = gen_rtx (EXPR_LIST, REG_UNUSED,
- XEXP (XVECEXP (newpat, 0, i), 0), notes);
+ notes = gen_rtx_EXPR_LIST (REG_UNUSED,
+ XEXP (XVECEXP (newpat, 0, i), 0), notes);
}
pat = newpat;
}
&& (GET_CODE (x) == CONST_INT
|| GET_CODE (x) == CONST_DOUBLE))
|| GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
- return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
+ return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
/* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
won't know what to do. So we will strip off the SUBREG here and
/* Refuse to work on a volatile memory ref or one with a mode-dependent
address. */
if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
- return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
+ return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
/* If we want to refer to something bigger than the original memref,
generate a perverse subreg instead. That will force a reload
of the original memref X. */
if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
- return gen_rtx (SUBREG, mode, x, 0);
+ return gen_rtx_SUBREG (mode, x, 0);
if (WORDS_BIG_ENDIAN)
offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
- MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
}
- new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
+ new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
word = ((GET_MODE_SIZE (GET_MODE (x))
- MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
/ UNITS_PER_WORD);
- return gen_rtx (SUBREG, mode, x, word);
+ return gen_rtx_SUBREG (mode, x, word);
}
}
\f
&& reg_last_set_label[j] > tick))
{
if (replace)
- *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
+ *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
return replace;
}
&& INSN_CUID (insn) <= mem_last_set)
{
if (replace)
- *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
+ *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
return replace;
}
if (reg_mentioned_p (x, value))
value = replace_rtx (copy_rtx (value), x,
- gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
+ gen_rtx_CLOBBER (GET_MODE (x), const0_rtx));
if (reg_overlap_mentioned_p (x, value))
return 0;
for (i = deadregno; i < deadend; i++)
if (i < regno || i >= ourend)
REG_NOTES (where_dead)
- = gen_rtx (EXPR_LIST, REG_DEAD,
- gen_rtx (REG, reg_raw_mode[i], i),
- REG_NOTES (where_dead));
+ = gen_rtx_EXPR_LIST (REG_DEAD,
+ gen_rtx_REG (reg_raw_mode[i], i),
+ REG_NOTES (where_dead));
}
/* If we didn't find any note, or if we found a REG_DEAD note that
covers only part of the given reg, and we have a multi-reg hard
offset = 1;
for (i = regno + offset; i < ourend; i++)
- move_deaths (gen_rtx (REG, reg_raw_mode[i], i),
+ move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
maybe_kill_insn, from_cuid, to_insn, &oldnotes);
}
*pnotes = note;
}
else
- *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
+ *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
REG_N_DEATHS (regno)++;
}
if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
{
place
- = emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (note, 0)),
+ = emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (note, 0)),
tem);
/* If this insn was emitted between blocks, then update
if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
&& ! find_regno_fusage (place, USE, i))
{
- rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
+ rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
rtx p;
/* See if we already placed a USE note for this
if (p)
{
rtx use_insn
- = emit_insn_before (gen_rtx (USE, VOIDmode,
- piece),
+ = emit_insn_before (gen_rtx_USE (VOIDmode,
+ piece),
p);
REG_NOTES (use_insn)
- = gen_rtx (EXPR_LIST, REG_DEAD, piece,
- REG_NOTES (use_insn));
+ = gen_rtx_EXPR_LIST (REG_DEAD, piece,
+ REG_NOTES (use_insn));
}
all_used = 0;
for (i = regno; i < endregno; i++)
{
- rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
+ rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
if ((reg_referenced_p (piece, PATTERN (place))
|| (GET_CODE (place) == CALL_INSN
&& ! dead_or_set_p (place, piece)
&& ! reg_bitfield_target_p (piece,
PATTERN (place)))
- REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
- piece,
- REG_NOTES (place));
+ REG_NOTES (place)
+ = gen_rtx_EXPR_LIST (REG_DEAD,
+ piece, REG_NOTES (place));
}
place = 0;
&& GET_CODE (XEXP (note, 0)) == REG)
REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
- REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
- XEXP (note, 0), REG_NOTES (place2));
+ REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
+ REG_NOTE_KIND (note),
+ XEXP (note, 0),
+ REG_NOTES (place2));
}
}
}
if (GET_CODE (base) == PLUS)
offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);
- *paligned_mem = gen_rtx (MEM, SImode,
+ *paligned_mem = gen_rtx_MEM (SImode,
plus_constant (base, offset & ~3));
MEM_IN_STRUCT_P (*paligned_mem) = MEM_IN_STRUCT_P (ref);
MEM_VOLATILE_P (*paligned_mem) = MEM_VOLATILE_P (ref);
if (target == NULL)
target = gen_reg_rtx (mode);
- emit_insn (gen_rtx (SET, VOIDmode, target, GEN_INT (c)));
+ emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (c)));
return target;
}
else if (n >= 2 + (extra != 0))
This avoids emitting spurious compares. */
if (signed_comparison_operator (cmp, cmp_op_mode)
&& (op0 == CONST0_RTX (cmp_mode) || op1 == CONST0_RTX (cmp_mode)))
- return gen_rtx (code, VOIDmode, op0, op1);
+ return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
/* We can't put the comparison insides a conditional move;
emit a compare instruction and put that inside the
}
tem = gen_reg_rtx (cmp_op_mode);
- emit_move_insn (tem, gen_rtx (code, cmp_op_mode, op0, op1));
- return gen_rtx (cmov_code, VOIDmode, tem, CONST0_RTX (cmp_op_mode));
+ emit_move_insn (tem, gen_rtx_fmt_ee (code, cmp_op_mode, op0, op1));
+ return gen_rtx_fmt_ee (cmov_code, VOIDmode, tem, CONST0_RTX (cmp_op_mode));
}
\f
/* Use ext[wlq][lh] as the Architecture Handbook describes for extracting
emit_move_insn (meml,
change_address (mem, DImode,
- gen_rtx (AND, DImode,
- plus_constant (XEXP (mem, 0), ofs),
- GEN_INT (-8))));
+ gen_rtx_AND (DImode,
+ plus_constant (XEXP (mem, 0),
+ ofs),
+ GEN_INT (-8))));
emit_move_insn (memh,
change_address (mem, DImode,
- gen_rtx (AND, DImode,
- plus_constant (XEXP (mem, 0),
- ofs + size - 1),
- GEN_INT (-8))));
+ gen_rtx_AND (DImode,
+ plus_constant (XEXP (mem, 0),
+ ofs + size - 1),
+ GEN_INT (-8))));
if (sign && size == 2)
{
insh = gen_reg_rtx (DImode);
meml = change_address (dst, DImode,
- gen_rtx (AND, DImode,
- plus_constant (XEXP (dst, 0), ofs),
- GEN_INT (-8)));
+ gen_rtx_AND (DImode,
+ plus_constant (XEXP (dst, 0), ofs),
+ GEN_INT (-8)));
memh = change_address (dst, DImode,
- gen_rtx (AND, DImode,
- plus_constant (XEXP (dst, 0), ofs+size-1),
- GEN_INT (-8)));
+ gen_rtx_AND (DImode,
+ plus_constant (XEXP (dst, 0),
+ ofs+size-1),
+ GEN_INT (-8)));
emit_move_insn (dsth, memh);
emit_move_insn (dstl, meml);
{
emit_move_insn (data_regs[i],
change_address (src_addr, DImode,
- gen_rtx (AND, DImode,
- plus_constant (XEXP(src_addr,0),
- 8*i),
- im8)));
+ gen_rtx_AND (DImode,
+ plus_constant (XEXP(src_addr,0),
+ 8*i),
+ im8)));
}
emit_move_insn (data_regs[words],
change_address (src_addr, DImode,
- gen_rtx (AND, DImode,
- plus_constant (XEXP(src_addr,0),
- 8*words - 1),
- im8)));
+ gen_rtx_AND (DImode,
+ plus_constant (XEXP(src_addr,0),
+ 8*words - 1),
+ im8)));
/* Extract the half-word fragments. Unfortunately DEC decided to make
extxh with offset zero a noop instead of zeroing the register, so
emit_insn (gen_extxl (data_regs[i], data_regs[i], i64, src_reg));
emit_insn (gen_extqh (ext_tmps[i], data_regs[i+1], src_reg));
- emit_insn (gen_rtx (SET, VOIDmode, ext_tmps[i],
- gen_rtx (IF_THEN_ELSE, DImode,
- gen_rtx (EQ, DImode, and_reg, const0_rtx),
- const0_rtx, ext_tmps[i])));
+ emit_insn (gen_rtx_SET (VOIDmode, ext_tmps[i],
+ gen_rtx_IF_THEN_ELSE (DImode,
+ gen_rtx_EQ (DImode, and_reg, const0_rtx),
+ const0_rtx, ext_tmps[i])));
}
/* Merge the half-words into whole words. */
st_tmp_2 = gen_reg_rtx(DImode);
st_addr_2 = change_address (dst_addr, DImode,
- gen_rtx (AND, DImode,
- plus_constant (XEXP(dst_addr,0),
- words*8 - 1),
+ gen_rtx_AND (DImode,
+ plus_constant (XEXP(dst_addr,0),
+ words*8 - 1),
im8));
st_addr_1 = change_address (dst_addr, DImode,
- gen_rtx (AND, DImode,
- XEXP (dst_addr, 0),
- im8));
+ gen_rtx_AND (DImode,
+ XEXP (dst_addr, 0),
+ im8));
/* Load up the destination end bits. */
emit_move_insn (st_tmp_2, st_addr_2);
for (i = words-1; i > 0; --i)
{
emit_move_insn (change_address (dst_addr, DImode,
- gen_rtx (AND, DImode,
- plus_constant(XEXP (dst_addr,0),
- i*8),
+ gen_rtx_AND (DImode,
+ plus_constant(XEXP (dst_addr,0),
+ i*8),
im8)),
data_regs ? ins_tmps[i-1] : const0_rtx);
}
/* No rtx yet. Invent one, and initialize it from $26 in the prologue. */
alpha_return_addr_rtx = gen_reg_rtx (Pmode);
- init = gen_rtx (SET, Pmode, alpha_return_addr_rtx, gen_rtx (REG, Pmode, 26));
+ init = gen_rtx_SET (Pmode, alpha_return_addr_rtx,
+ gen_rtx_REG (Pmode, REG_RA));
/* Emit the insn to the prologue with the other argument copies. */
push_topmost_sequence ();
if (!alpha_return_addr_rtx)
return regs_ever_live[REG_RA];
- return reg_set_between_p (gen_rtx (REG, REG_RA), get_insns(), NULL_RTX);
+ return reg_set_between_p (gen_rtx_REG (Pmode, REG_RA),
+ get_insns(), NULL_RTX);
}
\f
(match_operand:SI 2 "add_operand" "")))]
""
"
-{ emit_insn (gen_rtx (SET, VOIDmode, gen_lowpart (DImode, operands[0]),
- gen_rtx (PLUS, DImode,
- gen_lowpart (DImode, operands[1]),
- gen_lowpart (DImode, operands[2]))));
+{ emit_insn (gen_rtx_SET (VOIDmode, gen_lowpart (DImode, operands[0]),
+ gen_rtx_PLUS (DImode,
+ gen_lowpart (DImode, operands[1]),
+ gen_lowpart (DImode, operands[2]))));
DONE;
} ")
(set (match_dup 0) (sign_extend:DI (plus:SI (match_dup 7) (match_dup 4))))]
"
{
- operands[6] = gen_rtx (GET_CODE (operands[1]), DImode,
- operands[2], operands[3]);
+ operands[6] = gen_rtx_fmt_ee (GET_CODE (operands[1]), DImode,
+ operands[2], operands[3]);
operands[7] = gen_lowpart (SImode, operands[5]);
}")
(match_dup 5))))]
"
{
- operands[7] = gen_rtx (GET_CODE (operands[1]), DImode,
- operands[2], operands[3]);
+ operands[7] = gen_rtx_fmt_ee (GET_CODE (operands[1]), DImode,
+ operands[2], operands[3]);
operands[8] = gen_lowpart (SImode, operands[6]);
}")
(match_operand:SI 2 "reg_or_8bit_operand" "")))]
""
"
-{ emit_insn (gen_rtx (SET, VOIDmode, gen_lowpart (DImode, operands[0]),
- gen_rtx (MINUS, DImode,
- gen_lowpart (DImode, operands[1]),
- gen_lowpart (DImode, operands[2]))));
+{ emit_insn (gen_rtx_SET (VOIDmode, gen_lowpart (DImode, operands[0]),
+ gen_rtx_MINUS (DImode,
+ gen_lowpart (DImode, operands[1]),
+ gen_lowpart (DImode, operands[2]))));
DONE;
-
} ")
(define_insn ""
}
operands[1] = gen_reg_rtx (mode);
- operands[2] = gen_rtx (compare_code, mode,
- alpha_compare_op0, alpha_compare_op1);
- operands[3] = gen_rtx (branch_code, VOIDmode,
- operands[1], CONST0_RTX (mode));
+ operands[2] = gen_rtx_fmt_ee (compare_code, mode,
+ alpha_compare_op0, alpha_compare_op1);
+ operands[3] = gen_rtx_fmt_ee (branch_code, VOIDmode,
+ operands[1], CONST0_RTX (mode));
}")
(define_expand "bne"
}
operands[1] = gen_reg_rtx (mode);
- operands[2] = gen_rtx (compare_code, mode,
- alpha_compare_op0, alpha_compare_op1);
- operands[3] = gen_rtx (branch_code, VOIDmode,
- operands[1], CONST0_RTX (mode));
+ operands[2] = gen_rtx_fmt_ee (compare_code, mode,
+ alpha_compare_op0, alpha_compare_op1);
+ operands[3] = gen_rtx_fmt_ee (branch_code, VOIDmode,
+ operands[1], CONST0_RTX (mode));
}")
(define_expand "blt"
{
enum machine_mode mode = alpha_compare_fp_p ? DFmode : DImode;
operands[1] = gen_reg_rtx (mode);
- operands[2] = gen_rtx (LT, mode, alpha_compare_op0, alpha_compare_op1);
- operands[3] = gen_rtx (NE, VOIDmode, operands[1], CONST0_RTX (mode));
+ operands[2] = gen_rtx_LT (mode, alpha_compare_op0, alpha_compare_op1);
+ operands[3] = gen_rtx_NE (VOIDmode, operands[1], CONST0_RTX (mode));
}")
(define_expand "ble"
{
enum machine_mode mode = alpha_compare_fp_p ? DFmode : DImode;
operands[1] = gen_reg_rtx (mode);
- operands[2] = gen_rtx (LE, mode, alpha_compare_op0, alpha_compare_op1);
- operands[3] = gen_rtx (NE, VOIDmode, operands[1], CONST0_RTX (mode));
+ operands[2] = gen_rtx_LE (mode, alpha_compare_op0, alpha_compare_op1);
+ operands[3] = gen_rtx_NE (VOIDmode, operands[1], CONST0_RTX (mode));
}")
(define_expand "bgt"
if (alpha_compare_fp_p)
{
operands[1] = gen_reg_rtx (DFmode);
- operands[2] = gen_rtx (LT, DFmode, alpha_compare_op1, alpha_compare_op0);
- operands[3] = gen_rtx (NE, VOIDmode, operands[1], CONST0_RTX (DFmode));
+ operands[2] = gen_rtx_LT (DFmode, alpha_compare_op1, alpha_compare_op0);
+ operands[3] = gen_rtx_NE (VOIDmode, operands[1], CONST0_RTX (DFmode));
}
else
{
operands[1] = gen_reg_rtx (DImode);
- operands[2] = gen_rtx (LE, DImode, alpha_compare_op0, alpha_compare_op1);
- operands[3] = gen_rtx (EQ, VOIDmode, operands[1], const0_rtx);
+ operands[2] = gen_rtx_LE (DImode, alpha_compare_op0, alpha_compare_op1);
+ operands[3] = gen_rtx_EQ (VOIDmode, operands[1], const0_rtx);
}
}")
if (alpha_compare_fp_p)
{
operands[1] = gen_reg_rtx (DFmode);
- operands[2] = gen_rtx (LE, DFmode, alpha_compare_op1, alpha_compare_op0);
- operands[3] = gen_rtx (NE, VOIDmode, operands[1], CONST0_RTX (DFmode));
+ operands[2] = gen_rtx_LE (DFmode, alpha_compare_op1, alpha_compare_op0);
+ operands[3] = gen_rtx_NE (VOIDmode, operands[1], CONST0_RTX (DFmode));
}
else
{
operands[1] = gen_reg_rtx (DImode);
- operands[2] = gen_rtx (LT, DImode, alpha_compare_op0, alpha_compare_op1);
- operands[3] = gen_rtx (EQ, VOIDmode, operands[1], const0_rtx);
+ operands[2] = gen_rtx_LT (DImode, alpha_compare_op0, alpha_compare_op1);
+ operands[3] = gen_rtx_EQ (VOIDmode, operands[1], const0_rtx);
}
}")
"
{
operands[1] = gen_reg_rtx (DImode);
- operands[2] = gen_rtx (LTU, DImode, alpha_compare_op0, alpha_compare_op1);
- operands[3] = gen_rtx (NE, VOIDmode, operands[1], const0_rtx);
+ operands[2] = gen_rtx_LTU (DImode, alpha_compare_op0, alpha_compare_op1);
+ operands[3] = gen_rtx_NE (VOIDmode, operands[1], const0_rtx);
}")
(define_expand "bleu"
"
{
operands[1] = gen_reg_rtx (DImode);
- operands[2] = gen_rtx (LEU, DImode, alpha_compare_op0, alpha_compare_op1);
- operands[3] = gen_rtx (NE, VOIDmode, operands[1], const0_rtx);
+ operands[2] = gen_rtx_LEU (DImode, alpha_compare_op0, alpha_compare_op1);
+ operands[3] = gen_rtx_NE (VOIDmode, operands[1], const0_rtx);
}")
(define_expand "bgtu"
"
{
operands[1] = gen_reg_rtx (DImode);
- operands[2] = gen_rtx (LEU, DImode, alpha_compare_op0, alpha_compare_op1);
- operands[3] = gen_rtx (EQ, VOIDmode, operands[1], const0_rtx);
+ operands[2] = gen_rtx_LEU (DImode, alpha_compare_op0, alpha_compare_op1);
+ operands[3] = gen_rtx_EQ (VOIDmode, operands[1], const0_rtx);
}")
(define_expand "bgeu"
"
{
operands[1] = gen_reg_rtx (DImode);
- operands[2] = gen_rtx (LTU, DImode, alpha_compare_op0, alpha_compare_op1);
- operands[3] = gen_rtx (EQ, VOIDmode, operands[1], const0_rtx);
+ operands[2] = gen_rtx_LTU (DImode, alpha_compare_op0, alpha_compare_op1);
+ operands[3] = gen_rtx_EQ (VOIDmode, operands[1], const0_rtx);
}")
(define_expand "seq"
if (alpha_compare_fp_p)
FAIL;
- operands[1] = gen_rtx (EQ, DImode, alpha_compare_op0, alpha_compare_op1);
+ operands[1] = gen_rtx_EQ (DImode, alpha_compare_op0, alpha_compare_op1);
}")
(define_expand "sne"
if (alpha_compare_fp_p)
FAIL;
- operands[1] = gen_rtx (EQ, DImode, alpha_compare_op0, alpha_compare_op1);
+ operands[1] = gen_rtx_EQ (DImode, alpha_compare_op0, alpha_compare_op1);
}")
(define_expand "slt"
if (alpha_compare_fp_p)
FAIL;
- operands[1] = gen_rtx (LT, DImode, alpha_compare_op0, alpha_compare_op1);
+ operands[1] = gen_rtx_LT (DImode, alpha_compare_op0, alpha_compare_op1);
}")
(define_expand "sle"
if (alpha_compare_fp_p)
FAIL;
- operands[1] = gen_rtx (LE, DImode, alpha_compare_op0, alpha_compare_op1);
+ operands[1] = gen_rtx_LE (DImode, alpha_compare_op0, alpha_compare_op1);
}")
(define_expand "sgt"
if (alpha_compare_fp_p)
FAIL;
- operands[1] = gen_rtx (LT, DImode, force_reg (DImode, alpha_compare_op1),
- alpha_compare_op0);
+ operands[1] = gen_rtx_LT (DImode, force_reg (DImode, alpha_compare_op1),
+ alpha_compare_op0);
}")
(define_expand "sge"
if (alpha_compare_fp_p)
FAIL;
- operands[1] = gen_rtx (LE, DImode, force_reg (DImode, alpha_compare_op1),
- alpha_compare_op0);
+ operands[1] = gen_rtx_LE (DImode, force_reg (DImode, alpha_compare_op1),
+ alpha_compare_op0);
}")
(define_expand "sltu"
if (alpha_compare_fp_p)
FAIL;
- operands[1] = gen_rtx (LTU, DImode, alpha_compare_op0, alpha_compare_op1);
+ operands[1] = gen_rtx_LTU (DImode, alpha_compare_op0, alpha_compare_op1);
}")
(define_expand "sleu"
if (alpha_compare_fp_p)
FAIL;
- operands[1] = gen_rtx (LEU, DImode, alpha_compare_op0, alpha_compare_op1);
+ operands[1] = gen_rtx_LEU (DImode, alpha_compare_op0, alpha_compare_op1);
}")
(define_expand "sgtu"
if (alpha_compare_fp_p)
FAIL;
- operands[1] = gen_rtx (LTU, DImode, force_reg (DImode, alpha_compare_op1),
- alpha_compare_op0);
+ operands[1] = gen_rtx_LTU (DImode, force_reg (DImode, alpha_compare_op1),
+ alpha_compare_op0);
}")
(define_expand "sgeu"
if (alpha_compare_fp_p)
FAIL;
- operands[1] = gen_rtx (LEU, DImode, force_reg (DImode, alpha_compare_op1),
- alpha_compare_op0);
+ operands[1] = gen_rtx_LEU (DImode, force_reg (DImode, alpha_compare_op1),
+ alpha_compare_op0);
}")
\f
;; These are the main define_expand's used to make conditional moves.
&& extended_count (operands[3], DImode, unsignedp) >= 1))
{
if (GET_CODE (operands[3]) == CONST_INT)
- operands[7] = gen_rtx (PLUS, DImode, operands[2],
- GEN_INT (- INTVAL (operands[3])));
+ operands[7] = gen_rtx_PLUS (DImode, operands[2],
+ GEN_INT (- INTVAL (operands[3])));
else
- operands[7] = gen_rtx (MINUS, DImode, operands[2], operands[3]);
+ operands[7] = gen_rtx_MINUS (DImode, operands[2], operands[3]);
- operands[8] = gen_rtx (code, VOIDmode, operands[6], const0_rtx);
+ operands[8] = gen_rtx_fmt_ee (code, VOIDmode, operands[6], const0_rtx);
}
else if (code == EQ || code == LE || code == LT
|| code == LEU || code == LTU)
{
- operands[7] = gen_rtx (code, DImode, operands[2], operands[3]);
- operands[8] = gen_rtx (NE, VOIDmode, operands[6], const0_rtx);
+ operands[7] = gen_rtx_fmt_ee (code, DImode, operands[2], operands[3]);
+ operands[8] = gen_rtx_NE (VOIDmode, operands[6], const0_rtx);
}
else
{
- operands[7] = gen_rtx (reverse_condition (code), DImode, operands[2],
- operands[3]);
- operands[8] = gen_rtx (EQ, VOIDmode, operands[6], const0_rtx);
+ operands[7] = gen_rtx_fmt_ee (reverse_condition (code), DImode,
+ operands[2], operands[3]);
+ operands[8] = gen_rtx_EQ (VOIDmode, operands[6], const0_rtx);
}
}")
FAIL;
if (GET_CODE (operands[3]) == CONST_INT)
- tem = gen_rtx (PLUS, SImode, operands[2],
- GEN_INT (- INTVAL (operands[3])));
+ tem = gen_rtx_PLUS (SImode, operands[2],
+ GEN_INT (- INTVAL (operands[3])));
else
- tem = gen_rtx (MINUS, SImode, operands[2], operands[3]);
+ tem = gen_rtx_MINUS (SImode, operands[2], operands[3]);
- operands[7] = gen_rtx (SIGN_EXTEND, DImode, tem);
- operands[8] = gen_rtx (GET_CODE (operands[1]), VOIDmode, operands[6],
- const0_rtx);
+ operands[7] = gen_rtx_SIGN_EXTEND (DImode, tem);
+ operands[8] = gen_rtx_fmt_ee (GET_CODE (operands[1]), VOIDmode,
+ operands[6], const0_rtx);
}")
(define_split
&& extended_count (operands[3], DImode, unsignedp) >= 1))
{
if (GET_CODE (operands[3]) == CONST_INT)
- operands[5] = gen_rtx (PLUS, DImode, operands[2],
- GEN_INT (- INTVAL (operands[3])));
+ operands[5] = gen_rtx_PLUS (DImode, operands[2],
+ GEN_INT (- INTVAL (operands[3])));
else
- operands[5] = gen_rtx (MINUS, DImode, operands[2], operands[3]);
+ operands[5] = gen_rtx_MINUS (DImode, operands[2], operands[3]);
- operands[6] = gen_rtx (code, VOIDmode, operands[4], const0_rtx);
+ operands[6] = gen_rtx_fmt_ee (code, VOIDmode, operands[4], const0_rtx);
}
else if (code == EQ || code == LE || code == LT
|| code == LEU || code == LTU)
{
- operands[5] = gen_rtx (code, DImode, operands[2], operands[3]);
- operands[6] = gen_rtx (NE, VOIDmode, operands[4], const0_rtx);
+ operands[5] = gen_rtx_fmt_ee (code, DImode, operands[2], operands[3]);
+ operands[6] = gen_rtx_NE (VOIDmode, operands[4], const0_rtx);
}
else
{
- operands[5] = gen_rtx (reverse_condition (code), DImode, operands[2],
- operands[3]);
- operands[6] = gen_rtx (EQ, VOIDmode, operands[4], const0_rtx);
+ operands[5] = gen_rtx_fmt_ee (reverse_condition (code), DImode,
+ operands[2], operands[3]);
+ operands[6] = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
}
}")
{ rtx tem;
if (GET_CODE (operands[3]) == CONST_INT)
- tem = gen_rtx (PLUS, SImode, operands[2],
- GEN_INT (- INTVAL (operands[3])));
+ tem = gen_rtx_PLUS (SImode, operands[2],
+ GEN_INT (- INTVAL (operands[3])));
else
- tem = gen_rtx (MINUS, SImode, operands[2], operands[3]);
+ tem = gen_rtx_MINUS (SImode, operands[2], operands[3]);
- operands[5] = gen_rtx (SIGN_EXTEND, DImode, tem);
- operands[6] = gen_rtx (GET_CODE (operands[1]), VOIDmode,
- operands[4], const0_rtx);
+ operands[5] = gen_rtx_SIGN_EXTEND (DImode, tem);
+ operands[6] = gen_rtx_fmt_ee (GET_CODE (operands[1]), VOIDmode,
+ operands[4], const0_rtx);
}")
;; We can convert such things as "a > 0xffff" to "t = a & ~ 0xffff; t != 0".
"
{
operands[5] = GEN_INT (~ INTVAL (operands[3]));
- operands[6] = gen_rtx (((GET_CODE (operands[1]) == GTU
- || GET_CODE (operands[1]) == GT)
- ? NE : EQ),
- DImode, operands[4], const0_rtx);
+ operands[6] = gen_rtx_fmt_ee (((GET_CODE (operands[1]) == GTU
+ || GET_CODE (operands[1]) == GT)
+ ? NE : EQ),
+ DImode, operands[4], const0_rtx);
}")
\f
;; Here are the CALL and unconditional branch insns. Calls on NT and OSF
if (GET_CODE (operands[0]) != SYMBOL_REF
&& ! (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == 27))
{
- rtx tem = gen_rtx (REG, DImode, 27);
+ rtx tem = gen_rtx_REG (DImode, 27);
emit_move_insn (tem, operands[0]);
operands[0] = tem;
}
indirect call differently. Load RA and set operands[2] to PV in
both cases. */
- emit_move_insn (gen_rtx (REG, DImode, 25), operands[1]);
+ emit_move_insn (gen_rtx_REG (DImode, 25), operands[1]);
if (GET_CODE (operands[0]) == SYMBOL_REF)
{
extern char *savealloc ();
linksym[0] = '$';
strcpy (linksym+1, symbol);
strcat (linksym, \"..lk\");
- linkage = gen_rtx (SYMBOL_REF, Pmode, linksym);
+ linkage = gen_rtx_SYMBOL_REF (Pmode, linksym);
- emit_move_insn (gen_rtx (REG, Pmode, 26), gen_rtx (MEM, Pmode, linkage));
+ emit_move_insn (gen_rtx_REG (Pmode, 26), gen_rtx_MEM (Pmode, linkage));
operands[2]
- = validize_mem (gen_rtx (MEM, Pmode, plus_constant (linkage, 8)));
+ = validize_mem (gen_rtx_MEM (Pmode, plus_constant (linkage, 8)));
}
else
{
- emit_move_insn (gen_rtx (REG, Pmode, 26),
- gen_rtx (MEM, Pmode, plus_constant (operands[0], 8)));
+ emit_move_insn (gen_rtx_REG (Pmode, 26),
+ gen_rtx_MEM (Pmode, plus_constant (operands[0], 8)));
operands[2] = operands[0];
}
if (GET_CODE (operands[1]) != SYMBOL_REF
&& ! (GET_CODE (operands[1]) == REG && REGNO (operands[1]) == 27))
{
- rtx tem = gen_rtx (REG, DImode, 27);
+ rtx tem = gen_rtx_REG (DImode, 27);
emit_move_insn (tem, operands[1]);
operands[1] = tem;
}
indirect call differently. Load RA and set operands[3] to PV in
both cases. */
- emit_move_insn (gen_rtx (REG, DImode, 25), operands[2]);
+ emit_move_insn (gen_rtx_REG (DImode, 25), operands[2]);
if (GET_CODE (operands[1]) == SYMBOL_REF)
{
extern char *savealloc ();
linksym[0] = '$';
strcpy (linksym+1, symbol);
strcat (linksym, \"..lk\");
- linkage = gen_rtx (SYMBOL_REF, Pmode, linksym);
+ linkage = gen_rtx_SYMBOL_REF (Pmode, linksym);
- emit_move_insn (gen_rtx (REG, Pmode, 26), gen_rtx (MEM, Pmode, linkage));
+ emit_move_insn (gen_rtx_REG (Pmode, 26), gen_rtx_MEM (Pmode, linkage));
operands[3]
- = validize_mem (gen_rtx (MEM, Pmode, plus_constant (linkage, 8)));
+ = validize_mem (gen_rtx_MEM (Pmode, plus_constant (linkage, 8)));
}
else
{
- emit_move_insn (gen_rtx (REG, Pmode, 26),
- gen_rtx (MEM, Pmode, plus_constant (operands[1], 8)));
+ emit_move_insn (gen_rtx_REG (Pmode, 26),
+ gen_rtx_MEM (Pmode, plus_constant (operands[1], 8)));
operands[3] = operands[1];
}
{
rtx aligned_mem, bitnum;
rtx scratch = (reload_in_progress
- ? gen_rtx (REG, SImode, REGNO (operands[0]))
+ ? gen_rtx_REG (SImode, REGNO (operands[0]))
: gen_reg_rtx (SImode));
/* ??? This code creates a new MEM rtx. If we were called during
&& GET_CODE (operands[1]) == MEM
&& ! strict_memory_address_p (SImode, XEXP (operands[1], 0)))
{
- rtx tmp = gen_rtx (REG, Pmode, REGNO (operands[0]));
+ rtx tmp = gen_rtx_REG (Pmode, REGNO (operands[0]));
emit_insn (gen_move_insn (tmp, XEXP (operands[1], 0)));
XEXP (operands[1], 0) = tmp;
}
{
rtx aligned_mem, bitnum;
rtx scratch = (reload_in_progress
- ? gen_rtx (REG, SImode, REGNO (operands[0]))
+ ? gen_rtx_REG (SImode, REGNO (operands[0]))
: gen_reg_rtx (SImode));
/* ??? This code creates a new MEM rtx. If we were called during
&& GET_CODE (operands[1]) == MEM
&& ! strict_memory_address_p (SImode, XEXP (operands[1], 0)))
{
- rtx tmp = gen_rtx (REG, Pmode, REGNO (operands[0]));
+ rtx tmp = gen_rtx_REG (Pmode, REGNO (operands[0]));
emit_insn (gen_move_insn (tmp, XEXP (operands[1], 0)));
XEXP (operands[1], 0) = tmp;
}
/* It is possible that one of the registers we got for operands[2]
might coincide with that of operands[0] (which is why we made
it TImode). Pick the other one to use as our scratch. */
- scratch = gen_rtx (REG, DImode,
- REGNO (operands[0]) == REGNO (operands[2])
- ? REGNO (operands[2]) + 1 : REGNO (operands[2]));
+ scratch = gen_rtx_REG (DImode,
+ REGNO (operands[0]) == REGNO (operands[2])
+ ? REGNO (operands[2]) + 1 : REGNO (operands[2]));
/* We must be careful to make sure that the new rtx won't need reloading. */
if (GET_CODE (operands[1]) == MEM &&
! strict_memory_address_p (DImode, XEXP (operands[1], 0)))
{
- tmp = gen_rtx (REG, Pmode, REGNO (operands[0]));
+ tmp = gen_rtx_REG (Pmode, REGNO (operands[0]));
emit_insn (gen_move_insn (tmp, XEXP (operands[1], 0)));
XEXP (operands[1], 0) = tmp;
}
addr = get_unaligned_address (operands[1], 0);
seq = gen_unaligned_loadqi (operands[0], addr, scratch,
- gen_rtx (REG, DImode, REGNO (operands[0])));
+ gen_rtx_REG (DImode, REGNO (operands[0])));
alpha_set_memflags (seq, operands[1]);
emit_insn (seq);
/* It is possible that one of the registers we got for operands[2]
might coincide with that of operands[0] (which is why we made
it TImode). Pick the other one to use as our scratch. */
- scratch = gen_rtx (REG, DImode,
- REGNO (operands[0]) == REGNO (operands[2])
- ? REGNO (operands[2]) + 1 : REGNO (operands[2]));
+ scratch = gen_rtx_REG (DImode,
+ REGNO (operands[0]) == REGNO (operands[2])
+ ? REGNO (operands[2]) + 1 : REGNO (operands[2]));
/* We must be careful to make sure that the new rtx won't need reloading. */
if (GET_CODE (operands[1]) == MEM &&
! strict_memory_address_p (DImode, XEXP (operands[1], 0)))
{
- tmp = gen_rtx (REG, Pmode, REGNO (operands[0]));
+ tmp = gen_rtx_REG (Pmode, REGNO (operands[0]));
emit_insn (gen_move_insn (tmp, XEXP (operands[1], 0)));
XEXP (operands[1], 0) = tmp;
}
addr = get_unaligned_address (operands[1], 0);
seq = gen_unaligned_loadhi (operands[0], addr, scratch,
- gen_rtx (REG, DImode, REGNO (operands[0])));
+ gen_rtx_REG (DImode, REGNO (operands[0])));
alpha_set_memflags (seq, operands[1]);
emit_insn (seq);
if (GET_CODE (operands[0]) == MEM &&
! strict_memory_address_p (DImode, XEXP (operands[0], 0)))
{
- rtx scratch1 = gen_rtx (REG, DImode, REGNO (operands[2]));
+ rtx scratch1 = gen_rtx_REG (DImode, REGNO (operands[2]));
emit_insn (gen_move_insn (scratch1, XEXP (operands[0], 0)));
XEXP (operands[0], 0) = scratch1;
}
get_aligned_mem (operands[0], &aligned_mem, &bitnum);
emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
- gen_rtx (REG, SImode, REGNO (operands[2])),
- gen_rtx (REG, SImode,
- REGNO (operands[2]) + 1)));
+ gen_rtx_REG (SImode, REGNO (operands[2])),
+ gen_rtx_REG (SImode,
+ REGNO (operands[2]) + 1)));
}
else
{
rtx addr = get_unaligned_address (operands[0], 0);
- rtx scratch1 = gen_rtx (REG, DImode, REGNO (operands[2]));
- rtx scratch2 = gen_rtx (REG, DImode, REGNO (operands[2]) + 1);
+ rtx scratch1 = gen_rtx_REG (DImode, REGNO (operands[2]));
+ rtx scratch2 = gen_rtx_REG (DImode, REGNO (operands[2]) + 1);
rtx scratch3 = scratch1;
rtx seq;
if (GET_CODE (operands[0]) == MEM &&
! strict_memory_address_p (DImode, XEXP (operands[0], 0)))
{
- rtx scratch1 = gen_rtx (REG, DImode, REGNO (operands[2]));
+ rtx scratch1 = gen_rtx_REG (DImode, REGNO (operands[2]));
emit_insn (gen_move_insn (scratch1, XEXP (operands[0], 0)));
XEXP (operands[0], 0) = scratch1;
}
get_aligned_mem (operands[0], &aligned_mem, &bitnum);
emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
- gen_rtx (REG, SImode, REGNO (operands[2])),
- gen_rtx (REG, SImode,
- REGNO (operands[2]) + 1)));
+ gen_rtx_REG (SImode, REGNO (operands[2])),
+ gen_rtx_REG (SImode,
+ REGNO (operands[2]) + 1)));
}
else
{
rtx addr = get_unaligned_address (operands[0], 0);
- rtx scratch1 = gen_rtx (REG, DImode, REGNO (operands[2]));
- rtx scratch2 = gen_rtx (REG, DImode, REGNO (operands[2]) + 1);
+ rtx scratch1 = gen_rtx_REG (DImode, REGNO (operands[2]));
+ rtx scratch2 = gen_rtx_REG (DImode, REGNO (operands[2]) + 1);
rtx scratch3 = scratch1;
rtx seq;
""
"
{
- operands[1] = gen_rtx (MEM, DImode, plus_constant (stack_pointer_rtx,
- INTVAL (operands[0])));
+ operands[1] = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx,
+ INTVAL (operands[0])));
MEM_VOLATILE_P (operands[1]) = 1;
operands[0] = const0_rtx;
}
emit_label (loop_label);
- memref = gen_rtx (MEM, DImode, tmp);
+ memref = gen_rtx_MEM (DImode, tmp);
MEM_VOLATILE_P (memref) = 1;
emit_move_insn (memref, const0_rtx);
emit_insn (gen_adddi3 (tmp, tmp, GEN_INT(-8192)));
emit_insn (gen_cmpdi (tmp, want));
emit_jump_insn (gen_bgtu (loop_label));
if (obey_regdecls)
- gen_rtx (USE, VOIDmode, tmp);
+ gen_rtx_USE (VOIDmode, tmp);
- memref = gen_rtx (MEM, DImode, want);
+ memref = gen_rtx_MEM (DImode, want);
MEM_VOLATILE_P (memref) = 1;
emit_move_insn (memref, const0_rtx);
first = qty_first_reg[reg_qty[REGNO (x)]];
return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
: REGNO_REG_CLASS (first) == NO_REGS ? x
- : gen_rtx (REG, qty_mode[reg_qty[REGNO (x)]], first));
+ : gen_rtx_REG (qty_mode[reg_qty[REGNO (x)]], first));
}
default:
/* Change subtraction from zero into negation. */
if (op0 == CONST0_RTX (mode))
- return gen_rtx (NEG, mode, op1);
+ return gen_rtx_NEG (mode, op1);
/* (-1 - a) is ~a. */
if (op0 == constm1_rtx)
- return gen_rtx (NOT, mode, op1);
+ return gen_rtx_NOT (mode, op1);
/* Subtracting 0 has no effect. */
if (op1 == CONST0_RTX (mode))
if (GET_CODE (op1) == AND)
{
if (rtx_equal_p (op0, XEXP (op1, 0)))
- return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 1)));
+ return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 1)));
if (rtx_equal_p (op0, XEXP (op1, 1)))
- return cse_gen_binary (AND, mode, op0, gen_rtx (NOT, mode, XEXP (op1, 0)));
+ return cse_gen_binary (AND, mode, op0, gen_rtx_NOT (mode, XEXP (op1, 0)));
}
break;
{
tem = simplify_unary_operation (NEG, mode, op0, mode);
- return tem ? tem : gen_rtx (NEG, mode, op0);
+ return tem ? tem : gen_rtx_NEG (mode, op0);
}
/* In IEEE floating point, x*0 is not always 0. */
&& (width <= HOST_BITS_PER_WIDE_INT
|| val != HOST_BITS_PER_WIDE_INT - 1)
&& ! rtx_equal_function_value_matters)
- return gen_rtx (ASHIFT, mode, op0, GEN_INT (val));
+ return gen_rtx_ASHIFT (mode, op0, GEN_INT (val));
if (GET_CODE (op1) == CONST_DOUBLE
&& GET_MODE_CLASS (GET_MODE (op1)) == MODE_FLOAT)
/* x*2 is x+x and x*(-1) is -x */
if (op1is2 && GET_MODE (op0) == mode)
- return gen_rtx (PLUS, mode, op0, copy_rtx (op0));
+ return gen_rtx_PLUS (mode, op0, copy_rtx (op0));
else if (op1ism1 && GET_MODE (op0) == mode)
- return gen_rtx (NEG, mode, op0);
+ return gen_rtx_NEG (mode, op0);
}
break;
return op0;
if (GET_CODE (op1) == CONST_INT
&& (INTVAL (op1) & GET_MODE_MASK (mode)) == GET_MODE_MASK (mode))
- return gen_rtx (NOT, mode, op0);
+ return gen_rtx_NOT (mode, op0);
if (op0 == op1 && ! side_effects_p (op0)
&& GET_MODE_CLASS (mode) != MODE_CC)
return const0_rtx;
below). */
if (GET_CODE (op1) == CONST_INT
&& (arg1 = exact_log2 (INTVAL (op1))) > 0)
- return gen_rtx (LSHIFTRT, mode, op0, GEN_INT (arg1));
+ return gen_rtx_LSHIFTRT (mode, op0, GEN_INT (arg1));
/* ... fall through ... */
{
#if defined (REAL_ARITHMETIC)
REAL_ARITHMETIC (d, rtx_to_tree_code (DIV), dconst1, d);
- return gen_rtx (MULT, mode, op0,
- CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
+ return gen_rtx_MULT (mode, op0,
+ CONST_DOUBLE_FROM_REAL_VALUE (d, mode));
#else
- return gen_rtx (MULT, mode, op0,
- CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
+ return gen_rtx_MULT (mode, op0,
+ CONST_DOUBLE_FROM_REAL_VALUE (1./d, mode));
#endif
}
}
/* Handle modulus by power of two (mod with 1 handled below). */
if (GET_CODE (op1) == CONST_INT
&& exact_log2 (INTVAL (op1)) > 0)
- return gen_rtx (AND, mode, op0, GEN_INT (INTVAL (op1) - 1));
+ return gen_rtx_AND (mode, op0, GEN_INT (INTVAL (op1) - 1));
/* ... fall through ... */
for (i = 1; i < n_ops; i++)
result = cse_gen_binary (negs[i] ? MINUS : PLUS, mode, result, ops[i]);
- return negate ? gen_rtx (NEG, mode, result) : result;
+ return negate ? gen_rtx_NEG (mode, result) : result;
}
\f
/* Make a binary operation by properly ordering the operands and
&& GET_MODE (op0) != VOIDmode)
return plus_constant (op0, - INTVAL (op1));
else
- return gen_rtx (code, mode, op0, op1);
+ return gen_rtx_fmt_ee (code, mode, op0, op1);
}
\f
/* Like simplify_binary_operation except used for relational operators.
&& GET_CODE (NEXT_INSN (next)) == JUMP_INSN
&& (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
|| GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
- return gen_rtx (LABEL_REF, Pmode, next);
+ return gen_rtx_LABEL_REF (Pmode, next);
}
break;
< XVECLEN (table, 1)))
{
offset /= GET_MODE_SIZE (GET_MODE (table));
- new = gen_rtx (MINUS, Pmode, XVECEXP (table, 1, offset),
- XEXP (table, 0));
+ new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
+ XEXP (table, 0));
if (GET_MODE (table) != Pmode)
- new = gen_rtx (TRUNCATE, GET_MODE (table), new);
+ new = gen_rtx_TRUNCATE (GET_MODE (table), new);
/* Indicate this is a constant. This isn't a
valid form of CONST, but it will only be used
to fold the next insns and then discarded, so
it should be safe. */
- return gen_rtx (CONST, GET_MODE (new), new);
+ return gen_rtx_CONST (GET_MODE (new), new);
}
}
}
const_arg0 ? const_arg0 : folded_arg0,
mode_arg0);
if (new != 0 && is_const)
- new = gen_rtx (CONST, mode, new);
+ new = gen_rtx_CONST (mode, new);
}
break;
unchanged. */
offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
- MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
- new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
+ new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
if (! memory_address_p (mode, XEXP (new, 0)))
return 0;
MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
rtx tem = gen_lowpart_if_possible (inner_mode, op1);
record_jump_cond (code, mode, SUBREG_REG (op0),
- tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
+ tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
reversed_nonequality);
}
rtx tem = gen_lowpart_if_possible (inner_mode, op0);
record_jump_cond (code, mode, SUBREG_REG (op1),
- tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
+ tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
reversed_nonequality);
}
rtx tem = gen_lowpart_if_possible (inner_mode, op1);
record_jump_cond (code, mode, SUBREG_REG (op0),
- tem ? tem : gen_rtx (SUBREG, inner_mode, op1, 0),
+ tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
reversed_nonequality);
}
rtx tem = gen_lowpart_if_possible (inner_mode, op0);
record_jump_cond (code, mode, SUBREG_REG (op1),
- tem ? tem : gen_rtx (SUBREG, inner_mode, op0, 0),
+ tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
reversed_nonequality);
}
&& GET_MODE_SIZE (mode) < UNITS_PER_WORD)
{
enum machine_mode tmode;
- rtx new_and = gen_rtx (AND, VOIDmode, NULL_RTX, XEXP (src, 1));
+ rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
for (tmode = GET_MODE_WIDER_MODE (mode);
GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
&& (GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_DIFF_VEC
|| GET_CODE (PATTERN (NEXT_INSN (trial))) == ADDR_VEC))
- trial = gen_rtx (LABEL_REF, Pmode, get_label_after (trial));
+ trial = gen_rtx_LABEL_REF (Pmode, get_label_after (trial));
SET_SRC (sets[i].rtl) = trial;
cse_jumps_altered = 1;
src = SET_SRC (sets[i].rtl)
= first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
- : gen_rtx (REG, GET_MODE (src), first);
+ : gen_rtx_REG (GET_MODE (src), first);
/* If we had a constant that is cheaper than what we are now
setting SRC to, use that constant. We ignored it when we
if (tem)
XEXP (tem, 0) = src_const;
else
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL,
- src_const, REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
+ src_const, REG_NOTES (insn));
/* If storing a constant value in a register that
previously held the constant value 0,
if (note)
XEXP (note, 0) = const_insn;
else
- REG_NOTES (insn) = gen_rtx (INSN_LIST, REG_WAS_0,
- const_insn, REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_WAS_0,
+ const_insn,
+ REG_NOTES (insn));
}
}
}
this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
this_insn_cc0_mode = mode;
if (FLOAT_MODE_P (mode))
- this_insn_cc0 = gen_rtx (COMPARE, VOIDmode, this_insn_cc0,
- CONST0_RTX (mode));
+ this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
+ CONST0_RTX (mode));
}
#endif
}
new_src = gen_lowpart_if_possible (new_mode, elt->exp);
if (new_src == 0)
- new_src = gen_rtx (SUBREG, new_mode, elt->exp, 0);
+ new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
src_hash = HASH (new_src, new_mode);
src_elt = lookup (new_src, src_hash, new_mode);
/* Allocate scratch rtl here. cse_insn will fill in the memory reference
and change the code and mode as appropriate. */
- memory_extend_rtx = gen_rtx (ZERO_EXTEND, VOIDmode, NULL_RTX);
+ memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
#endif
/* Discard all the free elements of the previous function
.debug_frame. */
#define ASM_OUTPUT_DWARF_ADDR(FILE,LABEL) \
- assemble_integer (gen_rtx (SYMBOL_REF, Pmode, LABEL), PTR_SIZE, 1)
+ assemble_integer (gen_rtx_SYMBOL_REF (Pmode, LABEL), PTR_SIZE, 1)
#define ASM_OUTPUT_DWARF_OFFSET4(FILE,LABEL) \
- assemble_integer (gen_rtx (SYMBOL_REF, SImode, LABEL), 4, 1)
+ assemble_integer (gen_rtx_SYMBOL_REF (SImode, LABEL), 4, 1)
#define ASM_OUTPUT_DWARF_OFFSET(FILE,LABEL) \
- assemble_integer (gen_rtx (SYMBOL_REF, SImode, LABEL), 4, 1)
+ assemble_integer (gen_rtx_SYMBOL_REF (SImode, LABEL), 4, 1)
#define ASM_OUTPUT_DWARF_DELTA2(FILE,LABEL1,LABEL2) \
- assemble_integer (gen_rtx (MINUS, HImode, \
- gen_rtx (SYMBOL_REF, Pmode, LABEL1), \
- gen_rtx (SYMBOL_REF, Pmode, LABEL2)), \
+ assemble_integer (gen_rtx_MINUS (HImode, \
+ gen_rtx_SYMBOL_REF (Pmode, LABEL1), \
+ gen_rtx_SYMBOL_REF (Pmode, LABEL2)), \
2, 1)
#define ASM_OUTPUT_DWARF_DELTA4(FILE,LABEL1,LABEL2) \
- assemble_integer (gen_rtx (MINUS, SImode, \
- gen_rtx (SYMBOL_REF, Pmode, LABEL1), \
- gen_rtx (SYMBOL_REF, Pmode, LABEL2)), \
+ assemble_integer (gen_rtx_MINUS (SImode, \
+ gen_rtx_SYMBOL_REF (Pmode, LABEL1), \
+ gen_rtx_SYMBOL_REF (Pmode, LABEL2)), \
4, 1)
#define ASM_OUTPUT_DWARF_ADDR_DELTA(FILE,LABEL1,LABEL2) \
- assemble_integer (gen_rtx (MINUS, Pmode, \
- gen_rtx (SYMBOL_REF, Pmode, LABEL1), \
- gen_rtx (SYMBOL_REF, Pmode, LABEL2)), \
+ assemble_integer (gen_rtx_MINUS (Pmode, \
+ gen_rtx_SYMBOL_REF (Pmode, LABEL1), \
+ gen_rtx_SYMBOL_REF (Pmode, LABEL2)), \
PTR_SIZE, 1)
#define ASM_OUTPUT_DWARF_DELTA(FILE,LABEL1,LABEL2) \
current_function_ehc = gen_reg_rtx (Pmode);
- insn = gen_rtx (USE,
- GET_MODE (current_function_ehc),
- current_function_ehc);
+ insn = gen_rtx_USE (GET_MODE (current_function_ehc),
+ current_function_ehc);
insn = emit_insn_before (insn, get_first_nonparm_insn ());
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST,
- REG_EH_CONTEXT, current_function_ehc,
- REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_EH_CONTEXT, current_function_ehc,
+ REG_NOTES (insn));
}
return current_function_ehc;
}
result = copy_to_reg (dhc);
/* We don't want a copy of the dcc, but rather, the single dcc. */
- return gen_rtx (MEM, Pmode, result);
+ return gen_rtx_MEM (Pmode, result);
}
/* Get a reference to the dynamic cleanup chain. It points to the
result = copy_to_reg (dcc);
/* We don't want a copy of the dcc, but rather, the single dcc. */
- return gen_rtx (MEM, Pmode, result);
+ return gen_rtx_MEM (Pmode, result);
}
/* Generate code to evaluate X and jump to LABEL if the value is nonzero.
/* Store func and arg into the cleanup list element. */
- new_func = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
- GET_MODE_SIZE (Pmode)));
- new_arg = gen_rtx (MEM, Pmode, plus_constant (XEXP (buf, 0),
- GET_MODE_SIZE (Pmode)*2));
+ new_func = gen_rtx_MEM (Pmode, plus_constant (XEXP (buf, 0),
+ GET_MODE_SIZE (Pmode)));
+ new_arg = gen_rtx_MEM (Pmode, plus_constant (XEXP (buf, 0),
+ GET_MODE_SIZE (Pmode)*2));
x = expand_expr (func, new_func, Pmode, 0);
if (x != new_func)
emit_move_insn (new_func, x);
/* Store dhc into the first word of the newly allocated buffer. */
dhc = get_dynamic_handler_chain ();
- dcc = gen_rtx (MEM, Pmode, plus_constant (XEXP (arg, 0),
- GET_MODE_SIZE (Pmode)));
+ dcc = gen_rtx_MEM (Pmode, plus_constant (XEXP (arg, 0),
+ GET_MODE_SIZE (Pmode)));
emit_move_insn (arg, dhc);
/* Zero out the start of the cleanup chain. */
rtx sym;
ASM_GENERATE_INTERNAL_LABEL (buf, "LEHB", n);
- sym = gen_rtx (SYMBOL_REF, Pmode, buf);
+ sym = gen_rtx_SYMBOL_REF (Pmode, buf);
assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
ASM_GENERATE_INTERNAL_LABEL (buf, "LEHE", n);
- sym = gen_rtx (SYMBOL_REF, Pmode, buf);
+ sym = gen_rtx_SYMBOL_REF (Pmode, buf);
assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
ASM_GENERATE_INTERNAL_LABEL (buf, "L", n);
- sym = gen_rtx (SYMBOL_REF, Pmode, buf);
+ sym = gen_rtx_SYMBOL_REF (Pmode, buf);
assemble_integer (sym, POINTER_SIZE / BITS_PER_UNIT, 1);
putc ('\n', file); /* blank line */
if (label)
exception_handler_labels
- = gen_rtx (EXPR_LIST, VOIDmode,
- label, exception_handler_labels);
+ = gen_rtx_EXPR_LIST (VOIDmode,
+ label, exception_handler_labels);
else
warning ("didn't find handler for EH region %d",
NOTE_BLOCK_NUMBER (insn));
for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
if (call_used_regs[i] && ! fixed_regs[i] && i != REGNO (reg1))
{
- reg2 = gen_rtx (REG, Pmode, i);
+ reg2 = gen_rtx_REG (Pmode, i);
break;
}
emit_indirect_jump (handler);
emit_label (after_stub);
- return gen_rtx (LABEL_REF, Pmode, stub_start);
+ return gen_rtx_LABEL_REF (Pmode, stub_start);
}
/* Set up the registers for passing the handler address and stack offset
store_expr (handler, reg1, 0);
/* These will be used by the stub. */
- emit_insn (gen_rtx (USE, VOIDmode, reg1));
- emit_insn (gen_rtx (USE, VOIDmode, reg2));
+ emit_insn (gen_rtx_USE (VOIDmode, reg1));
+ emit_insn (gen_rtx_USE (VOIDmode, reg2));
}
#include "config.h"
+#include <stdio.h>
#include "rtl.h"
#include "tree.h"
#include "flags.h"
if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
{
+ /* Any rtl we create here must go in a saveable obstack, since
+ we might have been called from within combine. */
+ push_obstacks_nochange ();
+ rtl_in_saveable_obstack ();
tem
= force_const_mem (GET_MODE (x),
plus_constant (get_pool_constant (XEXP (x, 0)),
c));
+ pop_obstacks ();
if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
return tem;
}
if (GET_CODE (XEXP (x, 1)) == CONST_INT)
return plus_constant (XEXP (x, 0), c + INTVAL (XEXP (x, 1)));
else if (CONSTANT_P (XEXP (x, 0)))
- return gen_rtx (PLUS, mode,
- plus_constant (XEXP (x, 0), c),
- XEXP (x, 1));
+ return gen_rtx_PLUS (mode,
+ plus_constant (XEXP (x, 0), c),
+ XEXP (x, 1));
else if (CONSTANT_P (XEXP (x, 1)))
- return gen_rtx (PLUS, mode,
- XEXP (x, 0),
- plus_constant (XEXP (x, 1), c));
+ return gen_rtx_PLUS (mode,
+ XEXP (x, 0),
+ plus_constant (XEXP (x, 1), c));
+ break;
+
+ default:
+ break;
}
if (c != 0)
- x = gen_rtx (PLUS, mode, x, GEN_INT (c));
+ x = gen_rtx_PLUS (mode, x, GEN_INT (c));
if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
return x;
else if (all_constant)
- return gen_rtx (CONST, mode, x);
+ return gen_rtx_CONST (mode, x);
else
return x;
}
int all_constant = 0;
if (GET_CODE (x) == LO_SUM)
- return gen_rtx (LO_SUM, mode, XEXP (x, 0),
+ return gen_rtx_LO_SUM (mode, XEXP (x, 0),
plus_constant_for_output (XEXP (x, 1), c));
else
&& GET_CODE (tem) == CONST_INT)
{
*constptr = tem;
- return gen_rtx (PLUS, GET_MODE (x), x0, x1);
+ return gen_rtx_PLUS (GET_MODE (x), x0, x1);
}
return x;
register rtx op1 = break_out_memory_refs (XEXP (x, 1));
if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
- x = gen_rtx (GET_CODE (x), Pmode, op0, op1);
+ x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
}
return x;
return x;
case LABEL_REF:
- return gen_rtx (LABEL_REF, to_mode, XEXP (x, 0));
+ temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
+ LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
+ return temp;
case SYMBOL_REF:
- temp = gen_rtx (SYMBOL_REF, to_mode, XSTR (x, 0));
+ temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
return temp;
case CONST:
- return gen_rtx (CONST, to_mode,
- convert_memory_address (to_mode, XEXP (x, 0)));
+ return gen_rtx_CONST (to_mode,
+ convert_memory_address (to_mode, XEXP (x, 0)));
case PLUS:
case MULT:
/* For addition the second operand is a small constant, we can safely
- permute the converstion and addition operation. We can always safely
+ permute the conversion and addition operation. We can always safely
permute them if we are making the address narrower. In addition,
always permute the operations if this is a constant. */
if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
|| (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
&& (INTVAL (XEXP (x, 1)) + 20000 < 40000
|| CONSTANT_P (XEXP (x, 0)))))
- return gen_rtx (GET_CODE (x), to_mode,
- convert_memory_address (to_mode, XEXP (x, 0)),
- convert_memory_address (to_mode, XEXP (x, 1)));
+ return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
+ convert_memory_address (to_mode, XEXP (x, 0)),
+ convert_memory_address (to_mode, XEXP (x, 1)));
+ break;
+
+ default:
+ break;
}
return convert_modes (to_mode, from_mode,
register rtx op0 = copy_all_regs (XEXP (x, 0));
register rtx op1 = copy_all_regs (XEXP (x, 1));
if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
- x = gen_rtx (GET_CODE (x), Pmode, op0, op1);
+ x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
}
return x;
}
{
register rtx oldx = x;
+ if (GET_CODE (x) == ADDRESSOF)
+ return x;
+
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE (x) == ptr_mode)
x = convert_memory_address (Pmode, x);
x = force_operand (x, NULL_RTX);
else
{
- y = gen_rtx (PLUS, GET_MODE (x), copy_to_reg (y), constant_term);
+ y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
if (! memory_address_p (mode, y))
x = force_operand (x, NULL_RTX);
else
rtx mem;
if (GET_CODE (temp) != REG)
temp = copy_to_reg (temp);
- mem = gen_rtx (MEM, GET_MODE (x), temp);
+ mem = gen_rtx_MEM (GET_MODE (x), temp);
/* Mark returned memref with in_struct if it's in an array or
structure. Copy const and volatile from original memref. */
if (note)
XEXP (note, 0) = x;
else
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL, x, REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, x, REG_NOTES (insn));
}
return temp;
}
unsignedp = POINTERS_EXTEND_UNSIGNED;
break;
#endif
+
+ default:
+ break;
}
*punsignedp = unsignedp;
}
break;
#endif
+ default:
+ break;
}
/* If there is no save area and we have to allocate one, do so. Otherwise
fcn = gen_restore_stack_nonlocal;
break;
#endif
+ default:
+ break;
}
if (sa != 0)
mark_reg_pointer (target, known_align / BITS_PER_UNIT);
-#ifndef STACK_GROWS_DOWNWARD
- emit_move_insn (target, virtual_stack_dynamic_rtx);
-#endif
-
/* Perform the required allocation from the stack. Some systems do
this differently than simply incrementing/decrementing from the
- stack pointer. */
+ stack pointer, such as acquiring the space by calling malloc(). */
#ifdef HAVE_allocate_stack
if (HAVE_allocate_stack)
{
- enum machine_mode mode
- = insn_operand_mode[(int) CODE_FOR_allocate_stack][0];
-
- size = convert_modes (mode, ptr_mode, size, 1);
+ enum machine_mode mode;
if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][0]
&& ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][0])
+ (target, Pmode)))
+ target = copy_to_mode_reg (Pmode, target);
+ mode = insn_operand_mode[(int) CODE_FOR_allocate_stack][1];
+ size = convert_modes (mode, ptr_mode, size, 1);
+ if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][1]
+ && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][1])
(size, mode)))
size = copy_to_mode_reg (mode, size);
- emit_insn (gen_allocate_stack (size));
+ emit_insn (gen_allocate_stack (target, size));
}
else
#endif
{
+#ifndef STACK_GROWS_DOWNWARD
+ emit_move_insn (target, virtual_stack_dynamic_rtx);
+#endif
size = convert_modes (Pmode, ptr_mode, size, 1);
anti_adjust_stack (size);
- }
-
#ifdef STACK_GROWS_DOWNWARD
emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
+ }
if (MUST_ALIGN)
{
emit_stack_probe (address)
rtx address;
{
- rtx memref = gen_rtx (MEM, word_mode, address);
+ rtx memref = gen_rtx_MEM (word_mode, address);
MEM_VOLATILE_P (memref) = 1;
#ifdef HAVE_check_stack
if (HAVE_check_stack)
{
- rtx last_addr = force_operand (gen_rtx (STACK_GROW_OP, Pmode,
- stack_pointer_rtx,
- plus_constant (size, first)),
- NULL_RTX);
+ rtx last_addr
+ = force_operand (gen_rtx_STACK_GROW_OP (Pmode,
+ stack_pointer_rtx,
+ plus_constant (size, first)),
+ NULL_RTX);
if (insn_operand_predicate[(int) CODE_FOR_check_stack][0]
&& ! ((*insn_operand_predicate[(int) CODE_FOR_check_stack][0])
for (offset = first + STACK_CHECK_PROBE_INTERVAL;
offset < INTVAL (size);
offset = offset + STACK_CHECK_PROBE_INTERVAL)
- emit_stack_probe (gen_rtx (STACK_GROW_OP, Pmode,
- stack_pointer_rtx, GEN_INT (offset)));
+ emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
+ stack_pointer_rtx,
+ GEN_INT (offset)));
- emit_stack_probe (gen_rtx (STACK_GROW_OP, Pmode, stack_pointer_rtx,
- plus_constant (size, first)));
+ emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
+ stack_pointer_rtx,
+ plus_constant (size, first)));
}
/* In the variable case, do the same as above, but in a loop. We emit loop
else
{
rtx test_addr
- = force_operand (gen_rtx (STACK_GROW_OP, Pmode, stack_pointer_rtx,
- GEN_INT (first
- + STACK_CHECK_PROBE_INTERVAL)),
+ = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
+ stack_pointer_rtx,
+ GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
NULL_RTX);
rtx last_addr
- = force_operand (gen_rtx (STACK_GROW_OP, Pmode, stack_pointer_rtx,
- plus_constant (size, first)),
+ = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
+ stack_pointer_rtx,
+ plus_constant (size, first)),
NULL_RTX);
rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
rtx loop_lab = gen_label_rtx ();
emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
emit_label (end_lab);
+ /* If will be doing stupid optimization, show test_addr is still live. */
+ if (obey_regdecls)
+ emit_insn (gen_rtx_USE (VOIDmode, test_addr));
+
emit_stack_probe (last_addr);
}
}
char *free_point;
/* This is "some random pseudo register" for purposes of calling recog
to see what insns exist. */
- rtx reg = gen_rtx (REG, word_mode, 10000);
+ rtx reg = gen_rtx_REG (word_mode, 10000);
rtx shift_insn, shiftadd_insn, shiftsub_insn;
int dummy;
int m;
free_point = (char *) oballoc (0);
zero_cost = rtx_cost (const0_rtx, 0);
- add_cost = rtx_cost (gen_rtx (PLUS, word_mode, reg, reg), SET);
-
- shift_insn = emit_insn (gen_rtx (SET, VOIDmode, reg,
- gen_rtx (ASHIFT, word_mode, reg,
- const0_rtx)));
-
- shiftadd_insn = emit_insn (gen_rtx (SET, VOIDmode, reg,
- gen_rtx (PLUS, word_mode,
- gen_rtx (MULT, word_mode,
- reg, const0_rtx),
- reg)));
-
- shiftsub_insn = emit_insn (gen_rtx (SET, VOIDmode, reg,
- gen_rtx (MINUS, word_mode,
- gen_rtx (MULT, word_mode,
- reg, const0_rtx),
- reg)));
+ add_cost = rtx_cost (gen_rtx_PLUS (word_mode, reg, reg), SET);
+
+ shift_insn = emit_insn (gen_rtx_SET (VOIDmode, reg,
+ gen_rtx_ASHIFT (word_mode, reg,
+ const0_rtx)));
+
+ shiftadd_insn
+ = emit_insn (gen_rtx_SET (VOIDmode, reg,
+ gen_rtx_PLUS (word_mode,
+ gen_rtx_MULT (word_mode,
+ reg, const0_rtx),
+ reg)));
+
+ shiftsub_insn
+ = emit_insn (gen_rtx_SET (VOIDmode, reg,
+ gen_rtx_MINUS (word_mode,
+ gen_rtx_MULT (word_mode,
+ reg, const0_rtx),
+ reg)));
init_recog ();
shiftsub_cost[m] = rtx_cost (SET_SRC (PATTERN (shiftsub_insn)), SET);
}
- negate_cost = rtx_cost (gen_rtx (NEG, word_mode, reg), SET);
+ negate_cost = rtx_cost (gen_rtx_NEG (word_mode, reg), SET);
sdiv_pow2_cheap
- = (rtx_cost (gen_rtx (DIV, word_mode, reg, GEN_INT (32)), SET)
+ = (rtx_cost (gen_rtx_DIV (word_mode, reg, GEN_INT (32)), SET)
<= 2 * add_cost);
smod_pow2_cheap
- = (rtx_cost (gen_rtx (MOD, word_mode, reg, GEN_INT (32)), SET)
+ = (rtx_cost (gen_rtx_MOD (word_mode, reg, GEN_INT (32)), SET)
<= 2 * add_cost);
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
{
- reg = gen_rtx (REG, mode, 10000);
- div_cost[(int) mode] = rtx_cost (gen_rtx (UDIV, mode, reg, reg), SET);
- mul_cost[(int) mode] = rtx_cost (gen_rtx (MULT, mode, reg, reg), SET);
+ reg = gen_rtx_REG (mode, 10000);
+ div_cost[(int) mode] = rtx_cost (gen_rtx_UDIV (mode, reg, reg), SET);
+ mul_cost[(int) mode] = rtx_cost (gen_rtx_MULT (mode, reg, reg), SET);
wider_mode = GET_MODE_WIDER_MODE (mode);
if (wider_mode != VOIDmode)
{
mul_widen_cost[(int) wider_mode]
- = rtx_cost (gen_rtx (MULT, wider_mode,
- gen_rtx (ZERO_EXTEND, wider_mode, reg),
- gen_rtx (ZERO_EXTEND, wider_mode, reg)),
+ = rtx_cost (gen_rtx_MULT (wider_mode,
+ gen_rtx_ZERO_EXTEND (wider_mode, reg),
+ gen_rtx_ZERO_EXTEND (wider_mode, reg)),
SET);
mul_highpart_cost[(int) mode]
- = rtx_cost (gen_rtx (TRUNCATE, mode,
- gen_rtx (LSHIFTRT, wider_mode,
- gen_rtx (MULT, wider_mode,
- gen_rtx (ZERO_EXTEND, wider_mode, reg),
- gen_rtx (ZERO_EXTEND, wider_mode, reg)),
- GEN_INT (GET_MODE_BITSIZE (mode)))),
+ = rtx_cost (gen_rtx_TRUNCATE
+ (mode,
+ gen_rtx_LSHIFTRT
+ (wider_mode,
+ gen_rtx_MULT (wider_mode,
+ gen_rtx_ZERO_EXTEND (wider_mode, reg),
+ gen_rtx_ZERO_EXTEND (wider_mode, reg)),
+ GEN_INT (GET_MODE_BITSIZE (mode)))),
SET);
}
}
if (GET_MODE (op0) != fieldmode)
{
if (GET_CODE (op0) == REG)
- op0 = gen_rtx (SUBREG, fieldmode, op0, offset);
+ op0 = gen_rtx_SUBREG (fieldmode, op0, offset);
else
op0 = change_address (op0, fieldmode,
plus_constant (XEXP (op0, 0), offset));
if(! (*insn_operand_predicate[icode][1]) (value, fieldmode))
value = copy_to_mode_reg (fieldmode, value);
emit_insn (GEN_FCN (icode)
- (gen_rtx (SUBREG, fieldmode, op0, offset), value));
+ (gen_rtx_SUBREG (fieldmode, op0, offset), value));
}
return value;
}
{
if (offset != 0
|| GET_MODE_SIZE (GET_MODE (op0)) > UNITS_PER_WORD)
- op0 = gen_rtx (SUBREG, TYPE_MODE (type_for_size (BITS_PER_WORD, 0)),
+ op0 = gen_rtx_SUBREG (TYPE_MODE (type_for_size (BITS_PER_WORD, 0)),
op0, offset);
offset = 0;
}
{
if (GET_CODE (value) != REG)
value = copy_to_reg (value);
- value = gen_rtx (SUBREG, word_mode, value, 0);
+ value = gen_rtx_SUBREG (word_mode, value, 0);
}
/* Now OFFSET is nonzero only if OP0 is memory
if (GET_CODE (xop0) == SUBREG)
/* We can't just change the mode, because this might clobber op0,
and we will need the original value of op0 if insv fails. */
- xop0 = gen_rtx (SUBREG, maxmode, SUBREG_REG (xop0), SUBREG_WORD (xop0));
+ xop0 = gen_rtx_SUBREG (maxmode, SUBREG_REG (xop0), SUBREG_WORD (xop0));
if (GET_CODE (xop0) == REG && GET_MODE (xop0) != maxmode)
- xop0 = gen_rtx (SUBREG, maxmode, xop0, 0);
+ xop0 = gen_rtx_SUBREG (maxmode, xop0, 0);
/* On big-endian machines, we count bits from the most significant.
If the bit field insn does not, we must invert. */
/* Avoid making subreg of a subreg, or of a mem. */
if (GET_CODE (value1) != REG)
value1 = copy_to_reg (value1);
- value1 = gen_rtx (SUBREG, maxmode, value1, 0);
+ value1 = gen_rtx_SUBREG (maxmode, value1, 0);
}
else
value1 = gen_lowpart (maxmode, value1);
if (mode1 != GET_MODE (op0))
{
if (GET_CODE (op0) == REG)
- op0 = gen_rtx (SUBREG, mode1, op0, offset);
+ op0 = gen_rtx_SUBREG (mode1, op0, offset);
else
op0 = change_address (op0, mode1,
plus_constant (XEXP (op0, 0), offset));
target = gen_reg_rtx (mode);
/* Indicate for flow that the entire target reg is being set. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
for (i = 0; i < nwords; i++)
{
{
if (offset != 0
|| GET_MODE_SIZE (GET_MODE (op0)) > UNITS_PER_WORD)
- op0 = gen_rtx (SUBREG, TYPE_MODE (type_for_size (BITS_PER_WORD, 0)),
+ op0 = gen_rtx_SUBREG (TYPE_MODE (type_for_size (BITS_PER_WORD, 0)),
op0, offset);
offset = 0;
}
if (GET_CODE (xop0) == SUBREG && GET_MODE (xop0) != maxmode)
abort ();
if (GET_CODE (xop0) == REG && GET_MODE (xop0) != maxmode)
- xop0 = gen_rtx (SUBREG, maxmode, xop0, 0);
+ xop0 = gen_rtx_SUBREG (maxmode, xop0, 0);
/* On big-endian machines, we count bits from the most significant.
If the bit field insn does not, we must invert. */
if (GET_CODE (xop0) == SUBREG && GET_MODE (xop0) != maxmode)
abort ();
if (GET_CODE (xop0) == REG && GET_MODE (xop0) != maxmode)
- xop0 = gen_rtx (SUBREG, maxmode, xop0, 0);
+ xop0 = gen_rtx_SUBREG (maxmode, xop0, 0);
/* On big-endian machines, we count bits from the most significant.
If the bit field insn does not, we must invert. */
target, unsignedp);
if (GET_CODE (target) != REG)
target = copy_to_reg (target);
- return gen_rtx (SUBREG, tmode, target, 0);
+ return gen_rtx_SUBREG (tmode, target, 0);
}
else
return convert_to_mode (tmode, target, unsignedp);
and then negate, do the multiplication directly, or do multiplication
by OP1 - 1. */
- mult_cost = rtx_cost (gen_rtx (MULT, mode, op0, op1), SET);
+ mult_cost = rtx_cost (gen_rtx_MULT (mode, op0, op1), SET);
mult_cost = MIN (12 * add_cost, mult_cost);
synth_mult (&alg, val, mult_cost);
case alg_add_t_m2:
tem = expand_shift (LSHIFT_EXPR, mode, op0,
build_int_2 (log, 0), NULL_RTX, 0);
- accum = force_operand (gen_rtx (PLUS, mode, accum, tem),
+ accum = force_operand (gen_rtx_PLUS (mode, accum, tem),
add_target ? add_target : accum_target);
val_so_far += (HOST_WIDE_INT) 1 << log;
break;
case alg_sub_t_m2:
tem = expand_shift (LSHIFT_EXPR, mode, op0,
build_int_2 (log, 0), NULL_RTX, 0);
- accum = force_operand (gen_rtx (MINUS, mode, accum, tem),
+ accum = force_operand (gen_rtx_MINUS (mode, accum, tem),
add_target ? add_target : accum_target);
val_so_far -= (HOST_WIDE_INT) 1 << log;
break;
accum = expand_shift (LSHIFT_EXPR, mode, accum,
build_int_2 (log, 0), shift_subtarget,
0);
- accum = force_operand (gen_rtx (PLUS, mode, accum, op0),
+ accum = force_operand (gen_rtx_PLUS (mode, accum, op0),
add_target ? add_target : accum_target);
val_so_far = (val_so_far << log) + 1;
break;
accum = expand_shift (LSHIFT_EXPR, mode, accum,
build_int_2 (log, 0), shift_subtarget,
0);
- accum = force_operand (gen_rtx (MINUS, mode, accum, op0),
+ accum = force_operand (gen_rtx_MINUS (mode, accum, op0),
add_target ? add_target : accum_target);
val_so_far = (val_so_far << log) - 1;
break;
case alg_add_factor:
tem = expand_shift (LSHIFT_EXPR, mode, accum,
build_int_2 (log, 0), NULL_RTX, 0);
- accum = force_operand (gen_rtx (PLUS, mode, accum, tem),
+ accum = force_operand (gen_rtx_PLUS (mode, accum, tem),
add_target ? add_target : accum_target);
val_so_far += val_so_far << log;
break;
case alg_sub_factor:
tem = expand_shift (LSHIFT_EXPR, mode, accum,
build_int_2 (log, 0), NULL_RTX, 0);
- accum = force_operand (gen_rtx (MINUS, mode, tem, accum),
+ accum = force_operand (gen_rtx_MINUS (mode, tem, accum),
(add_target ? add_target
: preserve ? 0 : tem));
val_so_far = (val_so_far << log) - val_so_far;
insn = get_last_insn ();
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_EQUAL,
- gen_rtx (MULT, mode, op0, GEN_INT (val_so_far)),
- REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_EQUAL,
+ gen_rtx_MULT (mode, op0, GEN_INT (val_so_far)),
+ REG_NOTES (insn));
}
if (variant == negate_variant)
else if (variant == add_variant)
{
val_so_far = val_so_far + 1;
- accum = force_operand (gen_rtx (PLUS, mode, accum, op0), target);
+ accum = force_operand (gen_rtx_PLUS (mode, accum, op0), target);
}
if (val != val_so_far)
build_int_2 (GET_MODE_BITSIZE (mode) - 1, 0),
NULL_RTX, 0);
tem = expand_and (tem, op1, NULL_RTX);
- adj_operand = force_operand (gen_rtx (adj_code, mode, adj_operand, tem),
- adj_operand);
+ adj_operand
+ = force_operand (gen_rtx_fmt_ee (adj_code, mode, adj_operand, tem),
+ adj_operand);
tem = expand_shift (RSHIFT_EXPR, mode, op1,
build_int_2 (GET_MODE_BITSIZE (mode) - 1, 0),
NULL_RTX, 0);
tem = expand_and (tem, op0, NULL_RTX);
- target = force_operand (gen_rtx (adj_code, mode, adj_operand, tem), target);
+ target = force_operand (gen_rtx_fmt_ee (adj_code, mode, adj_operand, tem),
+ target);
return target;
}
max_cost - extra_cost);
if (t1 == 0)
goto fail1;
- t2 = force_operand (gen_rtx (MINUS, compute_mode,
- op0, t1),
+ t2 = force_operand (gen_rtx_MINUS (compute_mode,
+ op0, t1),
NULL_RTX);
t3 = expand_shift (RSHIFT_EXPR, compute_mode, t2,
build_int_2 (1, 0), NULL_RTX,1);
- t4 = force_operand (gen_rtx (PLUS, compute_mode,
- t1, t3),
+ t4 = force_operand (gen_rtx_PLUS (compute_mode,
+ t1, t3),
NULL_RTX);
quotient
= expand_shift (RSHIFT_EXPR, compute_mode, t4,
&& (set = single_set (insn)) != 0
&& SET_DEST (set) == quotient)
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_EQUAL,
- gen_rtx (UDIV, compute_mode, op0, op1),
- REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_EQUAL,
+ gen_rtx_UDIV (compute_mode, op0, op1),
+ REG_NOTES (insn));
}
else /* TRUNC_DIV, signed */
{
t2 = expand_shift (RSHIFT_EXPR, compute_mode, t1,
build_int_2 (size - lgup, 0),
NULL_RTX, 1);
- t3 = force_operand (gen_rtx (PLUS, compute_mode,
- op0, t2),
+ t3 = force_operand (gen_rtx_PLUS (compute_mode,
+ op0, t2),
NULL_RTX);
quotient = expand_shift (RSHIFT_EXPR, compute_mode, t3,
build_int_2 (lgup, 0),
&& (set = single_set (insn)) != 0
&& SET_DEST (set) == quotient)
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_EQUAL,
- gen_rtx (DIV, compute_mode, op0,
- GEN_INT (abs_d)),
+ = gen_rtx_EXPR_LIST (REG_EQUAL,
+ gen_rtx_DIV (compute_mode,
+ op0,
+ GEN_INT (abs_d)),
REG_NOTES (insn));
quotient = expand_unop (compute_mode, neg_optab,
t3 = expand_shift (RSHIFT_EXPR, compute_mode, op0,
build_int_2 (size - 1, 0), NULL_RTX, 0);
if (d < 0)
- quotient = force_operand (gen_rtx (MINUS, compute_mode, t3, t2),
+ quotient = force_operand (gen_rtx_MINUS (compute_mode, t3, t2),
tquotient);
else
- quotient = force_operand (gen_rtx (MINUS, compute_mode, t2, t3),
+ quotient = force_operand (gen_rtx_MINUS (compute_mode, t2, t3),
tquotient);
}
else
max_cost - extra_cost);
if (t1 == 0)
goto fail1;
- t2 = force_operand (gen_rtx (PLUS, compute_mode, t1, op0),
+ t2 = force_operand (gen_rtx_PLUS (compute_mode, t1, op0),
NULL_RTX);
t3 = expand_shift (RSHIFT_EXPR, compute_mode, t2,
build_int_2 (post_shift, 0), NULL_RTX, 0);
t4 = expand_shift (RSHIFT_EXPR, compute_mode, op0,
build_int_2 (size - 1, 0), NULL_RTX, 0);
if (d < 0)
- quotient = force_operand (gen_rtx (MINUS, compute_mode, t4, t3),
+ quotient = force_operand (gen_rtx_MINUS (compute_mode, t4, t3),
tquotient);
else
- quotient = force_operand (gen_rtx (MINUS, compute_mode, t3, t4),
+ quotient = force_operand (gen_rtx_MINUS (compute_mode, t3, t4),
tquotient);
}
}
&& (set = single_set (insn)) != 0
&& SET_DEST (set) == quotient)
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_EQUAL,
- gen_rtx (DIV, compute_mode, op0, op1),
- REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_EQUAL,
+ gen_rtx_DIV (compute_mode, op0, op1),
+ REG_NOTES (insn));
}
break;
}
else
{
rtx nsign, t1, t2, t3, t4;
- t1 = force_operand (gen_rtx (PLUS, compute_mode,
- op0, constm1_rtx), NULL_RTX);
+ t1 = force_operand (gen_rtx_PLUS (compute_mode,
+ op0, constm1_rtx), NULL_RTX);
t2 = expand_binop (compute_mode, ior_optab, op0, t1, NULL_RTX,
0, OPTAB_WIDEN);
nsign = expand_shift (RSHIFT_EXPR, compute_mode, t2,
build_int_2 (size - 1, 0), NULL_RTX, 0);
- t3 = force_operand (gen_rtx (MINUS, compute_mode, t1, nsign),
+ t3 = force_operand (gen_rtx_MINUS (compute_mode, t1, nsign),
NULL_RTX);
t4 = expand_divmod (0, TRUNC_DIV_EXPR, compute_mode, t3, op1,
NULL_RTX, 0);
rtx t5;
t5 = expand_unop (compute_mode, one_cmpl_optab, nsign,
NULL_RTX, 0);
- quotient = force_operand (gen_rtx (PLUS, compute_mode,
- t4, t5),
+ quotient = force_operand (gen_rtx_PLUS (compute_mode,
+ t4, t5),
tquotient);
}
}
quotient = t1;
}
else
- quotient = force_operand (gen_rtx (PLUS, compute_mode,
- t1, t3),
+ quotient = force_operand (gen_rtx_PLUS (compute_mode,
+ t1, t3),
tquotient);
break;
}
quotient = t1;
}
else
- quotient = force_operand (gen_rtx (PLUS, compute_mode,
- t1, t3),
+ quotient = force_operand (gen_rtx_PLUS (compute_mode,
+ t1, t3),
tquotient);
break;
}
insn = get_last_insn ();
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_EQUAL,
- gen_rtx (unsignedp ? UDIV : DIV, compute_mode,
- op0, op1),
- REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_EQUAL,
+ gen_rtx_fmt_ee (unsignedp ? UDIV : DIV,
+ compute_mode,
+ op0, op1),
+ REG_NOTES (insn));
}
break;
/* Try indexing by frame ptr and try by stack ptr.
It is known that on the Convex the stack ptr isn't a valid index.
With luck, one or the other is valid on any machine. */
- rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
- rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
+ rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
+ rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
start_sequence ();
- insn = emit_insn (gen_rtx (SET, 0, NULL_RTX, NULL_RTX));
+ insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
pat = PATTERN (insn);
for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
if (! HARD_REGNO_MODE_OK (regno, mode))
continue;
- reg = gen_rtx (REG, mode, regno);
+ reg = gen_rtx_REG (mode, regno);
SET_SRC (pat) = mem;
SET_DEST (pat) = reg;
enqueue_insn (var, body)
rtx var, body;
{
- pending_chain = gen_rtx (QUEUED, GET_MODE (var),
- var, NULL_RTX, NULL_RTX, body, pending_chain);
+ pending_chain = gen_rtx_QUEUED (GET_MODE (var),
+ var, NULL_RTX, NULL_RTX, body,
+ pending_chain);
return pending_chain;
}
&& GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
{
register rtx y = XEXP (x, 0);
- register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
+ register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
!= CODE_FOR_nothing))
{
if (GET_CODE (to) == REG)
- emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
convert_move (gen_lowpart (word_mode, to), from, unsignedp);
emit_unop_insn (code, to,
gen_lowpart (word_mode, to), equiv_code);
end_sequence ();
emit_no_conflict_block (insns, to, from, NULL_RTX,
- gen_rtx (equiv_code, to_mode, copy_rtx (from)));
+ gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
return;
}
if (data->reverse) data->offset -= size;
to1 = (data->autinc_to
- ? gen_rtx (MEM, mode, data->to_addr)
+ ? gen_rtx_MEM (mode, data->to_addr)
: copy_rtx (change_address (data->to, mode,
plus_constant (data->to_addr,
data->offset))));
from1
= (data->autinc_from
- ? gen_rtx (MEM, mode, data->from_addr)
+ ? gen_rtx_MEM (mode, data->from_addr)
: copy_rtx (change_address (data->from, mode,
plus_constant (data->from_addr,
data->offset))));
if (HAVE_load_multiple)
{
last = get_last_insn ();
- pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
+ pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
GEN_INT (nregs));
if (pat)
{
#endif
for (i = 0; i < nregs; i++)
- emit_move_insn (gen_rtx (REG, word_mode, regno + i),
+ emit_move_insn (gen_rtx_REG (word_mode, regno + i),
operand_subword_force (x, i, mode));
}
&& (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
{
emit_move_insn (change_address (x, mode, NULL),
- gen_rtx (REG, mode, regno));
+ gen_rtx_REG (mode, regno));
return;
}
abort ();
shift = expand_shift (LSHIFT_EXPR, word_mode,
- gen_rtx (REG, word_mode, regno),
+ gen_rtx_REG (word_mode, regno),
build_int_2 ((UNITS_PER_WORD - size)
* BITS_PER_UNIT, 0), NULL_RTX, 0);
emit_move_insn (tem, shift);
if (HAVE_store_multiple)
{
last = get_last_insn ();
- pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
+ pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
GEN_INT (nregs));
if (pat)
{
if (tem == 0)
abort ();
- emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
+ emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
}
}
abort();
*call_fusage
- = gen_rtx (EXPR_LIST, VOIDmode,
- gen_rtx (USE, VOIDmode, reg), *call_fusage);
+ = gen_rtx_EXPR_LIST (VOIDmode,
+ gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
abort ();
for (i = 0; i < nregs; i++)
- use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
+ use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
if (data->reverse) data->offset -= size;
to1 = (data->autinc_to
- ? gen_rtx (MEM, mode, data->to_addr)
+ ? gen_rtx_MEM (mode, data->to_addr)
: copy_rtx (change_address (data->to, mode,
plus_constant (data->to_addr,
data->offset))));
regardless of machine's endianness. */
#ifdef STACK_GROWS_DOWNWARD
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx (MEM, submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, (XEXP (x, 0))),
gen_imagpart (submode, y)));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx (MEM, submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, (XEXP (x, 0))),
gen_realpart (submode, y)));
#else
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx (MEM, submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, (XEXP (x, 0))),
gen_realpart (submode, y)));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
- (gen_rtx (MEM, submode, (XEXP (x, 0))),
+ (gen_rtx_MEM (submode, (XEXP (x, 0))),
gen_imagpart (submode, y)));
#endif
}
/* Show the output dies here. */
if (x != y)
- emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
for (i = 0;
i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
temp = plus_constant (virtual_outgoing_args_rtx,
- INTVAL (size) - (below ? 0 : extra));
else if (extra != 0 && !below)
- temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
+ temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
negate_rtx (Pmode, plus_constant (size, extra)));
else
- temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
+ temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
negate_rtx (Pmode, size));
#endif
rtx
gen_push_operand ()
{
- return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
+ return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
/* Return an rtx for the address of the beginning of a as-if-it-was-pushed
register rtx temp;
if (STACK_PUSH_CODE == POST_DEC)
- temp = gen_rtx (PLUS, Pmode, stack_pointer_rtx, GEN_INT (size));
+ temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
else if (STACK_PUSH_CODE == POST_INC)
- temp = gen_rtx (MINUS, Pmode, stack_pointer_rtx, GEN_INT (size));
+ temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
else
temp = stack_pointer_rtx;
&& where_pad != none && where_pad != stack_direction)
anti_adjust_stack (GEN_INT (extra));
- move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
+ move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
INTVAL (size) - used, align);
if (flag_check_memory_usage && ! in_check_memory_usage)
skip + INTVAL (args_so_far)));
else
temp = memory_address (BLKmode,
- plus_constant (gen_rtx (PLUS, Pmode,
- args_addr, args_so_far),
+ plus_constant (gen_rtx_PLUS (Pmode,
+ args_addr,
+ args_so_far),
skip));
if (flag_check_memory_usage && ! in_check_memory_usage)
{
&& (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
< MOVE_RATIO))
{
- move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
+ move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
INTVAL (size), align);
goto ret;
}
&& ((unsigned) INTVAL (size)
< (1 << (GET_MODE_BITSIZE (QImode) - 1))))
{
- rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
+ rtx pat = gen_movstrqi (gen_rtx_MEM (BLKmode, temp),
xinner, size, GEN_INT (align));
if (pat != 0)
{
&& ((unsigned) INTVAL (size)
< (1 << (GET_MODE_BITSIZE (HImode) - 1))))
{
- rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
+ rtx pat = gen_movstrhi (gen_rtx_MEM (BLKmode, temp),
xinner, size, GEN_INT (align));
if (pat != 0)
{
#ifdef HAVE_movstrsi
if (HAVE_movstrsi)
{
- rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
+ rtx pat = gen_movstrsi (gen_rtx_MEM (BLKmode, temp),
xinner, size, GEN_INT (align));
if (pat != 0)
{
#ifdef HAVE_movstrdi
if (HAVE_movstrdi)
{
- rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
+ rtx pat = gen_movstrdi (gen_rtx_MEM (BLKmode, temp),
xinner, size, GEN_INT (align));
if (pat != 0)
{
plus_constant (args_addr,
INTVAL (args_so_far)));
else
- addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
- args_so_far));
+ addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
+ args_so_far));
target = addr;
}
- emit_move_insn (gen_rtx (MEM, mode, addr), x);
+ emit_move_insn (gen_rtx_MEM (mode, addr), x);
if (flag_check_memory_usage && ! in_check_memory_usage)
{
if (GET_CODE (to_rtx) != MEM)
abort ();
to_rtx = change_address (to_rtx, VOIDmode,
- gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
- force_reg (ptr_mode, offset_rtx)));
+ gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
+ force_reg (ptr_mode, offset_rtx)));
}
if (volatilep)
{
/* Inform later passes that the whole union value is dead. */
if (TREE_CODE (type) == UNION_TYPE
|| TREE_CODE (type) == QUAL_UNION_TYPE)
- emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* If we are building a static constructor into a register,
set the initial value as zero so we can fold the value into
}
else
/* Inform later passes that the old value is dead. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* Store each element of the constructor into
the corresponding field of TARGET. */
to_rtx
= change_address (to_rtx, VOIDmode,
- gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
+ gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
force_reg (ptr_mode, offset_rtx)));
}
if (TREE_READONLY (field))
}
else
/* Inform later passes that the old value is dead. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* Store each element of the constructor into
the corresponding element of TARGET, determined
TYPE_MIN_VALUE (domain)),
position);
pos_rtx = expand_expr (position, 0, VOIDmode, 0);
- addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
+ addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
xtarget = change_address (target, mode, addr);
if (TREE_CODE (value) == CONSTRUCTOR)
store_constructor (value, xtarget, cleared);
/* Needed by stupid register allocation. to extend the
lifetime of pseudo-regs used by target past the end
of the loop. */
- emit_insn (gen_rtx (USE, GET_MODE (target), target));
+ emit_insn (gen_rtx_USE (GET_MODE (target), target));
}
}
else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
size_int (BITS_PER_UNIT));
position = size_binop (MULT_EXPR, index, position);
pos_rtx = expand_expr (position, 0, VOIDmode, 0);
- addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
+ addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
xtarget = change_address (target, mode, addr);
store_expr (value, xtarget, 0);
}
else
#endif
{
- emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
+ emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
bitlength_rtx, TYPE_MODE (sizetype),
startbit_rtx, TYPE_MODE (sizetype),
push_obstacks (p->function_obstack,
p->function_maybepermanent_obstack);
- p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
- label_rtx (exp), p->forced_labels);
+ p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
+ label_rtx (exp),
+ p->forced_labels);
pop_obstacks ();
}
else if (modifier == EXPAND_INITIALIZER)
- forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
- label_rtx (exp), forced_labels);
- temp = gen_rtx (MEM, FUNCTION_MODE,
- gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
+ forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
+ label_rtx (exp), forced_labels);
+ temp = gen_rtx_MEM (FUNCTION_MODE,
+ gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
if (function != current_function_decl
&& function != inline_function_decl && function != 0)
LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
abort ();
addr = XEXP (DECL_RTL (exp), 0);
if (GET_CODE (addr) == MEM)
- addr = gen_rtx (MEM, Pmode,
- fix_lexical_addr (XEXP (addr, 0), exp));
+ addr = gen_rtx_MEM (Pmode,
+ fix_lexical_addr (XEXP (addr, 0), exp));
else
addr = fix_lexical_addr (addr, exp);
temp = change_address (DECL_RTL (exp), mode, addr);
!= promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
abort ();
- temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
+ temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
SUBREG_PROMOTED_VAR_P (temp) = 1;
SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
return temp;
SAVE_EXPR_RTL (exp) = temp;
if (!optimize && GET_CODE (temp) == REG)
- save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
- save_expr_regs);
+ save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
+ save_expr_regs);
/* If the mode of TEMP does not match that of the expression, it
must be a promoted value. We pass store_expr a SUBREG of the
if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
{
- temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
+ temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
SUBREG_PROMOTED_VAR_P (temp) = 1;
SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
}
{
/* Compute the signedness and make the proper SUBREG. */
promote_mode (type, mode, &unsignedp, 0);
- temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
+ temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
SUBREG_PROMOTED_VAR_P (temp) = 1;
SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
return temp;
}
}
- temp = gen_rtx (MEM, mode, op0);
+ temp = gen_rtx_MEM (mode, op0);
/* If address was computed by addition,
mark this as an element of an aggregate. */
if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
#endif
op0 = change_address (op0, VOIDmode,
- gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
- force_reg (ptr_mode, offset_rtx)));
+ gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
+ force_reg (ptr_mode, offset_rtx)));
}
/* Don't forget about volatility even if this is a bitfield. */
/* Get a reference to just this component. */
if (modifier == EXPAND_CONST_ADDRESS
|| modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
- op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
- (bitpos / BITS_PER_UNIT)));
+ op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
+ (bitpos / BITS_PER_UNIT)));
else
op0 = change_address (op0, mode1,
plus_constant (XEXP (op0, 0),
/* Extract the bit we want to examine */
bit = expand_shift (RSHIFT_EXPR, byte_mode,
- gen_rtx (MEM, byte_mode, addr),
+ gen_rtx_MEM (byte_mode, addr),
make_tree (TREE_TYPE (index), rem),
NULL_RTX, 1);
result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
if (modifier == EXPAND_INITIALIZER)
- return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
+ return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
if (target == 0)
return
op0 = temp;
/* Ensure that MULT comes first if there is one. */
else if (GET_CODE (op0) == MULT)
- op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
+ op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
else
- op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
+ op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
/* Let's also eliminate constants from op0 if possible. */
op0 = eliminate_constant_term (op0, &constant_term);
if (temp != 0)
op1 = temp;
else
- op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
+ op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
}
/* Put a constant term last and put a multiplication first. */
temp = op1, op1 = op0, op0 = temp;
temp = simplify_binary_operation (PLUS, mode, op0, op1);
- return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
+ return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
case MINUS_EXPR:
/* For initializers, we are allowed to return a MINUS of two
if (GET_CODE (op1) == CONST_INT)
return plus_constant (op0, - INTVAL (op1));
else
- return gen_rtx (MINUS, mode, op0, op1);
+ return gen_rtx_MINUS (mode, op0, op1);
}
/* Convert A - const to A + (-const). */
if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
/* Apply distributive law if OP0 is x+c. */
if (GET_CODE (op0) == PLUS
&& GET_CODE (XEXP (op0, 1)) == CONST_INT)
- return gen_rtx (PLUS, mode,
- gen_rtx (MULT, mode, XEXP (op0, 0),
- GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
+ return gen_rtx_PLUS (mode,
+ gen_rtx_MULT (mode, XEXP (op0, 0),
+ GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
* INTVAL (XEXP (op0, 1))));
if (GET_CODE (op0) != REG)
op0 = copy_to_mode_reg (mode, op0);
- return gen_rtx (MULT, mode, op0,
- GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
+ return gen_rtx_MULT (mode, op0,
+ GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
}
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
case POPDCC_EXPR:
{
rtx dcc = get_dynamic_cleanup_chain ();
- emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
+ emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
return const0_rtx;
}
case POPDHC_EXPR:
{
rtx dhc = get_dynamic_handler_chain ();
- emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
+ emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
return const0_rtx;
}
tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
tem = memory_address (Pmode, tem);
- tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
+ tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
}
/* For __builtin_frame_address, return what we've got. */
#else
tem = memory_address (Pmode,
plus_constant (tem, GET_MODE_SIZE (Pmode)));
- tem = gen_rtx (MEM, Pmode, tem);
+ tem = gen_rtx_MEM (Pmode, tem);
#endif
return tem;
}
/* We store the frame pointer and the address of lab1 in the buffer
and use the rest of it for the stack save area, which is
machine-dependent. */
- emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
+ emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
virtual_stack_vars_rtx);
emit_move_insn
- (validize_mem (gen_rtx (MEM, Pmode,
- plus_constant (buf_addr,
- GET_MODE_SIZE (Pmode)))),
- gen_rtx (LABEL_REF, Pmode, lab1));
+ (validize_mem (gen_rtx_MEM (Pmode,
+ plus_constant (buf_addr,
+ GET_MODE_SIZE (Pmode)))),
+ gen_rtx_LABEL_REF (Pmode, lab1));
#ifdef HAVE_save_stack_nonlocal
if (HAVE_save_stack_nonlocal)
sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
#endif
- stack_save = gen_rtx (MEM, sa_mode,
- plus_constant (buf_addr,
- 2 * GET_MODE_SIZE (Pmode)));
+ stack_save = gen_rtx_MEM (sa_mode,
+ plus_constant (buf_addr,
+ 2 * GET_MODE_SIZE (Pmode)));
emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
#ifdef HAVE_setjmp
/* Note that setjmp clobbers FP when we get here, so we have to make
sure it's marked as used by this function. */
- emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
/* Mark the static chain as clobbered here so life information
doesn't get messed up for it. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
/* Now put in the code to restore the frame pointer, and argument
pointer, if needed. The code below is from expand_end_bindings
op0 = memory_address (FUNCTION_MODE, op0);
INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
- gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
+ gen_rtx_SYMBOL_REF (Pmode, "__dummy"), 1);
next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
#ifndef ACCUMULATE_OUTGOING_ARGS
#ifdef HAVE_call_pop
if (HAVE_call_pop)
- emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
+ emit_call_insn (gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, op0),
const0_rtx, next_arg_reg,
GEN_INT (return_pops)));
else
#ifdef HAVE_call
if (HAVE_call)
- emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
+ emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, op0),
const0_rtx, next_arg_reg, const0_rtx));
else
#endif
rtx errno_rtx = GEN_ERRNO_RTX;
#else
rtx errno_rtx
- = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
+ = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
char_rtx = copy_to_mode_reg (char_mode, char_rtx);
emit_insn (GEN_FCN (icode) (result,
- gen_rtx (MEM, BLKmode, src_rtx),
+ gen_rtx_MEM (BLKmode, src_rtx),
char_rtx, GEN_INT (align)));
/* Return the value in the proper mode for this function. */
}
dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
- dest_mem = gen_rtx (MEM, BLKmode,
- memory_address (BLKmode, dest_rtx));
+ dest_mem = gen_rtx_MEM (BLKmode,
+ memory_address (BLKmode, dest_rtx));
/* There could be a void* cast on top of the object. */
while (TREE_CODE (dest) == NOP_EXPR)
dest = TREE_OPERAND (dest, 0);
type = TREE_TYPE (TREE_TYPE (dest));
MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
- src_mem = gen_rtx (MEM, BLKmode,
- memory_address (BLKmode, src_rtx));
+ src_mem = gen_rtx_MEM (BLKmode,
+ memory_address (BLKmode, src_rtx));
len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
/* Just copy the rights of SRC to the rights of DEST. */
break;
dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
- dest_mem = gen_rtx (MEM, BLKmode,
- memory_address (BLKmode, dest_rtx));
+ dest_mem = gen_rtx_MEM (BLKmode,
+ memory_address (BLKmode, dest_rtx));
/* Just check DST is writable and mark it as readable. */
if (flag_check_memory_usage)
result = gen_reg_rtx (insn_mode);
emit_insn (gen_cmpstrsi (result,
- gen_rtx (MEM, BLKmode,
- expand_expr (arg1, NULL_RTX,
- ptr_mode,
- EXPAND_NORMAL)),
- gen_rtx (MEM, BLKmode,
- expand_expr (arg2, NULL_RTX,
- ptr_mode,
- EXPAND_NORMAL)),
+ gen_rtx_MEM (BLKmode,
+ expand_expr (arg1, NULL_RTX,
+ ptr_mode,
+ EXPAND_NORMAL)),
+ gen_rtx_MEM (BLKmode,
+ expand_expr (arg2, NULL_RTX,
+ ptr_mode,
+ EXPAND_NORMAL)),
expand_expr (len, NULL_RTX, VOIDmode, 0),
GEN_INT (MIN (arg1_align, arg2_align))));
NULL_RTX,
VOIDmode, 0));
#endif
- rtx fp = gen_rtx (MEM, Pmode, buf_addr);
- rtx lab = gen_rtx (MEM, Pmode,
- plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
+ rtx fp = gen_rtx_MEM (Pmode, buf_addr);
+ rtx lab = gen_rtx_MEM (Pmode,
+ plus_constant (buf_addr,
+ GET_MODE_SIZE (Pmode)));
enum machine_mode sa_mode
#ifdef HAVE_save_stack_nonlocal
= (HAVE_save_stack_nonlocal
#else
= Pmode;
#endif
- rtx stack = gen_rtx (MEM, sa_mode,
- plus_constant (buf_addr,
- 2 * GET_MODE_SIZE (Pmode)));
+ rtx stack = gen_rtx_MEM (sa_mode,
+ plus_constant (buf_addr,
+ 2 * GET_MODE_SIZE (Pmode)));
DECL_EXTERNAL (dummy_decl) = 1;
TREE_PUBLIC (dummy_decl) = 1;
/* Put in the static chain register the address of the dummy
function. */
emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
- emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
- emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
- emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
emit_indirect_jump (lab);
}
align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
if (size % align != 0)
size = CEIL (size, align) * align;
- reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
+ reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
mem = change_address (result, mode,
plus_constant (XEXP (result, 0), size));
savevec[nelts++] = (savep
- ? gen_rtx (SET, VOIDmode, mem, reg)
- : gen_rtx (SET, VOIDmode, reg, mem));
+ ? gen_rtx_SET (VOIDmode, mem, reg)
+ : gen_rtx_SET (VOIDmode, reg, mem));
size += GET_MODE_SIZE (mode);
}
- return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
+ return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
if (size % align != 0)
size = CEIL (size, align) * align;
- tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
+ tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
#ifdef STACK_REGS
/* For reg-stack.c's stack register household.
Compare with a similar piece of code in function.c. */
- emit_insn (gen_rtx (USE, mode, tem));
+ emit_insn (gen_rtx_USE (mode, tem));
#endif
emit_move_insn (change_address (registers, mode,
/* Fetch the arg pointer from the ARGUMENTS block. */
incoming_args = gen_reg_rtx (Pmode);
emit_move_insn (incoming_args,
- gen_rtx (MEM, Pmode, arguments));
+ gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
incoming_args, 0, OPTAB_LIB_WIDEN);
but it's likely that the source and/or destination addresses in
the block copy will need updating in machine specific ways. */
dest = allocate_dynamic_stack_space (argsize, 0, 0);
- emit_block_move (gen_rtx (MEM, BLKmode, dest),
- gen_rtx (MEM, BLKmode, incoming_args),
+ emit_block_move (gen_rtx_MEM (BLKmode, dest),
+ gen_rtx_MEM (BLKmode, incoming_args),
argsize,
PARM_BOUNDARY / BITS_PER_UNIT);
/* Refer to the argument block. */
apply_args_size ();
- arguments = gen_rtx (MEM, BLKmode, arguments);
+ arguments = gen_rtx_MEM (BLKmode, arguments);
/* Walk past the arg-pointer and structure value address. */
size = GET_MODE_SIZE (Pmode);
align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
if (size % align != 0)
size = CEIL (size, align) * align;
- reg = gen_rtx (REG, mode, regno);
+ reg = gen_rtx_REG (mode, regno);
emit_move_insn (reg,
change_address (arguments, mode,
plus_constant (XEXP (arguments, 0),
/* Generate the actual call instruction and save the return value. */
#ifdef HAVE_untyped_call
if (HAVE_untyped_call)
- emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
+ emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
result, result_vector (1, result)));
else
#endif
{
if (valreg)
abort (); /* HAVE_untyped_call required. */
- valreg = gen_rtx (REG, mode, regno);
+ valreg = gen_rtx_REG (mode, regno);
}
emit_call_insn (gen_call_value (valreg,
- gen_rtx (MEM, FUNCTION_MODE, function),
+ gen_rtx_MEM (FUNCTION_MODE, function),
const0_rtx, NULL_RTX, const0_rtx));
emit_move_insn (change_address (result, GET_MODE (valreg),
rtx call_fusage = 0;
apply_result_size ();
- result = gen_rtx (MEM, BLKmode, result);
+ result = gen_rtx_MEM (BLKmode, result);
#ifdef HAVE_untyped_return
if (HAVE_untyped_return)
align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
if (size % align != 0)
size = CEIL (size, align) * align;
- reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
+ reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
emit_move_insn (reg,
change_address (result, mode,
plus_constant (XEXP (result, 0),
size)));
push_to_sequence (call_fusage);
- emit_insn (gen_rtx (USE, VOIDmode, reg));
+ emit_insn (gen_rtx_USE (VOIDmode, reg));
call_fusage = get_insns ();
end_sequence ();
size += GET_MODE_SIZE (mode);
emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
- return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
+ return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
GET_MODE_SIZE, because this indicates how large insns are. The other
uses should all be Pmode, because they are addresses. This code
could fail if addresses and insns are not the same size. */
- index = gen_rtx (PLUS, Pmode,
- gen_rtx (MULT, Pmode, index,
- GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
- gen_rtx (LABEL_REF, Pmode, table_label));
+ index = gen_rtx_PLUS (Pmode,
+ gen_rtx_MULT (Pmode, index,
+ GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
+ gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
if (flag_pic)
index = PIC_CASE_VECTOR_ADDRESS (index);
#endif
index = memory_address_noforce (CASE_VECTOR_MODE, index);
temp = gen_reg_rtx (CASE_VECTOR_MODE);
- vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
+ vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
RTX_UNCHANGING_P (vector) = 1;
convert_move (temp, vector, 0);
/* address of filename */
ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 1);
- assemble_integer (gen_rtx (SYMBOL_REF, Pmode, name), pointer_bytes, 1);
+ assemble_integer (gen_rtx_SYMBOL_REF (Pmode, name), pointer_bytes, 1);
/* address of count table */
ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 2);
- assemble_integer (gen_rtx (SYMBOL_REF, Pmode, name), pointer_bytes, 1);
+ assemble_integer (gen_rtx_SYMBOL_REF (Pmode, name), pointer_bytes, 1);
/* count of the # of basic blocks or # of instrumented arcs */
if (profile_block_flag)
if (profile_block_flag)
{
ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 3);
- assemble_integer (gen_rtx (SYMBOL_REF, Pmode, name), pointer_bytes,
+ assemble_integer (gen_rtx_SYMBOL_REF (Pmode, name), pointer_bytes,
1);
}
else
if (profile_block_flag)
{
ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 4);
- assemble_integer (gen_rtx (SYMBOL_REF, Pmode, name), pointer_bytes,
+ assemble_integer (gen_rtx_SYMBOL_REF (Pmode, name), pointer_bytes,
1);
}
else
if (write_symbols != NO_DEBUG && profile_block_flag)
{
ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 5);
- assemble_integer (gen_rtx (SYMBOL_REF, Pmode, name), pointer_bytes, 1);
+ assemble_integer (gen_rtx_SYMBOL_REF (Pmode, name), pointer_bytes, 1);
ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 6);
- assemble_integer (gen_rtx (SYMBOL_REF, Pmode, name), pointer_bytes, 1);
+ assemble_integer (gen_rtx_SYMBOL_REF (Pmode, name), pointer_bytes, 1);
}
else
{
for (i = 0; i < count_basic_blocks; i++)
{
ASM_GENERATE_INTERNAL_LABEL (name, "LPB", i);
- assemble_integer (gen_rtx (SYMBOL_REF, Pmode, name),
+ assemble_integer (gen_rtx_SYMBOL_REF (Pmode, name),
pointer_bytes, 1);
}
}
{
ASM_GENERATE_INTERNAL_LABEL (name, "LPBC",
ptr->func_label_num);
- assemble_integer (gen_rtx (SYMBOL_REF, Pmode, name),
+ assemble_integer (gen_rtx_SYMBOL_REF (Pmode, name),
pointer_bytes, 1);
}
else
{
ASM_GENERATE_INTERNAL_LABEL (name, "LPBC",
ptr->file_label_num);
- assemble_integer (gen_rtx (SYMBOL_REF, Pmode, name),
+ assemble_integer (gen_rtx_SYMBOL_REF (Pmode, name),
pointer_bytes, 1);
}
else
if (profile_block_flag)
{
ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 3);
- assemble_integer (gen_rtx (SYMBOL_REF, Pmode, name), pointer_bytes,
+ assemble_integer (gen_rtx_SYMBOL_REF (Pmode, name), pointer_bytes,
1);
}
}
/* Make a list of all labels referred to other than by jumps. */
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_LABEL)
- label_value_list = gen_rtx (EXPR_LIST, VOIDmode, XEXP (note, 0),
- label_value_list);
+ label_value_list = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
+ label_value_list);
}
/* Keep a lifo list of the currently active exception handlers. */
for (x = exception_handler_labels; x; x = XEXP (x, 1))
if (CODE_LABEL_NUMBER (XEXP (x, 0)) == NOTE_BLOCK_NUMBER (insn))
{
- eh_note = gen_rtx (EXPR_LIST, VOIDmode,
- XEXP (x, 0), eh_note);
+ eh_note = gen_rtx_EXPR_LIST (VOIDmode,
+ XEXP (x, 0), eh_note);
break;
}
if (x == NULL_RTX)
reachable from this block. */
for (x = forced_labels; x; x = XEXP (x, 1))
if (! LABEL_REF_NONLOCAL_P (x))
- mark_label_ref (gen_rtx (LABEL_REF, VOIDmode, XEXP (x, 0)),
+ mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, XEXP (x, 0)),
insn, 0);
/* Now scan the insns for this block, we may need to make
{
x = XEXP (note, 0);
block_live[BLOCK_NUM (x)] = 1;
- mark_label_ref (gen_rtx (LABEL_REF,
- VOIDmode, x),
+ mark_label_ref (gen_rtx_LABEL_REF (VOIDmode, x),
insn, 0);
}
}
if (computed_jump_p (insn))
{
for (x = label_value_list; x; x = XEXP (x, 1))
- mark_label_ref (gen_rtx (LABEL_REF, VOIDmode,
- XEXP (x, 0)),
+ mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
+ XEXP (x, 0)),
insn, 0);
for (x = forced_labels; x; x = XEXP (x, 1))
- mark_label_ref (gen_rtx (LABEL_REF, VOIDmode,
- XEXP (x, 0)),
+ mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
+ XEXP (x, 0)),
insn, 0);
}
NULL_RTX)))
{
if (active_eh_handler[INSN_UID (insn)])
- mark_label_ref (gen_rtx (LABEL_REF, VOIDmode,
- active_eh_handler[INSN_UID (insn)]),
+ mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
+ active_eh_handler[INSN_UID (insn)]),
insn, 0);
if (!asynchronous_exceptions)
for (x = nonlocal_label_list;
x;
x = XEXP (x, 1))
- mark_label_ref (gen_rtx (LABEL_REF, VOIDmode,
- XEXP (x, 0)),
+ mark_label_ref (gen_rtx_LABEL_REF (VOIDmode,
+ XEXP (x, 0)),
insn, 0);
}
/* ??? This could be made smarter:
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i])
mark_used_regs (old, live,
- gen_rtx (REG, reg_raw_mode[i], i),
+ gen_rtx_REG (reg_raw_mode[i], i),
final, insn);
/* Calls also clobber memory. */
&& (regno >= FIRST_PSEUDO_REGISTER
|| asm_noperands (PATTERN (y)) < 0))
LOG_LINKS (y)
- = gen_rtx (INSN_LIST, VOIDmode, insn, LOG_LINKS (y));
+ = gen_rtx_INSN_LIST (VOIDmode, insn, LOG_LINKS (y));
}
else if (! some_needed)
{
be eliminated (because the same insn does something useful).
Indicate this by marking the reg being set as dying here. */
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_UNUSED, reg, REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
REG_N_DEATHS (REGNO (reg))++;
}
else
i >= 0; i--)
if (!REGNO_REG_SET_P (needed, regno + i))
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_UNUSED,
- gen_rtx (REG, reg_raw_mode[regno + i],
- regno + i),
- REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_UNUSED,
+ gen_rtx_REG (reg_raw_mode[regno + i],
+ regno + i),
+ REG_NOTES (insn));
}
}
}
else if (GET_CODE (reg) == SCRATCH && insn != 0)
{
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_UNUSED, reg, REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
num_scratch++;
}
}
we can't, we are done. Otherwise, we will do any
needed updates below. */
if (! validate_change (insn, &XEXP (x, 0),
- gen_rtx (inc_code, Pmode, addr),
+ gen_rtx_fmt_e (inc_code, Pmode, addr),
0))
return;
}
so is not correct in the pre-inc case. */
validate_change (insn, &XEXP (x, 0),
- gen_rtx (inc_code, Pmode, q),
+ gen_rtx_fmt_e (inc_code, Pmode, q),
1);
validate_change (incr, &XEXP (y, 0), q, 1);
if (! apply_change_group ())
has an implicit side effect. */
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_INC, addr, REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
/* Modify the old increment-insn to simply copy
the already-incremented value of our register. */
if (! some_needed)
{
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_DEAD, x, REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
REG_N_DEATHS (regno)++;
}
else
if (!REGNO_REG_SET_P (needed, regno + i)
&& ! dead_or_set_regno_p (insn, regno + i))
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_DEAD,
- gen_rtx (REG, reg_raw_mode[regno + i],
- regno + i),
- REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_DEAD,
+ gen_rtx_REG (reg_raw_mode[regno + i],
+ regno + i),
+ REG_NOTES (insn));
}
}
}
/* See if this combination of instruction and addressing mode exists. */
if (! validate_change (insn, &XEXP (use, 0),
- gen_rtx (amount > 0
- ? (do_post ? POST_INC : PRE_INC)
- : (do_post ? POST_DEC : PRE_DEC),
- Pmode, reg), 0))
+ gen_rtx_fmt_e (amount > 0
+ ? (do_post ? POST_INC : PRE_INC)
+ : (do_post ? POST_DEC : PRE_DEC),
+ Pmode, reg), 0))
return 0;
/* Record that this insn now has an implicit side effect on X. */
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_INC, reg, REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
return 1;
}
frame_offset += size;
#endif
- x = gen_rtx (MEM, mode, addr);
+ x = gen_rtx_MEM (mode, addr);
- stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list);
+ stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, x, stack_slot_list);
return x;
}
function->frame_offset += size;
#endif
- x = gen_rtx (MEM, mode, addr);
+ x = gen_rtx_MEM (mode, addr);
function->stack_slot_list
- = gen_rtx (EXPR_LIST, VOIDmode, x, function->stack_slot_list);
+ = gen_rtx_EXPR_LIST (VOIDmode, x, function->stack_slot_list);
pop_obstacks ();
p->size = best_p->size - rounded_size;
p->base_offset = best_p->base_offset + rounded_size;
p->full_size = best_p->full_size - rounded_size;
- p->slot = gen_rtx (MEM, BLKmode,
- plus_constant (XEXP (best_p->slot, 0),
- rounded_size));
+ p->slot = gen_rtx_MEM (BLKmode,
+ plus_constant (XEXP (best_p->slot, 0),
+ rounded_size));
p->address = 0;
p->rtl_expr = 0;
p->next = temp_slots;
temp_slots = p;
- stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, p->slot,
- stack_slot_list);
+ stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
+ stack_slot_list);
best_p->size = rounded_size;
best_p->full_size = rounded_size;
else
{
if (GET_CODE (p->address) != EXPR_LIST)
- p->address = gen_rtx (EXPR_LIST, VOIDmode, p->address, NULL_RTX);
+ p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
- p->address = gen_rtx (EXPR_LIST, VOIDmode, new, p->address);
+ p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
}
}
pos %= GET_MODE_BITSIZE (wanted_mode);
- newmem = gen_rtx (MEM, wanted_mode,
- plus_constant (XEXP (tem, 0), offset));
+ newmem = gen_rtx_MEM (wanted_mode,
+ plus_constant (XEXP (tem, 0), offset));
RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
pos %= GET_MODE_BITSIZE (wanted_mode);
- newmem = gen_rtx (MEM, wanted_mode,
- plus_constant (XEXP (tem, 0), offset));
+ newmem = gen_rtx_MEM (wanted_mode,
+ plus_constant (XEXP (tem, 0), offset));
RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (tem);
MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (tem);
MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (tem);
{
tree type = TREE_TYPE (decl);
- rtx r = gen_rtx (ADDRESSOF, Pmode, gen_reg_rtx (GET_MODE (reg)));
- ADDRESSOF_REGNO (r) = REGNO (reg);
+ rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)), REGNO (reg));
SET_ADDRESSOF_DECL (r, decl);
XEXP (reg, 0) = r;
{
rtx sub = XEXP (XEXP (x, 0), 0);
if (GET_CODE (sub) == MEM)
- sub = gen_rtx (MEM, GET_MODE (x), copy_rtx (XEXP (sub, 0)));
+ sub = gen_rtx_MEM (GET_MODE (x), copy_rtx (XEXP (sub, 0)));
if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
{
if (! BYTES_BIG_ENDIAN && ! WORDS_BIG_ENDIAN)
{
- rtx sub2 = gen_rtx (SUBREG, GET_MODE (x), sub, 0);
+ rtx sub2 = gen_rtx_SUBREG (GET_MODE (x), sub, 0);
if (validate_change (insn, loc, sub2, 0))
goto restart;
}
instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
extra_insns);
- new = gen_rtx (PLUS, Pmode, new, XEXP (XEXP (x, 0), 1));
+ new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
}
else if (XEXP (x, 0) == virtual_incoming_args_rtx)
register containing the sum. */
XEXP (x, 0) = old;
- new = gen_rtx (PLUS, Pmode, new, new_offset);
+ new = gen_rtx_PLUS (Pmode, new, new_offset);
start_sequence ();
temp = force_operand (new, NULL_RTX);
rtx x = 0;
for (t = nonlocal_labels; t; t = TREE_CHAIN (t))
- x = gen_rtx (EXPR_LIST, VOIDmode, label_rtx (TREE_VALUE (t)), x);
+ x = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (TREE_VALUE (t)), x);
return x;
}
{
if (GET_CODE (rtl) == REG)
/* This is a register variable. */
- emit_insn (gen_rtx (USE, VOIDmode, rtl));
+ emit_insn (gen_rtx_USE (VOIDmode, rtl));
else if (GET_CODE (rtl) == MEM
&& GET_CODE (XEXP (rtl, 0)) == REG
&& (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
|| REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
&& XEXP (rtl, 0) != current_function_internal_arg_pointer)
/* This is a variable-sized structure. */
- emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)));
+ emit_insn (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)));
}
/* Like use_variable except that it outputs the USEs after INSN
{
if (GET_CODE (rtl) == REG)
/* This is a register variable. */
- emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn);
+ emit_insn_after (gen_rtx_USE (VOIDmode, rtl), insn);
else if (GET_CODE (rtl) == MEM
&& GET_CODE (XEXP (rtl, 0)) == REG
&& (REGNO (XEXP (rtl, 0)) < FIRST_VIRTUAL_REGISTER
|| REGNO (XEXP (rtl, 0)) > LAST_VIRTUAL_REGISTER)
&& XEXP (rtl, 0) != current_function_internal_arg_pointer)
/* This is a variable-sized structure. */
- emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn);
+ emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (rtl, 0)), insn);
}
\f
int
|| TREE_CODE (parm) != PARM_DECL
|| passed_type == NULL)
{
- DECL_INCOMING_RTL (parm) = DECL_RTL (parm) = gen_rtx (MEM, BLKmode,
- const0_rtx);
+ DECL_INCOMING_RTL (parm) = DECL_RTL (parm)
+ = gen_rtx_MEM (BLKmode, const0_rtx);
TREE_USED (parm) = 1;
continue;
}
rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
if (offset_rtx == const0_rtx)
- stack_parm = gen_rtx (MEM, promoted_mode, internal_arg_pointer);
+ stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
else
- stack_parm = gen_rtx (MEM, promoted_mode,
- gen_rtx (PLUS, Pmode,
- internal_arg_pointer, offset_rtx));
+ stack_parm = gen_rtx_MEM (promoted_mode,
+ gen_rtx_PLUS (Pmode,
+ internal_arg_pointer,
+ offset_rtx));
/* If this is a memory ref that contains aggregate components,
mark it as such for cse and loop optimize. Likewise if it
offset_rtx = ARGS_SIZE_RTX (stack_offset);
if (offset_rtx == const0_rtx)
- stack_parm = gen_rtx (MEM, nominal_mode, internal_arg_pointer);
+ stack_parm = gen_rtx_MEM (nominal_mode, internal_arg_pointer);
else
- stack_parm = gen_rtx (MEM, nominal_mode,
- gen_rtx (PLUS, Pmode,
- if (flag_check_memory_usage)
- {
- push_to_sequence (conversion_insns);
- emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
- XEXP (stack_parm, 0), ptr_mode,
- GEN_INT (int_size_in_bytes
- (TREE_TYPE (parm))),
- TYPE_MODE (sizetype),
- GEN_INT (MEMORY_USE_RW),
- TYPE_MODE (integer_type_node));
- conversion_insns = get_insns ();
- end_sequence ();
- }
- internal_arg_pointer, offset_rtx));
+ stack_parm = gen_rtx_MEM (nominal_mode,
+ gen_rtx_PLUS (Pmode,
+ internal_arg_pointer,
+ offset_rtx));
/* If this is a memory ref that contains aggregate components,
mark it as such for cse and loop optimize. */
have been optimised away. */
if (GET_CODE (entry_parm) == REG && !(hide_last_arg && last_named))
- emit_insn (gen_rtx (USE, GET_MODE (entry_parm), entry_parm));
+ emit_insn (gen_rtx_USE (GET_MODE (entry_parm), entry_parm));
#endif
/* ENTRY_PARM is an RTX for the parameter as it arrives,
if (passed_pointer)
{
DECL_RTL (parm)
- = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
+ = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)), parmreg);
MEM_IN_STRUCT_P (DECL_RTL (parm)) = aggregate;
}
else
if (TYPE_SIZE (type) == 0
|| TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
/* This is a variable sized object. */
- copy = gen_rtx (MEM, BLKmode,
- allocate_dynamic_stack_space
- (expr_size (parm), NULL_RTX,
- TYPE_ALIGN (type)));
+ copy = gen_rtx_MEM (BLKmode,
+ allocate_dynamic_stack_space
+ (expr_size (parm), NULL_RTX,
+ TYPE_ALIGN (type)));
else
copy = assign_stack_temp (TYPE_MODE (type),
int_size_in_bytes (type), 1);
if (set != 0
&& SET_DEST (set) == regno_reg_rtx [regnoi])
REG_NOTES (sinsn)
- = gen_rtx (EXPR_LIST, REG_EQUIV,
- parm_reg_stack_loc[regnoi],
- REG_NOTES (sinsn));
+ = gen_rtx_EXPR_LIST (REG_EQUIV,
+ parm_reg_stack_loc[regnoi],
+ REG_NOTES (sinsn));
else if (set != 0
&& SET_DEST (set) == regno_reg_rtx [regnor])
REG_NOTES (sinsn)
- = gen_rtx (EXPR_LIST, REG_EQUIV,
- parm_reg_stack_loc[regnor],
- REG_NOTES (sinsn));
+ = gen_rtx_EXPR_LIST (REG_EQUIV,
+ parm_reg_stack_loc[regnor],
+ REG_NOTES (sinsn));
}
else if ((set = single_set (linsn)) != 0
&& SET_DEST (set) == parmreg)
REG_NOTES (linsn)
- = gen_rtx (EXPR_LIST, REG_EQUIV,
- stack_parm, REG_NOTES (linsn));
+ = gen_rtx_EXPR_LIST (REG_EQUIV,
+ stack_parm, REG_NOTES (linsn));
}
/* For pointer data type, suggest pointer register. */
tree restype = TREE_TYPE (result);
DECL_RTL (result)
- = gen_rtx (MEM, DECL_MODE (result), DECL_RTL (parm));
+ = gen_rtx_MEM (DECL_MODE (result), DECL_RTL (parm));
MEM_IN_STRUCT_P (DECL_RTL (result)) = AGGREGATE_TYPE_P (restype);
}
addr = fix_lexical_addr (XEXP (fp->arg_pointer_save_area, 0), var);
addr = memory_address (Pmode, addr);
- base = copy_to_reg (gen_rtx (MEM, Pmode, addr));
+ base = copy_to_reg (gen_rtx_MEM (Pmode, addr));
#else
displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
base = lookup_static_chain (var);
/* The zero below avoids a possible parse error */
0;
#if !defined (HAS_INIT_SECTION)
- emit_library_call (gen_rtx (SYMBOL_REF, Pmode, NAME__MAIN), 0,
+ emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), 0,
VOIDmode, 0);
#endif /* not HAS_INIT_SECTION */
}
if (value_address)
{
DECL_RTL (DECL_RESULT (subr))
- = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), value_address);
+ = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
MEM_IN_STRUCT_P (DECL_RTL (DECL_RESULT (subr)))
= AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
}
#ifdef FRAME_GROWS_DOWNWARD
last_ptr = plus_constant (last_ptr, - GET_MODE_SIZE (Pmode));
#endif
- last_ptr = copy_to_reg (gen_rtx (MEM, Pmode,
- memory_address (Pmode, last_ptr)));
+ last_ptr = copy_to_reg (gen_rtx_MEM (Pmode,
+ memory_address (Pmode, last_ptr)));
/* If we are not optimizing, ensure that we know that this
piece of context is live over the entire function. */
if (! optimize)
- save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, last_ptr,
- save_expr_regs);
+ save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
+ save_expr_regs);
}
}
{
end_temporary_allocation ();
initial_trampoline
- = gen_rtx (MEM, BLKmode, assemble_trampoline_template ());
+ = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
resume_temporary_allocation ();
}
#endif
GET_MODE (DECL_RTL (DECL_RESULT (current_function_decl))));
emit_move_insn (real_decl_result,
DECL_RTL (DECL_RESULT (current_function_decl)));
- emit_insn (gen_rtx (USE, VOIDmode, real_decl_result));
+ emit_insn (gen_rtx_USE (VOIDmode, real_decl_result));
/* The delay slot scheduler assumes that current_function_return_rtx
holds the hard register containing the return value, not a temporary
/* Generate code from machine description to perform peephole optimizations.
- Copyright (C) 1987, 1989, 1992 Free Software Foundation, Inc.
+ Copyright (C) 1987, 1989, 1992, 1997 Free Software Foundation, Inc.
This file is part of GNU CC.
char *xmalloc ();
static void match_rtx ();
-static void gen_exp ();
static void fatal ();
void fancy_abort ();
So use a simple regular form: a PARALLEL containing a vector
of all the operands. */
- printf (" PATTERN (ins1) = gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (%d, operands));\n", n_operands);
+ printf (" PATTERN (ins1) = gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (%d, operands));\n", n_operands);
#if 0
printf (" if (want_jump && GET_CODE (ins1) != JUMP_INSN)\n");
case ADDRESS:
match_rtx (XEXP (x, 0), path, fail_label);
return;
+
+ default:
+ break;
}
printf (" x = ");
from the machine description file `md'. */\n\n");
printf ("#include \"config.h\"\n");
+ printf ("#include <stdio.h>\n");
printf ("#include \"rtl.h\"\n");
printf ("#include \"regs.h\"\n");
printf ("#include \"output.h\"\n");
add_dependence (insn, XEXP (u, 0), REG_DEP_ANTI);
last_pending_memory_flush =
- gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
+ gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
}
/* Analyze a single SET or CLOBBER rtx, X, creating all dependencies generated
while (--i >= 0)
{
reg_last_uses[regno + i]
- = gen_rtx (INSN_LIST, VOIDmode,
- insn, reg_last_uses[regno + i]);
+ = gen_rtx_INSN_LIST (VOIDmode,
+ insn, reg_last_uses[regno + i]);
for (u = reg_last_sets[regno + i]; u; u = XEXP (u, 1))
add_dependence (insn, XEXP (u, 0), 0);
else
{
reg_last_uses[regno]
- = gen_rtx (INSN_LIST, VOIDmode, insn, reg_last_uses[regno]);
+ = gen_rtx_INSN_LIST (VOIDmode, insn, reg_last_uses[regno]);
for (u = reg_last_sets[regno]; u; u = XEXP (u, 1))
add_dependence (insn, XEXP (u, 0), 0);
{
/* reg_last_sets[r] is now a list of insns */
reg_last_sets[i]
- = gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
+ = gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
});
CLEAR_REG_SET (reg_pending_sets);
/* reg_last_sets[r] is now a list of insns */
reg_last_sets[i]
- = gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
+ = gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
reg_pending_sets_all = 0;
}
/* Add a pair of fake REG_NOTE which we will later
convert back into a NOTE_INSN_SETJMP note. See
reemit_notes for why we use a pair of NOTEs. */
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_DEAD,
- GEN_INT (0),
- REG_NOTES (insn));
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_DEAD,
- GEN_INT (NOTE_INSN_SETJMP),
- REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD,
+ GEN_INT (0),
+ REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD,
+ GEN_INT (NOTE_INSN_SETJMP),
+ REG_NOTES (insn));
}
else
{
/* last_function_call is now a list of insns */
last_function_call
- = gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
+ = gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
}
/* See comments on reemit_notes as to why we do this. */
|| (NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP
&& GET_CODE (PREV_INSN (insn)) != CALL_INSN)))
{
- loop_notes = gen_rtx (EXPR_LIST, REG_DEAD,
- GEN_INT (NOTE_BLOCK_NUMBER (insn)), loop_notes);
- loop_notes = gen_rtx (EXPR_LIST, REG_DEAD,
- GEN_INT (NOTE_LINE_NUMBER (insn)), loop_notes);
+ loop_notes = gen_rtx_EXPR_LIST (REG_DEAD,
+ GEN_INT (NOTE_BLOCK_NUMBER (insn)),
+ loop_notes);
+ loop_notes = gen_rtx_EXPR_LIST (REG_DEAD,
+ GEN_INT (NOTE_LINE_NUMBER (insn)),
+ loop_notes);
CONST_CALL_P (loop_notes) = CONST_CALL_P (insn);
}
{
link = rtx_alloc (EXPR_LIST);
PUT_REG_NOTE_KIND (link, REG_DEAD);
- XEXP (link, 0) = gen_rtx (REG, word_mode, 0);
+ XEXP (link, 0) = gen_rtx_REG (word_mode, 0);
XEXP (link, 1) = NULL_RTX;
}
{
rtx temp_reg, temp_link;
- temp_reg = gen_rtx (REG, word_mode, 0);
+ temp_reg = gen_rtx_REG (word_mode, 0);
temp_link = rtx_alloc (EXPR_LIST);
PUT_REG_NOTE_KIND (temp_link, REG_DEAD);
XEXP (temp_link, 0) = temp_reg;
i >= 0; i--)
if (! REGNO_REG_SET_P (old_live_regs, regno+i)
&& ! dead_or_set_regno_p (insn, regno + i))
- create_reg_dead_note (gen_rtx (REG,
- reg_raw_mode[regno + i],
- regno + i),
+ create_reg_dead_note (gen_rtx_REG (reg_raw_mode[regno + i],
+ regno + i),
insn);
}
}
for (bb = 0; bb < n_bbs; bb++)
{
bb_sched_before_next_call[bb] =
- gen_rtx (INSN, VOIDmode, 0, NULL_RTX, NULL_RTX,
- NULL_RTX, 0, NULL_RTX, NULL_RTX);
+ gen_rtx_INSN (VOIDmode, 0, NULL_RTX, NULL_RTX,
+ NULL_RTX, 0, NULL_RTX, NULL_RTX);
LOG_LINKS (bb_sched_before_next_call[bb]) = 0;
}
}
last_function_call = 0;
last_pending_memory_flush = 0;
sched_before_next_call
- = gen_rtx (INSN, VOIDmode, 0, NULL_RTX, NULL_RTX,
- NULL_RTX, 0, NULL_RTX, NULL_RTX);
+ = gen_rtx_INSN (VOIDmode, 0, NULL_RTX, NULL_RTX,
+ NULL_RTX, 0, NULL_RTX, NULL_RTX);
LOG_LINKS (sched_before_next_call) = 0;
}
else
continue;
(bb_reg_last_uses[bb_succ])[reg]
- = gen_rtx (INSN_LIST, VOIDmode, XEXP (u, 0),
- (bb_reg_last_uses[bb_succ])[reg]);
+ = gen_rtx_INSN_LIST (VOIDmode, XEXP (u, 0),
+ (bb_reg_last_uses[bb_succ])[reg]);
}
/* reg-last-defs lists are inherited by bb_succ */
continue;
(bb_reg_last_sets[bb_succ])[reg]
- = gen_rtx (INSN_LIST, VOIDmode, XEXP (u, 0),
- (bb_reg_last_sets[bb_succ])[reg]);
+ = gen_rtx_INSN_LIST (VOIDmode, XEXP (u, 0),
+ (bb_reg_last_sets[bb_succ])[reg]);
}
}
continue;
bb_last_function_call[bb_succ]
- = gen_rtx (INSN_LIST, VOIDmode, XEXP (u, 0),
- bb_last_function_call[bb_succ]);
+ = gen_rtx_INSN_LIST (VOIDmode, XEXP (u, 0),
+ bb_last_function_call[bb_succ]);
}
/* last_pending_memory_flush is inherited by bb_succ */
continue;
bb_last_pending_memory_flush[bb_succ]
- = gen_rtx (INSN_LIST, VOIDmode, XEXP (u, 0),
- bb_last_pending_memory_flush[bb_succ]);
+ = gen_rtx_INSN_LIST (VOIDmode, XEXP (u, 0),
+ bb_last_pending_memory_flush[bb_succ]);
}
/* sched_before_next_call is inherited by bb_succ */
for (insn = first; insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
&& reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_LABEL,
- XEXP (note, 0), REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL,
+ XEXP (note, 0),
+ REG_NOTES (insn));
break;
case REG_CC_SETTER:
/* OSF/rose half-pic support functions.
- Copyright (C) 1992 Free Software Foundation, Inc.
+ Copyright (C) 1992, 1997 Free Software Foundation, Inc.
This file is part of GNU CC.
#ifdef HALF_PIC_INIT
+#include <stdio.h>
#include "tree.h"
#include "rtl.h"
-#include <stdio.h>
#include "obstack.h"
#define obstack_chunk_alloc xmalloc
if (p->pointer_p)
{
ASM_OUTPUT_LABEL (stream, p->ref_name);
- ASM_OUTPUT_INT (stream, gen_rtx (SYMBOL_REF, Pmode, p->real_name));
+ ASM_OUTPUT_INT (stream, gen_rtx_SYMBOL_REF (Pmode, p->real_name));
}
}
}
}
half_pic_number_refs++;
- return gen_rtx (SYMBOL_REF, Pmode, p->ref_name);
+ return gen_rtx_SYMBOL_REF (Pmode, p->ref_name);
}
#endif /* HALF_PIC_INIT */
&& CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
{
enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
- rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
+ rtx new = gen_rtx_CONST (const_mode, get_pool_constant (XEXP (x, 0)));
RTX_INTEGRATED_P (new) = 1;
/* If the MEM was in a different mode than the constant (perhaps we
if (GET_MODE (x) != const_mode)
{
- new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
+ new = gen_rtx_SUBREG (GET_MODE (x), new, 0);
RTX_INTEGRATED_P (new) = 1;
}
else if (GET_CODE (x) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (x))
{
- *px = gen_rtx (ADDRESS, GET_MODE (x),
- gen_rtx (CONST, get_pool_mode (x),
- get_pool_constant (x)));
+ *px = gen_rtx_ADDRESS (GET_MODE (x),
+ gen_rtx_CONST (get_pool_mode (x),
+ get_pool_constant (x)));
save_constants (&XEXP (*px, 0));
RTX_INTEGRATED_P (*px) = 1;
}
case LABEL_REF:
/* If this is a non-local label, just make a new LABEL_REF.
Otherwise, use the new label as well. */
- x = gen_rtx (LABEL_REF, GET_MODE (orig),
- LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
- : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
+ x = gen_rtx_LABEL_REF (GET_MODE (orig),
+ LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
+ : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
return x;
target = gen_lowpart (departing_mode, reg_to_map);
}
else
- reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
+ reg_to_map = gen_rtx_SUBREG (arriving_mode, target, 0);
}
else
reg_to_map = target;
if (structure_value_addr)
{
- target = gen_rtx (MEM, TYPE_MODE (type),
- memory_address (TYPE_MODE (type), structure_value_addr));
+ target = gen_rtx_MEM (TYPE_MODE (type),
+ memory_address (TYPE_MODE (type),
+ structure_value_addr));
MEM_IN_STRUCT_P (target) = 1;
}
copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
/* SUBREG is ordinary, but don't make nested SUBREGs. */
if (GET_CODE (copy) == SUBREG)
- return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
- SUBREG_WORD (orig) + SUBREG_WORD (copy));
+ return gen_rtx_SUBREG (GET_MODE (orig), SUBREG_REG (copy),
+ SUBREG_WORD (orig) + SUBREG_WORD (copy));
else if (GET_CODE (copy) == CONCAT)
return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
else
- return gen_rtx (SUBREG, GET_MODE (orig), copy,
- SUBREG_WORD (orig));
+ return gen_rtx_SUBREG (GET_MODE (orig), copy,
+ SUBREG_WORD (orig));
case ADDRESSOF:
- copy = gen_rtx (ADDRESSOF, mode,
- copy_rtx_and_substitute (XEXP (orig, 0), map));
+ copy = gen_rtx_ADDRESSOF (mode,
+ copy_rtx_and_substitute (XEXP (orig, 0), map), 0);
SET_ADDRESSOF_DECL (copy, ADDRESSOF_DECL (orig));
regno = ADDRESSOF_REGNO (orig);
if (map->reg_map[regno])
copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
copy = SUBREG_REG (copy);
- return gen_rtx (code, VOIDmode, copy);
+ return gen_rtx_fmt_e (code, VOIDmode, copy);
case CODE_LABEL:
LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
case LABEL_REF:
- copy = gen_rtx (LABEL_REF, mode,
- LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
- : get_label_from_map (map,
- CODE_LABEL_NUMBER (XEXP (orig, 0))));
+ copy = gen_rtx_LABEL_REF (mode,
+ LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
+ : get_label_from_map (map,
+ CODE_LABEL_NUMBER (XEXP (orig, 0))));
LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
/* The fact that this label was previously nonlocal does not mean
#ifndef NO_FUNCTION_CSE
if (! (optimize && ! flag_no_function_cse))
#endif
- return gen_rtx (CALL, GET_MODE (orig),
- gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
- copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
+ return gen_rtx_CALL (GET_MODE (orig),
+ gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
+ copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
copy_rtx_and_substitute (XEXP (orig, 1), map));
break;
HOST_WIDE_INT loc_offset
= GET_CODE (equiv_loc) == REG ? 0 : INTVAL (XEXP (equiv_loc, 1));
- return gen_rtx (SET, VOIDmode, SET_DEST (orig),
- force_operand
- (plus_constant
- (copy_rtx_and_substitute (SET_SRC (orig), map),
- - loc_offset),
- NULL_RTX));
+ return gen_rtx_SET (VOIDmode, SET_DEST (orig),
+ force_operand
+ (plus_constant
+ (copy_rtx_and_substitute (SET_SRC (orig), map),
+ - loc_offset),
+ NULL_RTX));
}
break;
/* Change this into a USE so that we won't emit
code for it, but still can keep the note. */
PATTERN (insn)
- = gen_rtx (USE, VOIDmode, XEXP (trial, 0));
+ = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
INSN_CODE (insn) = -1;
/* Remove all reg notes but the REG_DEAD one. */
REG_NOTES (insn) = trial;
/* Make the old conditional jump
into an unconditional one. */
SET_SRC (PATTERN (insn))
- = gen_rtx (LABEL_REF, VOIDmode, JUMP_LABEL (insn));
+ = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
INSN_CODE (insn) = -1;
emit_barrier_after (insn);
/* Add to jump_chain unless this is a new label
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) != REG_LABEL)
REG_NOTES (copy)
- = copy_rtx (gen_rtx (EXPR_LIST, REG_NOTE_KIND (link),
- XEXP (link, 0), REG_NOTES (copy)));
+ = copy_rtx (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
+ XEXP (link, 0),
+ REG_NOTES (copy)));
if (reg_map && REG_NOTES (copy))
replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
break;
|| ! (GET_CODE (next) == JUMP_INSN
&& (GET_CODE (PATTERN (next)) == ADDR_VEC
|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)))
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_LABEL, label,
- REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
+ REG_NOTES (insn));
}
}
return;
delete_computation (prev);
else
/* Otherwise, show that cc0 won't be used. */
- REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_UNUSED,
- cc0_rtx, REG_NOTES (prev));
+ REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
+ cc0_rtx, REG_NOTES (prev));
}
}
#endif
if (can_reverse_comparison_p (comp, insn)
&& validate_change (insn, &XEXP (x, 0),
- gen_rtx (reverse_condition (GET_CODE (comp)),
- GET_MODE (comp), XEXP (comp, 0),
- XEXP (comp, 1)), 0))
+ gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
+ GET_MODE (comp), XEXP (comp, 0),
+ XEXP (comp, 1)), 0))
return 1;
tem = XEXP (x, 1);
if (nlabel)
XEXP (x, 0) = nlabel;
else
- return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
+ return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
return 1;
}
}
else if (code == RETURN && olabel == 0)
{
- x = gen_rtx (LABEL_REF, VOIDmode, nlabel);
+ x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
if (loc == &PATTERN (insn))
- x = gen_rtx (SET, VOIDmode, pc_rtx, x);
+ x = gen_rtx_SET (VOIDmode, pc_rtx, x);
return validate_change (insn, loc, x, 0);
}
if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
&& GET_CODE (SET_SRC (x)) == LABEL_REF
&& XEXP (SET_SRC (x), 0) == olabel)
- return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
+ return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
fmt = GET_RTX_FORMAT (code);
for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
&& ! memref_used_between_p (SET_DEST (set),
reg_equiv_init_insn[regno], insn))
REG_NOTES (reg_equiv_init_insn[regno])
- = gen_rtx (EXPR_LIST, REG_EQUIV, dest,
- REG_NOTES (reg_equiv_init_insn[regno]));
+ = gen_rtx_EXPR_LIST (REG_EQUIV, dest,
+ REG_NOTES (reg_equiv_init_insn[regno]));
/* If this is a register-register copy where SRC is not dead, see if we
can optimize it. */
if (note == 0 && REG_BASIC_BLOCK (regno) >= 0
&& GET_CODE (SET_SRC (set)) == MEM
&& validate_equiv_mem (insn, dest, SET_SRC (set)))
- REG_NOTES (insn) = note = gen_rtx (EXPR_LIST, REG_EQUIV, SET_SRC (set),
- REG_NOTES (insn));
+ REG_NOTES (insn) = note = gen_rtx_EXPR_LIST (REG_EQUIV, SET_SRC (set),
+ REG_NOTES (insn));
if (note)
{
{
if (GET_CODE (qty_scratch_rtx[q]) == REG)
abort ();
- qty_scratch_rtx[q] = gen_rtx (REG, GET_MODE (qty_scratch_rtx[q]),
- qty_phys_reg[q]);
+ qty_scratch_rtx[q] = gen_rtx_REG (GET_MODE (qty_scratch_rtx[q]),
+ qty_phys_reg[q]);
scratch_block[scratch_index] = b;
scratch_list[scratch_index++] = qty_scratch_rtx[q];
init_loop ()
{
char *free_point = (char *) oballoc (1);
- rtx reg = gen_rtx (REG, word_mode, LAST_VIRTUAL_REGISTER + 1);
+ rtx reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
- add_cost = rtx_cost (gen_rtx (PLUS, word_mode, reg, reg), SET);
+ add_cost = rtx_cost (gen_rtx_PLUS (word_mode, reg, reg), SET);
/* We multiply by 2 to reconcile the difference in scale between
these two ways of computing costs. Otherwise the cost of a copy
case REG:
if (REGNO (in_this) >= FIRST_PSEUDO_REGISTER
&& ! reg_mentioned_p (in_this, not_in_this))
- *output = gen_rtx (EXPR_LIST, VOIDmode, in_this, *output);
+ *output = gen_rtx_EXPR_LIST (VOIDmode, in_this, *output);
return;
default:
{
for (insn = insns; insn; insn = NEXT_INSN (insn))
if (reg_mentioned_p (XEXP (x, 0), insn))
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_LABEL, XEXP (x, 0),
- REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0),
+ REG_NOTES (insn));
}
return;
}
REG_NOTES (i1) = REG_NOTES (m->insn);
r1 = SET_DEST (PATTERN (m->insn));
r2 = SET_DEST (PATTERN (m1->insn));
- regs_may_share = gen_rtx (EXPR_LIST, VOIDmode, r1,
- gen_rtx (EXPR_LIST, VOIDmode, r2,
- regs_may_share));
+ regs_may_share
+ = gen_rtx_EXPR_LIST (VOIDmode, r1,
+ gen_rtx_EXPR_LIST (VOIDmode, r2,
+ regs_may_share));
delete_insn (m->insn);
if (new_start == 0)
i1 = emit_insns_before (temp, loop_start);
if (! find_reg_note (i1, REG_EQUAL, NULL_RTX))
REG_NOTES (i1)
- = gen_rtx (EXPR_LIST,
- m->is_equiv ? REG_EQUIV : REG_EQUAL,
- m->set_src, REG_NOTES (i1));
+ = gen_rtx_EXPR_LIST (m->is_equiv ? REG_EQUIV : REG_EQUAL,
+ m->set_src, REG_NOTES (i1));
if (loop_dump_stream)
fprintf (loop_dump_stream, " moved to %d", INSN_UID (i1));
(since it might get set outside). */
&& CONSTANT_P (SET_SRC (PATTERN (p))))
REG_NOTES (i1)
- = gen_rtx (EXPR_LIST, REG_EQUAL,
- SET_SRC (PATTERN (p)), REG_NOTES (i1));
+ = gen_rtx_EXPR_LIST (REG_EQUAL,
+ SET_SRC (PATTERN (p)),
+ REG_NOTES (i1));
#endif
/* If library call, now fix the REG_NOTES that contain
/* Try to change (SET (REG ...) (ZERO_EXTEND (..:B ...)))
to (SET (STRICT_LOW_PART (SUBREG:B (REG...))) ...). */
- new = gen_rtx (SET, VOIDmode,
- gen_rtx (STRICT_LOW_PART, VOIDmode,
- gen_rtx (SUBREG, GET_MODE (XEXP (SET_SRC (PATTERN (p)), 0)),
+ new = gen_rtx_SET (VOIDmode,
+ gen_rtx_STRICT_LOW_PART (VOIDmode,
+ gen_rtx_SUBREG (GET_MODE (XEXP (SET_SRC (PATTERN (p)), 0)),
SET_DEST (PATTERN (p)),
0)),
XEXP (SET_SRC (PATTERN (p)), 0));
register int i;
/* Clear destination register before the loop. */
- emit_insn_before (gen_rtx (SET, VOIDmode,
- SET_DEST (PATTERN (p)),
- const0_rtx),
+ emit_insn_before (gen_rtx_SET (VOIDmode, SET_DEST (PATTERN (p)),
+ const0_rtx),
loop_start);
/* Inside the loop, just load the low part. */
if (GET_CODE (test) == NE)
{
bl->init_insn = p;
- bl->init_set = gen_rtx (SET, VOIDmode,
- XEXP (test, 0), XEXP (test, 1));
+ bl->init_set = gen_rtx_SET (VOIDmode,
+ XEXP (test, 0), XEXP (test, 1));
}
else
bl->initial_test = test;
tem = 0;
if (biv->mult_val == const1_rtx)
- tem = simplify_giv_expr (gen_rtx (MULT, giv->mode,
- biv->add_val,
- giv->mult_val),
+ tem = simplify_giv_expr (gen_rtx_MULT (giv->mode,
+ biv->add_val,
+ giv->mult_val),
&dummy);
if (tem && giv->derive_adjustment)
- tem = simplify_giv_expr (gen_rtx (PLUS, giv->mode, tem,
- giv->derive_adjustment),
+ tem = simplify_giv_expr (gen_rtx_PLUS (giv->mode, tem,
+ giv->derive_adjustment),
&dummy);
if (tem)
giv->derive_adjustment = tem;
{
tem = plus_constant (arg0, INTVAL (arg1));
if (GET_CODE (tem) != CONST_INT)
- tem = gen_rtx (USE, mode, tem);
+ tem = gen_rtx_USE (mode, tem);
}
else
{
/* Adding two invariants must result in an invariant,
so enclose addition operation inside a USE and
return it. */
- tem = gen_rtx (USE, mode, gen_rtx (PLUS, mode, arg0, arg1));
+ tem = gen_rtx_USE (mode, gen_rtx_PLUS (mode, arg0, arg1));
}
return tem;
case REG:
case MULT:
/* biv + invar or mult + invar. Return sum. */
- return gen_rtx (PLUS, mode, arg0, arg1);
+ return gen_rtx_PLUS (mode, arg0, arg1);
case PLUS:
/* (a + invar_1) + invar_2. Associate. */
- return simplify_giv_expr (gen_rtx (PLUS, mode,
- XEXP (arg0, 0),
- gen_rtx (PLUS, mode,
- XEXP (arg0, 1), arg1)),
+ return simplify_giv_expr (gen_rtx_PLUS (mode,
+ XEXP (arg0, 0),
+ gen_rtx_PLUS (mode,
+ XEXP (arg0, 1), arg1)),
benefit);
default:
/* Each argument must be either REG, PLUS, or MULT. Convert REG to
MULT to reduce cases. */
if (GET_CODE (arg0) == REG)
- arg0 = gen_rtx (MULT, mode, arg0, const1_rtx);
+ arg0 = gen_rtx_MULT (mode, arg0, const1_rtx);
if (GET_CODE (arg1) == REG)
- arg1 = gen_rtx (MULT, mode, arg1, const1_rtx);
+ arg1 = gen_rtx_MULT (mode, arg1, const1_rtx);
/* Now have PLUS + PLUS, PLUS + MULT, MULT + PLUS, or MULT + MULT.
Put a MULT first, leaving PLUS + PLUS, MULT + PLUS, or MULT + MULT.
tem = arg0, arg0 = arg1, arg1 = tem;
if (GET_CODE (arg1) == PLUS)
- return simplify_giv_expr (gen_rtx (PLUS, mode,
- gen_rtx (PLUS, mode,
- arg0, XEXP (arg1, 0)),
- XEXP (arg1, 1)),
+ return simplify_giv_expr (gen_rtx_PLUS (mode,
+ gen_rtx_PLUS (mode, arg0,
+ XEXP (arg1, 0)),
+ XEXP (arg1, 1)),
benefit);
/* Now must have MULT + MULT. Distribute if same biv, else not giv. */
if (XEXP (arg0, 0) != XEXP (arg1, 0))
return 0;
- return simplify_giv_expr (gen_rtx (MULT, mode,
- XEXP (arg0, 0),
- gen_rtx (PLUS, mode,
- XEXP (arg0, 1),
- XEXP (arg1, 1))),
+ return simplify_giv_expr (gen_rtx_MULT (mode,
+ XEXP (arg0, 0),
+ gen_rtx_PLUS (mode,
+ XEXP (arg0, 1),
+ XEXP (arg1, 1))),
benefit);
case MINUS:
/* Handle "a - b" as "a + b * (-1)". */
- return simplify_giv_expr (gen_rtx (PLUS, mode,
- XEXP (x, 0),
- gen_rtx (MULT, mode,
- XEXP (x, 1), constm1_rtx)),
+ return simplify_giv_expr (gen_rtx_PLUS (mode,
+ XEXP (x, 0),
+ gen_rtx_MULT (mode, XEXP (x, 1),
+ constm1_rtx)),
benefit);
case MULT:
{
case REG:
/* biv * invar. Done. */
- return gen_rtx (MULT, mode, arg0, arg1);
+ return gen_rtx_MULT (mode, arg0, arg1);
case CONST_INT:
/* Product of two constants. */
case MULT:
/* (a * invar_1) * invar_2. Associate. */
- return simplify_giv_expr (gen_rtx (MULT, mode,
- XEXP (arg0, 0),
- gen_rtx (MULT, mode,
- XEXP (arg0, 1), arg1)),
+ return simplify_giv_expr (gen_rtx_MULT (mode, XEXP (arg0, 0),
+ gen_rtx_MULT (mode,
+ XEXP (arg0, 1),
+ arg1)),
benefit);
case PLUS:
/* (a + invar_1) * invar_2. Distribute. */
- return simplify_giv_expr (gen_rtx (PLUS, mode,
- gen_rtx (MULT, mode,
- XEXP (arg0, 0), arg1),
- gen_rtx (MULT, mode,
- XEXP (arg0, 1), arg1)),
+ return simplify_giv_expr (gen_rtx_PLUS (mode,
+ gen_rtx_MULT (mode,
+ XEXP (arg0, 0),
+ arg1),
+ gen_rtx_MULT (mode,
+ XEXP (arg0, 1),
+ arg1)),
benefit);
default:
if (GET_CODE (XEXP (x, 1)) != CONST_INT)
return 0;
- return simplify_giv_expr (gen_rtx (MULT, mode,
- XEXP (x, 0),
- GEN_INT ((HOST_WIDE_INT) 1
- << INTVAL (XEXP (x, 1)))),
+ return simplify_giv_expr (gen_rtx_MULT (mode,
+ XEXP (x, 0),
+ GEN_INT ((HOST_WIDE_INT) 1
+ << INTVAL (XEXP (x, 1)))),
benefit);
case NEG:
/* "-a" is "a * (-1)" */
- return simplify_giv_expr (gen_rtx (MULT, mode, XEXP (x, 0), constm1_rtx),
+ return simplify_giv_expr (gen_rtx_MULT (mode, XEXP (x, 0), constm1_rtx),
benefit);
case NOT:
/* "~a" is "-a - 1". Silly, but easy. */
- return simplify_giv_expr (gen_rtx (MINUS, mode,
- gen_rtx (NEG, mode, XEXP (x, 0)),
- const1_rtx),
+ return simplify_giv_expr (gen_rtx_MINUS (mode,
+ gen_rtx_NEG (mode, XEXP (x, 0)),
+ const1_rtx),
benefit);
case USE:
if (v->cant_derive)
return 0;
- tem = gen_rtx (PLUS, mode, gen_rtx (MULT, mode,
- v->src_reg, v->mult_val),
+ tem = gen_rtx_PLUS (mode, gen_rtx_MULT (mode, v->src_reg,
+ v->mult_val),
v->add_val);
if (v->derive_adjustment)
- tem = gen_rtx (MINUS, mode, tem, v->derive_adjustment);
+ tem = gen_rtx_MINUS (mode, tem, v->derive_adjustment);
return simplify_giv_expr (tem, benefit);
}
if (GET_CODE (x) == CONST_INT)
return x;
else
- return gen_rtx (USE, mode, x);
+ return gen_rtx_USE (mode, x);
}
else
return 0;
else if (mult == const1_rtx)
mult = g1->dest_reg;
else
- mult = gen_rtx (MULT, g2->mode, g1->dest_reg, mult);
+ mult = gen_rtx_MULT (g2->mode, g1->dest_reg, mult);
if (add == const0_rtx)
return mult;
else
- return gen_rtx (PLUS, g2->mode, mult, add);
+ return gen_rtx_PLUS (g2->mode, mult, add);
}
#endif
\f
{
/* register always nonnegative, add REG_NOTE to branch */
REG_NOTES (PREV_INSN (loop_end))
- = gen_rtx (EXPR_LIST, REG_NONNEG, NULL_RTX,
- REG_NOTES (PREV_INSN (loop_end)));
+ = gen_rtx_EXPR_LIST (REG_NONNEG, NULL_RTX,
+ REG_NOTES (PREV_INSN (loop_end)));
bl->nonneg = 1;
return 1;
&& INTVAL (bl->biv->add_val) == -1)
{
REG_NOTES (PREV_INSN (loop_end))
- = gen_rtx (EXPR_LIST, REG_NONNEG, NULL_RTX,
- REG_NOTES (PREV_INSN (loop_end)));
+ = gen_rtx_EXPR_LIST (REG_NONNEG, NULL_RTX,
+ REG_NOTES (PREV_INSN (loop_end)));
bl->nonneg = 1;
return 1;
/* Increment of LABEL_NUSES done above. */
/* Register is now always nonnegative,
so add REG_NONNEG note to the branch. */
- REG_NOTES (tem) = gen_rtx (EXPR_LIST, REG_NONNEG, NULL_RTX,
- REG_NOTES (tem));
+ REG_NOTES (tem) = gen_rtx_EXPR_LIST (REG_NONNEG, NULL_RTX,
+ REG_NOTES (tem));
}
bl->nonneg = 1;
/* If the giv has the opposite direction of change,
then reverse the comparison. */
if (INTVAL (v->mult_val) < 0)
- new = gen_rtx (COMPARE, GET_MODE (v->new_reg),
- const0_rtx, v->new_reg);
+ new = gen_rtx_COMPARE (GET_MODE (v->new_reg),
+ const0_rtx, v->new_reg);
else
new = v->new_reg;
/* If the giv has the opposite direction of change,
then reverse the comparison. */
if (INTVAL (v->mult_val) < 0)
- new = gen_rtx (COMPARE, VOIDmode, copy_rtx (v->add_val),
- v->new_reg);
+ new = gen_rtx_COMPARE (VOIDmode, copy_rtx (v->add_val),
+ v->new_reg);
else
- new = gen_rtx (COMPARE, VOIDmode, v->new_reg,
- copy_rtx (v->add_val));
+ new = gen_rtx_COMPARE (VOIDmode, v->new_reg,
+ copy_rtx (v->add_val));
/* Replace biv with the giv's reduced register. */
update_reg_last_use (v->add_val, insn);
return 0;
#endif
- return gen_rtx (code, VOIDmode, op0, op1);
+ return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
/* Similar to above routine, except that we also put an invariant last
|| invariant_p (XEXP (comparison, 1)))
return comparison;
- return gen_rtx (swap_condition (GET_CODE (comparison)), VOIDmode,
- XEXP (comparison, 1), XEXP (comparison, 0));
+ return gen_rtx_fmt_ee (swap_condition (GET_CODE (comparison)), VOIDmode,
+ XEXP (comparison, 1), XEXP (comparison, 0));
}
#ifdef HAIFA
emit_insn (gen_move_insn (temp_reg1, loop_num_iterations));
/* this will be count register */
- temp_reg2 = gen_rtx (REG, loop_var_mode, COUNT_REGISTER_REGNUM);
+ temp_reg2 = gen_rtx_REG (loop_var_mode, COUNT_REGISTER_REGNUM);
/* we have to move the value to the count register from an GPR
because rtx pointed to by loop_num_iterations could contain
expression which cannot be moved into count register */
emit_barrier_after (arcptr->branch_insn);
/* Fix up the table jump. */
- new_lref = gen_rtx (LABEL_REF, Pmode, new_label);
+ new_lref = gen_rtx_LABEL_REF (Pmode, new_label);
XVECEXP (PATTERN (arcptr->branch_insn),
(code == ADDR_DIFF_VEC), index) = new_lref;
}
{
/* Make a fake insn to tag our notes on. */
bb_graph[i].first_insn = insn
- = emit_insn_after (gen_rtx (USE, VOIDmode, stack_pointer_rtx),
+ = emit_insn_after (gen_rtx_USE (VOIDmode, stack_pointer_rtx),
insn);
prev_code = CALL_INSN;
}
num_branches++;
REG_NOTES (arcptr->branch_insn)
- = gen_rtx (EXPR_LIST, REG_BR_PROB, GEN_INT (prob),
- REG_NOTES (arcptr->branch_insn));
+ = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
+ REG_NOTES (arcptr->branch_insn));
}
}
else
{
REG_NOTES (binfo->first_insn)
- = gen_rtx (EXPR_LIST, REG_EXEC_COUNT, GEN_INT (total),
- REG_NOTES (binfo->first_insn));
+ = gen_rtx_EXPR_LIST (REG_EXEC_COUNT, GEN_INT (total),
+ REG_NOTES (binfo->first_insn));
if (i == num_blocks - 1)
return_label_execution_count = total;
}
/* Generate and save a copy of this so it can be shared. */
char *name = xmalloc (20);
ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 2);
- profiler_label = gen_rtx (SYMBOL_REF, Pmode, name);
+ profiler_label = gen_rtx_SYMBOL_REF (Pmode, name);
}
/* Output instructions as RTL to increment the arc execution count. */
{
rtx profiler_target_addr
= (arcno
- ? gen_rtx (CONST, Pmode,
- gen_rtx (PLUS, Pmode, profiler_label,
- gen_rtx (CONST_INT, VOIDmode,
- LONG_TYPE_SIZE / BITS_PER_UNIT * arcno)))
+ ? gen_rtx_CONST (Pmode,
+ gen_rtx_PLUS (Pmode, profiler_label,
+ GEN_INT (LONG_TYPE_SIZE / BITS_PER_UNIT * arcno)))
: profiler_label);
enum machine_mode mode = mode_for_size (LONG_TYPE_SIZE, MODE_INT, 0);
rtx profiler_reg = gen_reg_rtx (mode);
start_sequence ();
emit_move_insn (address_reg, profiler_target_addr);
- mem_ref = gen_rtx (MEM, mode, address_reg);
+ mem_ref = gen_rtx_MEM (mode, address_reg);
emit_move_insn (profiler_reg, mem_ref);
- add_ref = gen_rtx (PLUS, mode, profiler_reg, GEN_INT (1));
+ add_ref = gen_rtx_PLUS (mode, profiler_reg, GEN_INT (1));
emit_move_insn (profiler_reg, add_ref);
/* This is the same rtx as above, but it is not legal to share this rtx. */
- mem_ref = gen_rtx (MEM, mode, address_reg);
+ mem_ref = gen_rtx_MEM (mode, address_reg);
emit_move_insn (mem_ref, profiler_reg);
sequence = gen_sequence ();
/* Actually generate the code to call __bb_init_func. */
name = xmalloc (20);
ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 0);
- table_address = force_reg (Pmode, gen_rtx (SYMBOL_REF, Pmode, name));
- emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__bb_init_func"), 0,
+ table_address = force_reg (Pmode, gen_rtx_SYMBOL_REF (Pmode, name));
+ emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_init_func"), 0,
mode, 1, table_address, Pmode);
expand_function_end (input_filename, lineno, 0);
/* Subroutines used by or related to instruction recognition.
- Copyright (C) 1987, 1988, 1991-6, 1997 Free Software Foundation, Inc.
+ Copyright (C) 1987, 1988, 91-6, 1997 Free Software Foundation, Inc.
This file is part of GNU CC.
#include "config.h"
-#include "rtl.h"
#include <stdio.h>
+#include "rtl.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "insn-flags.h"
/* Import from final.c: */
extern rtx alter_subreg ();
-int strict_memory_address_p ();
-int memory_address_p ();
+static rtx *find_single_use_1 PROTO((rtx, rtx *));
/* Nonzero means allow operands to be volatile.
This should be 0 if you are generating rtl, such as if you are calling
{
int j;
- newpat = gen_rtx (PARALLEL, VOIDmode,
- gen_rtvec (XVECLEN (pat, 0) - 1));
+ newpat = gen_rtx_PARALLEL (VOIDmode,
+ gen_rtvec (XVECLEN (pat, 0) - 1));
for (j = 0; j < XVECLEN (newpat, 0); j++)
XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
}
if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
{
validate_change (object, loc,
- gen_rtx (GET_RTX_CLASS (code) == 'c' ? code
- : swap_condition (code),
- GET_MODE (x), XEXP (x, 1), XEXP (x, 0)),
+ gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
+ : swap_condition (code),
+ GET_MODE (x), XEXP (x, 1),
+ XEXP (x, 0)),
1);
x = *loc;
code = GET_CODE (x);
rtx new = simplify_unary_operation (code, GET_MODE (x), to,
GET_MODE (from));
if (new == 0)
- new = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
+ new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
validate_change (object, loc, new, 1);
return;
GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
- MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
- new = gen_rtx (MEM, mode, plus_constant (XEXP (to, 0), offset));
+ new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to);
RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to);
pos %= GET_MODE_BITSIZE (wanted_mode);
- newmem = gen_rtx (MEM, wanted_mode,
- plus_constant (XEXP (to, 0), offset));
+ newmem = gen_rtx_MEM (wanted_mode,
+ plus_constant (XEXP (to, 0), offset));
RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (to);
MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (to);
}
break;
+
+ default:
+ break;
}
fmt = GET_RTX_FORMAT (code);
case MEM:
case SUBREG:
return find_single_use_1 (dest, &XEXP (x, 0));
+
+ default:
+ break;
}
/* If it wasn't one of the common cases above, check each expression and
register rtx y = XEXP (op, 0);
if (! volatile_ok && MEM_VOLATILE_P (op))
return 0;
+ if (GET_CODE (y) == ADDRESSOF)
+ return 1;
/* Use the mem's mode, since it will be reloaded thus. */
mode = GET_MODE (op);
GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
}
+
+ /* Pretend this is an operand for now; we'll run force_operand
+ on its replacement in fixup_var_refs_1. */
+ if (code == ADDRESSOF)
+ return 1;
+
return 0;
win:
enum machine_mode mode;
register rtx addr;
{
+ if (GET_CODE (addr) == ADDRESSOF)
+ return 1;
+
GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
return 0;
if (CONSTANT_ADDRESS_P (y))
{
- new = gen_rtx (MEM, GET_MODE (op), plus_constant_for_output (y, offset));
+ new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
return new;
}
}
}
- new = gen_rtx (MEM, GET_MODE (op), plus_constant_for_output (y, offset));
+ new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
return new;
}
earlyclobber[opno] = 0;
/* A unary operator may be accepted by the predicate, but it
- is irrelevant for matching contraints. */
+ is irrelevant for matching constraints. */
if (GET_RTX_CLASS (GET_CODE (op)) == '1')
op = XEXP (op, 0);
}
/* Return 1 iff OPERAND (assumed to be a REG rtx)
- is a hard reg in class CLASS when its regno is offsetted by OFFSET
+ is a hard reg in class CLASS when its regno is offset by OFFSET
and changed to mode MODE.
If REG occupies multiple hard regs, all of them must be in CLASS. */
{
for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
- FP_MODE_REG (i, mode) = gen_rtx (REG, mode, i);
+ FP_MODE_REG (i, mode) = gen_rtx_REG (mode, i);
for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
- FP_MODE_REG (i, mode) = gen_rtx (REG, mode, i);
+ FP_MODE_REG (i, mode) = gen_rtx_REG (mode, i);
}
}
}
if (malformed_asm)
{
/* Avoid further trouble with this insn. */
- PATTERN (insn) = gen_rtx (USE, VOIDmode, const0_rtx);
+ PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
PUT_MODE (insn, VOIDmode);
return;
}
REG_UNUSED. */
if (! TEST_HARD_REG_BIT (regstack->reg_set, REGNO (op)))
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_UNUSED, op,
- REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_UNUSED, op,
+ REG_NOTES (insn));
CLEAR_HARD_REG_BIT (regstack->reg_set, REGNO (op));
}
if (! TEST_HARD_REG_BIT (regstack->reg_set, REGNO (operands[i]))
&& operand_matches[i] == -1
&& find_regno_note (insn, REG_DEAD, REGNO (operands[i])) == NULL_RTX)
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_DEAD, operands[i],
- REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD, operands[i],
+ REG_NOTES (insn));
SET_HARD_REG_BIT (regstack->reg_set, REGNO (operands[i]));
}
{
if (TEST_HARD_REG_BIT (src, regno)
&& ! TEST_HARD_REG_BIT (dest, regno))
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_DEAD,
- FP_MODE_REG (regno, DFmode),
- REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD,
+ FP_MODE_REG (regno, DFmode),
+ REG_NOTES (insn));
else if (TEST_HARD_REG_BIT (dest, regno))
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_UNUSED,
- FP_MODE_REG (regno, DFmode),
- REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_UNUSED,
+ FP_MODE_REG (regno, DFmode),
+ REG_NOTES (insn));
}
if (GET_CODE (insn) == CALL_INSN)
cannot be used on these insns, because they do not appear in
block_number[]. */
- pat = gen_rtx (SET, VOIDmode, FP_MODE_REG (reg, DFmode),
- CONST0_RTX (DFmode));
+ pat = gen_rtx_SET (VOIDmode, FP_MODE_REG (reg, DFmode),
+ CONST0_RTX (DFmode));
init = emit_insn_after (pat, insn);
PUT_MODE (init, QImode);
/* Make a list of all labels referred to other than by jumps. */
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_LABEL)
- label_value_list = gen_rtx (EXPR_LIST, VOIDmode, XEXP (note, 0),
- label_value_list);
+ label_value_list = gen_rtx_EXPR_LIST (VOIDmode, XEXP (note, 0),
+ label_value_list);
}
block_number[INSN_UID (insn)] = block;
{
for (x = label_value_list; x; x = XEXP (x, 1))
record_label_references (insn,
- gen_rtx (LABEL_REF, VOIDmode,
- XEXP (x, 0)));
+ gen_rtx_LABEL_REF (VOIDmode,
+ XEXP (x, 0)));
for (x = forced_labels; x; x = XEXP (x, 1))
record_label_references (insn,
- gen_rtx (LABEL_REF, VOIDmode,
- XEXP (x, 0)));
+ gen_rtx_LABEL_REF (VOIDmode,
+ XEXP (x, 0)));
}
record_label_references (insn, pat);
{
rtx init_rtx;
- init_rtx = gen_rtx (SET, VOIDmode, FP_MODE_REG(reg, DFmode),
- CONST0_RTX (DFmode));
+ init_rtx = gen_rtx_SET (VOIDmode, FP_MODE_REG(reg, DFmode),
+ CONST0_RTX (DFmode));
block_begin[0] = emit_insn_after (init_rtx, first);
PUT_MODE (block_begin[0], QImode);
if (hard_regno < FIRST_STACK_REG)
abort ();
- pop_rtx = gen_rtx (SET, VOIDmode, FP_MODE_REG (hard_regno, DFmode),
- FP_MODE_REG (FIRST_STACK_REG, DFmode));
+ pop_rtx = gen_rtx_SET (VOIDmode, FP_MODE_REG (hard_regno, DFmode),
+ FP_MODE_REG (FIRST_STACK_REG, DFmode));
pop_insn = (*when) (pop_rtx, insn);
/* ??? This used to be VOIDmode, but that seems wrong. */
PUT_MODE (pop_insn, QImode);
- REG_NOTES (pop_insn) = gen_rtx (EXPR_LIST, REG_DEAD,
- FP_MODE_REG (FIRST_STACK_REG, DFmode),
- REG_NOTES (pop_insn));
+ REG_NOTES (pop_insn) = gen_rtx_EXPR_LIST (REG_DEAD,
+ FP_MODE_REG (FIRST_STACK_REG, DFmode),
+ REG_NOTES (pop_insn));
regstack->reg[regstack->top - (hard_regno - FIRST_STACK_REG)]
= regstack->reg[regstack->top];
push_rtx = gen_movxf (top_stack_reg, top_stack_reg);
push_insn = emit_insn_before (push_rtx, insn);
PUT_MODE (push_insn, QImode);
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_DEAD, top_stack_reg,
- REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD, top_stack_reg,
+ REG_NOTES (insn));
}
replace_reg (psrc, FIRST_STACK_REG);
for (i = 0; i < N_REG_CLASSES; i++)
{
- rtx r = gen_rtx (REG, VOIDmode, 0);
+ rtx r = gen_rtx_REG (VOIDmode, 0);
enum machine_mode m;
for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
enum machine_mode mode;
{
#ifdef HAVE_POST_INCREMENT
- if (memory_address_p (mode, gen_rtx (POST_INC, Pmode, reg)))
+ if (memory_address_p (mode, gen_rtx_POST_INC (Pmode, reg)))
return 1;
#endif
#ifdef HAVE_POST_DECREMENT
- if (memory_address_p (mode, gen_rtx (POST_DEC, Pmode, reg)))
+ if (memory_address_p (mode, gen_rtx_POST_DEC (Pmode, reg)))
return 1;
#endif
#ifdef HAVE_PRE_INCREMENT
- if (memory_address_p (mode, gen_rtx (PRE_INC, Pmode, reg)))
+ if (memory_address_p (mode, gen_rtx_PRE_INC (Pmode, reg)))
return 1;
#endif
#ifdef HAVE_PRE_DECREMENT
- if (memory_address_p (mode, gen_rtx (PRE_DEC, Pmode, reg)))
+ if (memory_address_p (mode, gen_rtx_PRE_DEC (Pmode, reg)))
return 1;
#endif
&SET_SRC (inc_insn_set),
XEXP (SET_SRC (inc_insn_set), 0), 1);
validate_change (insn, &XEXP (use, 0),
- gen_rtx (inc_code,
- Pmode,
- reg), 1);
+ gen_rtx_fmt_e (inc_code, Pmode, reg), 1);
if (apply_change_group ())
{
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_INC,
- reg, REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_INC,
+ reg, REG_NOTES (insn));
if (! inc_insn_set)
{
PUT_CODE (inc_insn, NOTE);
XEXP (src, 0) = src_reg;
return;
}
- subreg = gen_rtx(SUBREG, old_mode, src_reg, 0);
+ subreg = gen_rtx_SUBREG (old_mode, src_reg, 0);
while (p = NEXT_INSN (p), p != insn)
{
if (GET_RTX_CLASS (GET_CODE (p)) != 'i')
>= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dst))))
{
src_subreg
- = gen_rtx(SUBREG, GET_MODE (SUBREG_REG (dst)),
- src, SUBREG_WORD (dst));
+ = gen_rtx_SUBREG (GET_MODE (SUBREG_REG (dst)),
+ src, SUBREG_WORD (dst));
dst = SUBREG_REG (dst);
}
if (GET_CODE (dst) != REG
break;
if (i + 2 >= FIRST_PSEUDO_REGISTER)
break;
- reg0 = gen_rtx (REG, insn_operand_mode[icode][0], i);
- reg1 = gen_rtx (REG, insn_operand_mode[icode][1], i + 1);
- reg2 = gen_rtx (REG, insn_operand_mode[icode][2], i + 2);
+ reg0 = gen_rtx_REG (insn_operand_mode[icode][0], i);
+ reg1 = gen_rtx_REG (insn_operand_mode[icode][1], i + 1);
+ reg2 = gen_rtx_REG (insn_operand_mode[icode][2], i + 2);
if (! (*insn_operand_predicate[icode][0]) (reg0, VOIDmode)
|| ! (*insn_operand_predicate[icode][1]) (reg1, VOIDmode)
|| ! (*insn_operand_predicate[icode][2]) (reg2, VOIDmode))
{
if (GET_CODE (XEXP (in, 0)) == POST_INC
|| GET_CODE (XEXP (in, 0)) == POST_DEC)
- in = gen_rtx (MEM, GET_MODE (in), XEXP (XEXP (in, 0), 0));
+ in = gen_rtx_MEM (GET_MODE (in), XEXP (XEXP (in, 0), 0));
if (GET_CODE (XEXP (in, 0)) == PRE_INC
|| GET_CODE (XEXP (in, 0)) == PRE_DEC)
- out = gen_rtx (MEM, GET_MODE (out), XEXP (XEXP (out, 0), 0));
+ out = gen_rtx_MEM (GET_MODE (out), XEXP (XEXP (out, 0), 0));
}
/* If we are reloading a (SUBREG constant ...), really reload just the
if (in != 0 && GET_CODE (in) == SUBREG && GET_CODE (SUBREG_REG (in)) == REG
&& REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
&& ! dont_remove_subreg)
- in = gen_rtx (REG, GET_MODE (in),
- REGNO (SUBREG_REG (in)) + SUBREG_WORD (in));
+ in = gen_rtx_REG (GET_MODE (in),
+ REGNO (SUBREG_REG (in)) + SUBREG_WORD (in));
/* Similarly for OUT. */
if (out != 0 && GET_CODE (out) == SUBREG
&& GET_CODE (SUBREG_REG (out)) == REG
&& REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
&& ! dont_remove_subreg)
- out = gen_rtx (REG, GET_MODE (out),
- REGNO (SUBREG_REG (out)) + SUBREG_WORD (out));
+ out = gen_rtx_REG (GET_MODE (out),
+ REGNO (SUBREG_REG (out)) + SUBREG_WORD (out));
/* Narrow down the class of register wanted if that is
desirable on this machine for efficiency. */
&& TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
&& !fixed_regs[regno])
{
- reload_reg_rtx[i] = gen_rtx (REG, inmode, regno);
+ reload_reg_rtx[i] = gen_rtx_REG (inmode, regno);
break;
}
}
REGNO (XEXP (note, 0)))))))
&& ! fixed_regs[REGNO (XEXP (note, 0))])
{
- reload_reg_rtx[output_reload] = gen_rtx (REG,
- reload_outmode[output_reload],
- REGNO (XEXP (note, 0)));
+ reload_reg_rtx[output_reload]
+ = gen_rtx_REG (reload_outmode[output_reload],
+ REGNO (XEXP (note, 0)));
return;
}
}
if (GET_CODE (real_out) == REG)
value = real_out;
else
- value = gen_rtx (REG, outmode, regno);
+ value = gen_rtx_REG (outmode, regno);
}
}
if (GET_CODE (real_in) == REG)
value = real_in;
else
- value = gen_rtx (REG, inmode, regno);
+ value = gen_rtx_REG (inmode, regno);
}
}
}
{
if (GET_CODE (XEXP (offset, 0)) == CONST_INT)
{
- base = gen_rtx (PLUS, GET_MODE (base), base, XEXP (offset, 1));
+ base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
offset = XEXP (offset, 0);
}
else if (GET_CODE (XEXP (offset, 1)) == CONST_INT)
{
- base = gen_rtx (PLUS, GET_MODE (base), base, XEXP (offset, 0));
+ base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
offset = XEXP (offset, 1);
}
else
{
- base = gen_rtx (PLUS, GET_MODE (base), base, offset);
+ base = gen_rtx_PLUS (GET_MODE (base), base, offset);
offset = const0_rtx;
}
}
else if (GET_CODE (offset) != CONST_INT)
{
- base = gen_rtx (PLUS, GET_MODE (base), base, offset);
+ base = gen_rtx_PLUS (GET_MODE (base), base, offset);
offset = const0_rtx;
}
if (all_const && GET_CODE (base) == PLUS)
- base = gen_rtx (CONST, GET_MODE (base), base);
+ base = gen_rtx_CONST (GET_MODE (base), base);
if (GET_CODE (offset) != CONST_INT)
abort ();
{
error_for_asm (insn, "operand constraints differ in number of alternatives");
/* Avoid further trouble with this insn. */
- PATTERN (insn) = gen_rtx (USE, VOIDmode, const0_rtx);
+ PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
n_reloads = 0;
return;
}
after it. */
if (modified[i] != RELOAD_READ)
- PUT_MODE (emit_insn_after (gen_rtx (CLOBBER, VOIDmode,
- recog_operand[i]),
+ PUT_MODE (emit_insn_after (gen_rtx_CLOBBER (VOIDmode,
+ recog_operand[i]),
insn),
DImode);
*recog_operand_loc[i] = recog_operand[i]
- = gen_rtx (MEM, GET_MODE (recog_operand[i]), address);
+ = gen_rtx_MEM (GET_MODE (recog_operand[i]), address);
RTX_UNCHANGING_P (recog_operand[i])
= RTX_UNCHANGING_P (regno_reg_rtx[regno]);
find_reloads_address (GET_MODE (recog_operand[i]),
abort ();
error_for_asm (insn, "inconsistent operand constraints in an `asm'");
/* Avoid further trouble with this insn. */
- PATTERN (insn) = gen_rtx (USE, VOIDmode, const0_rtx);
+ PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
n_reloads = 0;
return;
}
{
error_for_asm (insn, "inconsistent operand constraints in an `asm'");
/* Avoid further trouble with this insn. */
- PATTERN (insn) = gen_rtx (USE, VOIDmode, const0_rtx);
+ PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
n_reloads = 0;
return;
}
if (rtx_varies_p (addr))
addr = copy_rtx (addr);
- x = gen_rtx (MEM, GET_MODE (x), addr);
+ x = gen_rtx_MEM (GET_MODE (x), addr);
RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[regno]);
find_reloads_address (GET_MODE (x), NULL_PTR,
XEXP (x, 0),
offset -= MIN (size, UNITS_PER_WORD);
}
addr = plus_constant (addr, offset);
- x = gen_rtx (MEM, GET_MODE (x), addr);
+ x = gen_rtx_MEM (GET_MODE (x), addr);
RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[regno]);
find_reloads_address (GET_MODE (x), NULL_PTR,
XEXP (x, 0),
if (rtx_varies_p (tem))
tem = copy_rtx (tem);
- tem = gen_rtx (MEM, GET_MODE (ad), tem);
+ tem = gen_rtx_MEM (GET_MODE (ad), tem);
RTX_UNCHANGING_P (tem) = RTX_UNCHANGING_P (regno_reg_rtx[regno]);
memlocs[n_memlocs++] = tem;
return tem;
|| XEXP (XEXP (ad, 0), 0) == stack_pointer_rtx)
&& ! memory_address_p (mode, ad))
{
- *loc = ad = gen_rtx (PLUS, GET_MODE (ad),
- plus_constant (XEXP (XEXP (ad, 0), 0),
- INTVAL (XEXP (ad, 1))),
+ *loc = ad = gen_rtx_PLUS (GET_MODE (ad),
+ plus_constant (XEXP (XEXP (ad, 0), 0),
+ INTVAL (XEXP (ad, 1))),
XEXP (XEXP (ad, 0), 1));
find_reloads_address_part (XEXP (ad, 0), &XEXP (ad, 0),
reload_address_base_reg_class,
|| XEXP (XEXP (ad, 0), 1) == stack_pointer_rtx)
&& ! memory_address_p (mode, ad))
{
- *loc = ad = gen_rtx (PLUS, GET_MODE (ad),
- XEXP (XEXP (ad, 0), 0),
- plus_constant (XEXP (XEXP (ad, 0), 1),
- INTVAL (XEXP (ad, 1))));
+ *loc = ad = gen_rtx_PLUS (GET_MODE (ad),
+ XEXP (XEXP (ad, 0), 0),
+ plus_constant (XEXP (XEXP (ad, 0), 1),
+ INTVAL (XEXP (ad, 1))));
find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
reload_address_base_reg_class,
GET_MODE (ad), opnum, type, ind_levels);
if (GET_CODE (y) == CONST)
y = XEXP (y, 0);
- return gen_rtx (CONST, VOIDmode, gen_rtx (PLUS, mode, x, y));
+ return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
}
- return gen_rtx (PLUS, mode, x, y);
+ return gen_rtx_PLUS (mode, x, y);
}
\f
/* If ADDR is a sum containing a pseudo register that should be
op0 = SUBREG_REG (op0);
code0 = GET_CODE (op0);
if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
- op0 = gen_rtx (REG, word_mode,
- REGNO (op0) + SUBREG_WORD (orig_op0));
+ op0 = gen_rtx_REG (word_mode,
+ REGNO (op0) + SUBREG_WORD (orig_op0));
}
if (GET_CODE (op1) == SUBREG)
op1 = SUBREG_REG (op1);
code1 = GET_CODE (op1);
if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
- op1 = gen_rtx (REG, GET_MODE (op1),
- REGNO (op1) + SUBREG_WORD (orig_op1));
+ op1 = gen_rtx_REG (GET_MODE (op1),
+ REGNO (op1) + SUBREG_WORD (orig_op1));
}
if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
&XEXP (tem, 0), opnum, type,
ind_levels, insn);
/* Put this inside a new increment-expression. */
- x = gen_rtx (GET_CODE (x), GET_MODE (x), tem);
+ x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
/* Proceed to reload that, as if it contained a register. */
}
{
rtx tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
- x = gen_rtx (PLUS, GET_MODE (x), XEXP (x, 0), tem);
+ x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
find_reloads_address (mode, &tem, XEXP (tem, 0), &XEXP (tem, 0),
opnum, type, ind_levels, 0);
}
do the wrong thing if RELOADREG is multi-word. RELOADREG
will always be a REG here. */
if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
- reloadreg = gen_rtx (REG, r->mode, REGNO (reloadreg));
+ reloadreg = gen_rtx_REG (r->mode, REGNO (reloadreg));
/* If we are putting this into a SUBREG and RELOADREG is a
SUBREG, we would be making nested SUBREGs, so we have to fix
if (reloadreg && r->where == loc)
{
if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
- reloadreg = gen_rtx (REG, r->mode, REGNO (reloadreg));
+ reloadreg = gen_rtx_REG (r->mode, REGNO (reloadreg));
return reloadreg;
}
??? Is it actually still ever a SUBREG? If so, why? */
if (GET_CODE (reloadreg) == REG)
- return gen_rtx (REG, GET_MODE (*loc),
- REGNO (reloadreg) + SUBREG_WORD (*loc));
+ return gen_rtx_REG (GET_MODE (*loc),
+ REGNO (reloadreg) + SUBREG_WORD (*loc));
else if (GET_MODE (reloadreg) == GET_MODE (*loc))
return reloadreg;
else
- return gen_rtx (SUBREG, GET_MODE (*loc), SUBREG_REG (reloadreg),
- SUBREG_WORD (reloadreg) + SUBREG_WORD (*loc));
+ return gen_rtx_SUBREG (GET_MODE (*loc), SUBREG_REG (reloadreg),
+ SUBREG_WORD (reloadreg) + SUBREG_WORD (*loc));
}
}
rtx y = find_replacement (&XEXP (*loc, 1));
if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
- return gen_rtx (GET_CODE (*loc), GET_MODE (*loc), x, y);
+ return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
}
return *loc;
permitted, zero if it is not permitted at all. */
register rtx tem
- = gen_rtx (MEM, Pmode,
- gen_rtx (PLUS, Pmode,
- gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
- GEN_INT (4)));
+ = gen_rtx_MEM (Pmode,
+ gen_rtx_PLUS (Pmode,
+ gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1),
+ GEN_INT (4)));
spill_indirect_levels = 0;
while (memory_address_p (QImode, tem))
{
spill_indirect_levels++;
- tem = gen_rtx (MEM, Pmode, tem);
+ tem = gen_rtx_MEM (Pmode, tem);
}
/* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
- tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
+ tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
indirect_symref_ok = memory_address_p (QImode, tem);
/* See if reg+reg is a valid (and offsettable) address. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
{
- tem = gen_rtx (PLUS, Pmode,
- gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
- gen_rtx (REG, Pmode, i));
+ tem = gen_rtx_PLUS (Pmode,
+ gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
+ gen_rtx_REG (Pmode, i));
/* This way, we make sure that reg+reg is an offsettable address. */
tem = plus_constant (tem, 4);
for (ep = reg_eliminate; ep < ®_eliminate[NUM_ELIMINABLE_REGS]; ep++)
{
num_eliminable += ep->can_eliminate;
- ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
- ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
+ ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
+ ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
}
num_labels = max_label_num () - get_first_label_num ();
below. */
adjust = GET_MODE_SIZE (mode) - total_size;
if (adjust)
- stack_slot = gen_rtx (MEM, mode_for_size (total_size
- * BITS_PER_UNIT,
- MODE_INT, 1),
+ stack_slot = gen_rtx_MEM (mode_for_size (total_size
+ * BITS_PER_UNIT,
+ MODE_INT, 1),
plus_constant (XEXP (x, 0), adjust));
}
spill_stack_slot[from_reg] = stack_slot;
wrong mode, make a new stack slot. */
if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
{
- x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
+ x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
plus_constant (XEXP (x, 0), adjust));
RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
}
&& INTVAL (XEXP (x, 1)) == - ep->previous_offset)
return ep->to_rtx;
else
- return gen_rtx (PLUS, Pmode, ep->to_rtx,
- plus_constant (XEXP (x, 1),
- ep->previous_offset));
+ return gen_rtx_PLUS (Pmode, ep->to_rtx,
+ plus_constant (XEXP (x, 1),
+ ep->previous_offset));
}
/* If the register is not eliminable, we are done since the other
turn a PLUS into something else. We might try to do so here
for an addition of 0 if we aren't optimizing. */
if (! mem_mode && GET_CODE (new) != PLUS)
- return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
+ return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
else
return new;
}
ep->ref_outside_mem = 1;
return
- plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
+ plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
ep->previous_offset * INTVAL (XEXP (x, 1)));
}
= XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
- return gen_rtx (code, GET_MODE (x), new0, new1);
+ return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
}
return x;
{
new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
if (new != XEXP (x, 0))
- x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
+ x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
}
/* ... fall through ... */
{
new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
if (new != XEXP (x, 1))
- return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
+ return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
}
return x;
case FFS:
new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
if (new != XEXP (x, 0))
- return gen_rtx (code, GET_MODE (x), new);
+ return gen_rtx_fmt_e (code, GET_MODE (x), new);
return x;
case SUBREG:
insn so that delete_output_reload will do the right thing. */
if (insn != 0 && GET_CODE (insn) != EXPR_LIST
&& GET_CODE (insn) != INSN_LIST)
- emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
+ emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)),
insn);
}
}
return new;
}
else
- return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
+ return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
}
return x;
new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
if (new != XEXP (x, 0))
- return gen_rtx (code, GET_MODE (x), new);
+ return gen_rtx_fmt_e (code, GET_MODE (x), new);
return x;
case CLOBBER:
new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
if (new != XEXP (x, 0))
- return gen_rtx (code, GET_MODE (x), new);
+ return gen_rtx_fmt_e (code, GET_MODE (x), new);
return x;
case ASM_OPERANDS:
if (new_asm_operands_vec == old_asm_operands_vec)
return x;
- new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
- ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
- ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
- ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
- ASM_OPERANDS_SOURCE_FILE (x),
- ASM_OPERANDS_SOURCE_LINE (x));
+ new = gen_rtx_ASM_OPERANDS (VOIDmode, ASM_OPERANDS_TEMPLATE (x),
+ ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
+ ASM_OPERANDS_OUTPUT_IDX (x),
+ new_asm_operands_vec,
+ ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
+ ASM_OPERANDS_SOURCE_FILE (x),
+ ASM_OPERANDS_SOURCE_LINE (x));
new->volatil = x->volatil;
return new;
}
if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
&& insn != 0 && GET_CODE (insn) != EXPR_LIST
&& GET_CODE (insn) != INSN_LIST)
- emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
+ emit_insn_after (gen_rtx_CLOBBER (VOIDmode, SET_DEST (x)), insn);
if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
- return gen_rtx (SET, VOIDmode, new0, new1);
+ return gen_rtx_SET (VOIDmode, new0, new1);
}
return x;
new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
if (new != XEXP (x, 0))
{
- new = gen_rtx (MEM, GET_MODE (x), new);
+ new = gen_rtx_MEM (GET_MODE (x), new);
new->volatil = x->volatil;
new->unchanging = x->unchanging;
new->in_struct = x->in_struct;
/* We assume here that we don't need a PARALLEL of
any CLOBBERs for this assignment. There's not
much we can do if we do need it. */
- PATTERN (insn) = gen_rtx (SET, VOIDmode,
- SET_DEST (old_set), ep->to_rtx);
+ PATTERN (insn) = gen_rtx_SET (VOIDmode,
+ SET_DEST (old_set),
+ ep->to_rtx);
INSN_CODE (insn) = -1;
val = 1;
goto done;
if (new == 0 || GET_MODE (new) != reload_mode[r])
spill_reg_rtx[i] = new
- = gen_rtx (REG, reload_mode[r], spill_regs[i]);
+ = gen_rtx_REG (reload_mode[r], spill_regs[i]);
regno = true_regnum (new);
address and not all machines support SUBREGs
there. */
regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
- equiv = gen_rtx (REG, reload_mode[r], regno);
+ equiv = gen_rtx_REG (reload_mode[r], regno);
}
else
abort ();
must always be a REG here. */
if (GET_MODE (reloadreg) != mode)
- reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
+ reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
oldequiv = SUBREG_REG (oldequiv);
if (GET_MODE (oldequiv) != VOIDmode
&& mode != GET_MODE (oldequiv))
- oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
+ oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
/* Switch to the right place to emit the reload insns. */
switch (reload_when_needed[j])
oldequiv = old, real_oldequiv = real_old;
else
second_reload_reg
- = gen_rtx (REG, new_mode,
- REGNO (second_reload_reg));
+ = gen_rtx_REG (new_mode,
+ REGNO (second_reload_reg));
}
}
}
&& reg_overlap_mentioned_for_reload_p (second_reload_reg,
PATTERN (prev)))
{
- REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
- second_reload_reg,
- REG_NOTES (prev));
+ REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_DEAD,
+ second_reload_reg,
+ REG_NOTES (prev));
break;
}
}
|| reload_when_needed[j] == RELOAD_FOR_INPUT)
&& ! dead_or_set_p (insn, reloadreg))
REG_NOTES (insn)
- = gen_rtx (EXPR_LIST, REG_DEAD,
- reloadreg, REG_NOTES (insn));
+ = gen_rtx_EXPR_LIST (REG_DEAD,
+ reloadreg, REG_NOTES (insn));
}
/* When we inherit a reload, the last marked death of the reload reg
&& reg_overlap_mentioned_for_reload_p (oldequiv_reg,
PATTERN (prev1)))
{
- REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
- oldequiv_reg,
- REG_NOTES (prev1));
+ REG_NOTES (prev1) = gen_rtx_EXPR_LIST (REG_DEAD,
+ oldequiv_reg,
+ REG_NOTES (prev1));
break;
}
remove_death (REGNO (oldequiv_reg), prev);
error_for_asm (insn, "output operand is constant in `asm'");
/* Prevent crash--use something we know is valid. */
mode = word_mode;
- old = gen_rtx (REG, mode, REGNO (reloadreg));
+ old = gen_rtx_REG (mode, REGNO (reloadreg));
}
if (GET_MODE (reloadreg) != mode)
- reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
+ reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
= reload_secondary_out_icode[secondary_reload];
if (GET_MODE (reloadreg) != mode)
- reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
+ reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
if (tertiary_icode != CODE_FOR_nothing)
{
if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
&& reg_overlap_mentioned_for_reload_p (reloadreg,
PATTERN (p)))
- REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
- reloadreg, REG_NOTES (p));
+ REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_DEAD,
+ reloadreg, REG_NOTES (p));
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
if (! special && second_reloadreg
if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
&& reg_overlap_mentioned_for_reload_p (second_reloadreg,
PATTERN (p)))
- REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
- second_reloadreg, REG_NOTES (p));
+ REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_DEAD,
+ second_reloadreg,
+ REG_NOTES (p));
#endif
#endif
/* Look at all insns we emitted, just to be safe. */
for (k = 1; k < nnr; k++)
reg_last_reload_reg[nregno + k]
= (nr == nnr
- ? gen_rtx (REG,
- reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
- REGNO (reload_reg_rtx[r]) + k)
+ ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
+ REGNO (reload_reg_rtx[r]) + k)
: 0);
/* Now do the inverse operation. */
for (k = 1; k < nnr; k++)
reg_last_reload_reg[nregno + k]
= (nr == nnr
- ? gen_rtx (REG,
- reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
- REGNO (reload_reg_rtx[r]) + k)
+ ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
+ REGNO (reload_reg_rtx[r]) + k)
: 0);
/* Unless we inherited this reload, show we haven't
tem = op0, op0 = op1, op1 = tem;
if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
- in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
+ in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
- insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
+ insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
code = recog_memoized (insn);
if (code >= 0)
rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
if (GET_MODE (loc) != GET_MODE (out))
- out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
+ out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
if (GET_MODE (loc) != GET_MODE (in))
- in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
+ in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
gen_reload (loc, in, opnum, type);
gen_reload (out, loc, opnum, type);
/* Otherwise, just write (set OUT IN) and hope for the best. */
else
- emit_insn (gen_rtx (SET, VOIDmode, out, in));
+ emit_insn (gen_rtx_SET (VOIDmode, out, in));
/* Return the first insn emitted.
We can not just return get_last_insn, because there may have
in gen_reload. */
last = get_last_insn ();
- add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
- gen_rtx (PLUS, GET_MODE (incloc),
- incloc, inc)));
+ add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
+ gen_rtx_PLUS (GET_MODE (incloc),
+ incloc, inc)));
code = recog_memoized (add_insn);
if (code >= 0)
/* We pass this to reload_cse_invalidate_mem to invalidate all of
memory for a non-const call instruction. */
- callmem = gen_rtx (MEM, BLKmode, const0_rtx);
+ callmem = gen_rtx_MEM (BLKmode, const0_rtx);
/* This is used in reload_cse_invalidate_regno to avoid consing a
new REG in a loop in that function. */
- invalidate_regno_rtx = gen_rtx (REG, VOIDmode, 0);
+ invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0);
for (insn = first; insn; insn = NEXT_INSN (insn))
{
pop_obstacks ();
validated = validate_change (insn, &SET_SRC (set),
- gen_rtx (REG, dest_mode, i), 1);
+ gen_rtx_REG (dest_mode, i), 1);
/* Go back to the obstack we are using for temporary
storage. */
replacement register if we don't have one for this
alternative yet. */
if (op_alt_regno[i][j] == -1
- && reg_fits_class_p (gen_rtx (REG, mode, regno), class,
+ && reg_fits_class_p (gen_rtx_REG (mode, regno), class,
0, mode))
{
alternative_nregs[j]++;
reload_cse_no_longer_dead (op_alt_regno[i][j], mode);
validate_change (insn, recog_operand_loc[i],
- gen_rtx (REG, mode, op_alt_regno[i][j]), 1);
+ gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
}
for (i = insn_n_dups[insn_code_number] - 1; i >= 0; i--)
reload_cse_no_longer_dead (op_alt_regno[op][j], mode);
validate_change (insn, recog_dup_loc[i],
- gen_rtx (REG, mode, op_alt_regno[op][j]), 1);
+ gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
}
/* Go back to the obstack we are using for temporary
else
tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
if (tmp)
- reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, tmp,
- reg_values[dreg]);
+ reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp,
+ reg_values[dreg]);
}
}
else
- reg_values[dreg] = gen_rtx (EXPR_LIST, dest_mode, src, NULL_RTX);
+ reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX);
/* We've changed DREG, so invalidate any values held by other
registers that depend upon it. */
/* If we're storing a register to memory, add DEST to the list
in REG_VALUES. */
if (sreg >= 0 && ! side_effects_p (dest))
- reg_values[sreg] = gen_rtx (EXPR_LIST, dest_mode, dest,
+ reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest,
reg_values[sreg]);
}
else
/* Allocate the rtvec to hold the insns and the SEQUENCE. */
rtvec seqv = rtvec_alloc (length + 1);
- rtx seq = gen_rtx (SEQUENCE, VOIDmode, seqv);
+ rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
rtx seq_insn = make_insn_raw (seq);
rtx first = get_insns ();
rtx last = get_last_insn ();
if (tinfo)
tinfo->block = -1;
- return gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
+ return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
}
/* Otherwise this must be an INSN_LIST. Add INSN to the end of the
|| (GET_CODE (XEXP (src, 2)) == LABEL_REF
&& XEXP (XEXP (src, 2), 0) == target))
&& XEXP (src, 1) == pc_rtx)
- return gen_rtx (reverse_condition (GET_CODE (XEXP (src, 0))),
- GET_MODE (XEXP (src, 0)),
- XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));
+ return gen_rtx_fmt_ee (reverse_condition (GET_CODE (XEXP (src, 0))),
+ GET_MODE (XEXP (src, 0)),
+ XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));
return 0;
}
INSN_FROM_TARGET_P (next_to_match) = 0;
}
else
- merged_insns = gen_rtx (INSN_LIST, VOIDmode, trial, merged_insns);
+ merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);
if (++slot_number == num_slots)
break;
INSN_FROM_TARGET_P (next_to_match) = 0;
}
else
- merged_insns = gen_rtx (INSN_LIST, SImode, dtrial,
- merged_insns);
+ merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
+ merged_insns);
if (++slot_number == num_slots)
break;
if (INSN_FROM_TARGET_P (insn))
return;
- emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);
+ emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
/* INSN might be making a value live in a block where it didn't use to
be. So recompute liveness information for this block. */
tail, of the list. */
update_reg_dead_notes (trial, insn);
- delay_list = gen_rtx (INSN_LIST, VOIDmode,
- trial, delay_list);
+ delay_list = gen_rtx_INSN_LIST (VOIDmode,
+ trial, delay_list);
update_block (trial, trial);
delete_insn (trial);
if (slots_to_fill == ++slots_filled)
insns we find on the head of the list. */
current_function_epilogue_delay_list
- = gen_rtx (INSN_LIST, VOIDmode, trial,
- current_function_epilogue_delay_list);
+ = gen_rtx_INSN_LIST (VOIDmode, trial,
+ current_function_epilogue_delay_list);
mark_referenced_resources (trial, &end_of_function_needs, 1);
update_block (trial, trial);
delete_insn (trial);
the negated constant. Otherwise, reverse the sense of the
arithmetic. */
if (GET_CODE (other) == CONST_INT)
- new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
- negate_rtx (GET_MODE (src), other));
+ new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
+ negate_rtx (GET_MODE (src), other));
else
- new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
- GET_MODE (src), dest, other);
+ new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
+ GET_MODE (src), dest, other);
- ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
+ ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
insn);
if (recog_memoized (ninsn) < 0
continue;
pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_BR_PRED,
- GEN_INT (pred_flags), REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
+ GEN_INT (pred_flags),
+ REG_NOTES (insn));
}
}
#endif /* DELAY_SLOTS */
while (--i >= 0)
{
reg_last_uses[regno + i]
- = gen_rtx (INSN_LIST, VOIDmode,
- insn, reg_last_uses[regno + i]);
+ = gen_rtx_INSN_LIST (VOIDmode,
+ insn, reg_last_uses[regno + i]);
if (reg_last_sets[regno + i])
add_dependence (insn, reg_last_sets[regno + i], 0);
if ((call_used_regs[regno + i] || global_regs[regno + i])
else
{
reg_last_uses[regno]
- = gen_rtx (INSN_LIST, VOIDmode, insn, reg_last_uses[regno]);
+ = gen_rtx_INSN_LIST (VOIDmode, insn, reg_last_uses[regno]);
if (reg_last_sets[regno])
add_dependence (insn, reg_last_sets[regno], 0);
convert back into a NOTE_INSN_SETJMP note. See
reemit_notes for why we use a pair of NOTEs. */
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_DEAD,
- GEN_INT (0),
- REG_NOTES (insn));
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_DEAD,
- GEN_INT (NOTE_INSN_SETJMP),
- REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD,
+ GEN_INT (0),
+ REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD,
+ GEN_INT (NOTE_INSN_SETJMP),
+ REG_NOTES (insn));
}
else
{
|| (NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP
&& GET_CODE (PREV_INSN (insn)) != CALL_INSN)))
{
- loop_notes = gen_rtx (EXPR_LIST, REG_DEAD,
- GEN_INT (NOTE_BLOCK_NUMBER (insn)), loop_notes);
- loop_notes = gen_rtx (EXPR_LIST, REG_DEAD,
- GEN_INT (NOTE_LINE_NUMBER (insn)), loop_notes);
+ loop_notes = gen_rtx_EXPR_LIST (REG_DEAD,
+ GEN_INT (NOTE_BLOCK_NUMBER (insn)),
+ loop_notes);
+ loop_notes = gen_rtx_EXPR_LIST (REG_DEAD,
+ GEN_INT (NOTE_LINE_NUMBER (insn)),
+ loop_notes);
CONST_CALL_P (loop_notes) = CONST_CALL_P (insn);
}
{
rtx temp_reg, temp_link;
- temp_reg = gen_rtx (REG, word_mode, 0);
+ temp_reg = gen_rtx_REG (word_mode, 0);
temp_link = rtx_alloc (EXPR_LIST);
PUT_REG_NOTE_KIND (temp_link, REG_DEAD);
XEXP (temp_link, 0) = temp_reg;
i >= 0; i--)
if (! REGNO_REG_SET_P (old_live_regs, regno + i)
&& ! dead_or_set_regno_p (insn, regno + i))
- create_reg_dead_note (gen_rtx (REG,
- reg_raw_mode[regno + i],
- regno + i),
+ create_reg_dead_note (gen_rtx_REG (reg_raw_mode[regno + i],
+ regno + i),
insn);
}
}
for (insn = first; insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
&& reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_LABEL,
- XEXP (note, 0), REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL,
+ XEXP (note, 0),
+ REG_NOTES (insn));
break;
case REG_CC_SETTER:
/* Create an insn here so that we can hang dependencies off of it later. */
sched_before_next_call
- = gen_rtx (INSN, VOIDmode, 0, NULL_RTX, NULL_RTX,
- NULL_RTX, 0, NULL_RTX, NULL_RTX);
+ = gen_rtx_INSN (VOIDmode, 0, NULL_RTX, NULL_RTX,
+ NULL_RTX, 0, NULL_RTX, NULL_RTX);
/* Initialize the unused_*_lists. We can't use the ones left over from
the previous function, because gcc has freed that memory. We can use
if (context != 0 && context != current_function_decl)
{
struct function *p = find_function_data (context);
- rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
+ rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
rtx temp;
p->has_nonlocal_label = 1;
emit_move_insn (static_chain_rtx, label_ref);
/* USE of hard_frame_pointer_rtx added for consistency; not clear if
really needed. */
- emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
- emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
- emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
+ emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
emit_indirect_jump (temp);
}
}
if (TREE_CODE (body) == ADDR_EXPR)
body = TREE_OPERAND (body, 0);
- emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
- TREE_STRING_POINTER (body)));
+ emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
+ TREE_STRING_POINTER (body)));
last_expr_type = 0;
}
argvec = rtvec_alloc (ninputs);
constraints = rtvec_alloc (ninputs);
- body = gen_rtx (ASM_OPERANDS, VOIDmode,
- TREE_STRING_POINTER (string), "", 0, argvec, constraints,
- filename, line);
+ body = gen_rtx_ASM_OPERANDS (VOIDmode,
+ TREE_STRING_POINTER (string), "", 0, argvec,
+ constraints, filename, line);
/* The only use of BODY is if no outputs are specified, so set
it volatile, at least for now. */
}
XVECEXP (body, 4, i) /* constraints */
- = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
- TREE_STRING_POINTER (TREE_PURPOSE (tail)));
+ = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
+ TREE_STRING_POINTER (TREE_PURPOSE (tail)));
i++;
}
XVECEXP (body, 3, ninputs - ninout + i) /* argvec */
= output_rtx[j];
XVECEXP (body, 4, ninputs - ninout + i) /* constraints */
- = gen_rtx (ASM_INPUT, inout_mode[j], match[j]);
+ = gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
}
/* Now, for each output, construct an rtx
if (noutputs == 1 && nclobbers == 0)
{
XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
- insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
+ insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
}
else if (noutputs == 0 && nclobbers == 0)
{
rtx obody = body;
int num = noutputs;
if (num == 0) num = 1;
- body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));
+ body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
/* For each output operand, store a SET. */
for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
{
XVECEXP (body, 0, i)
- = gen_rtx (SET, VOIDmode,
- output_rtx[i],
- gen_rtx (ASM_OPERANDS, VOIDmode,
- TREE_STRING_POINTER (string),
- TREE_STRING_POINTER (TREE_PURPOSE (tail)),
- i, argvec, constraints,
- filename, line));
+ = gen_rtx_SET (VOIDmode,
+ output_rtx[i],
+ gen_rtx_ASM_OPERANDS (VOIDmode,
+ TREE_STRING_POINTER (string),
+ TREE_STRING_POINTER (TREE_PURPOSE (tail)),
+ i, argvec, constraints,
+ filename, line));
MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
}
if (j == -4) /* `memory', don't cache memory across asm */
{
XVECEXP (body, 0, i++)
- = gen_rtx (CLOBBER, VOIDmode,
- gen_rtx (MEM, BLKmode,
- gen_rtx (SCRATCH, VOIDmode, 0)));
+ = gen_rtx_CLOBBER (VOIDmode,
+ gen_rtx_MEM (BLKmode,
+ gen_rtx_SCRATCH (VOIDmode)));
continue;
}
/* Use QImode since that's guaranteed to clobber just one reg. */
XVECEXP (body, 0, i++)
- = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
+ = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
}
insn = emit_insn (body);
}
if (GET_CODE (return_reg) == REG
&& REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
- emit_insn (gen_rtx (USE, VOIDmode, return_reg));
+ emit_insn (gen_rtx_USE (VOIDmode, return_reg));
/* Handle calls that return values in multiple non-contiguous locations.
The Irix 6 ABI has examples of this. */
else if (GET_CODE (return_reg) == PARALLEL)
if (GET_CODE (x) == REG
&& REGNO (x) < FIRST_PSEUDO_REGISTER)
- emit_insn (gen_rtx (USE, VOIDmode, x));
+ emit_insn (gen_rtx_USE (VOIDmode, x));
}
}
result_pseudos[xbitpos / BITS_PER_WORD] = dst;
/* Clobber the destination before we move anything into it. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, dst));
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
}
/* We need a new source operand each time bitpos is on a word
start_sequence ();
emit_move_insn (nonlocal_goto_handler_slot,
- gen_rtx (LABEL_REF, Pmode, handler_label));
+ gen_rtx_LABEL_REF (Pmode, handler_label));
insns = get_insns ();
end_sequence ();
emit_insns_before (insns, thisblock->data.block.first_insn);
rtx not_this = gen_label_rtx ();
rtx this = gen_label_rtx ();
do_jump_if_equal (static_chain_rtx,
- gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))),
+ gen_rtx_LABEL_REF (Pmode, DECL_RTL (TREE_VALUE (link))),
this, 0);
emit_jump (not_this);
emit_label (this);
emit_label (not_this);
}
/* If label is not recognized, abort. */
- emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0,
+ emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
VOIDmode, 0);
emit_barrier ();
emit_label (afterward);
/* Create the RTL representation for the variable. */
if (type == error_mark_node)
- DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx);
+ DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
else if (DECL_SIZE (decl) == 0)
/* Variable with incomplete type. */
{
else
/* An initializer is going to decide the size of this array.
Until we know the size, represent its address with a reg. */
- DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode));
+ DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
MEM_IN_STRUCT_P (DECL_RTL (decl)) = AGGREGATE_TYPE_P (type);
}
else if (DECL_MODE (decl) != BLKmode
TYPE_ALIGN (TREE_TYPE (decl)));
/* Reference the variable indirect through that rtx. */
- DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
+ DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);
/* If this is a memory ref that contains aggregate components,
mark it as such for cse and loop optimize. */
DECL_RTL (decl_elt) = x;
else
{
- DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0)));
+ DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x);
RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
}
if (mode == GET_MODE (x))
DECL_RTL (decl_elt) = x;
else
- DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0);
+ DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
}
else
abort ();
while (1)
{
labelvec[i]
- = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label));
+ = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
if (i + TREE_INT_CST_LOW (orig_minval)
== TREE_INT_CST_LOW (n->high))
break;
/* Fill in the gaps with the default. */
for (i = 0; i < ncases; i++)
if (labelvec[i] == 0)
- labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label);
+ labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
/* Output the table */
emit_label (table_label);
if (CASE_VECTOR_PC_RELATIVE || flag_pic)
- emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE,
- gen_rtx (LABEL_REF, Pmode, table_label),
- gen_rtvec_v (ncases, labelvec)));
+ emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
+ gen_rtx_LABEL_REF (Pmode, table_label),
+ gen_rtvec_v (ncases, labelvec)));
else
- emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE,
- gen_rtvec_v (ncases, labelvec)));
+ emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
+ gen_rtvec_v (ncases, labelvec)));
/* If the case insn drops through the table,
after the table we must jump to the default-label.
&& REGNO_LAST_UID (regno) == INSN_UID (insn)
&& (code == CLOBBER || ! reg_mentioned_p (SET_DEST (x),
SET_SRC (x))))
- REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_UNUSED,
- SET_DEST (x), REG_NOTES (insn));
+ REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_UNUSED,
+ SET_DEST (x),
+ REG_NOTES (insn));
}
}
tem = get_label_from_map (map,
CODE_LABEL_NUMBER
(XEXP (SET_SRC (pattern), 0)));
- SET_SRC (pattern) = gen_rtx (LABEL_REF, VOIDmode, tem);
+ SET_SRC (pattern) = gen_rtx_LABEL_REF (VOIDmode, tem);
/* Set the jump label so that it can be used by later loop unrolling
passes. */
mult_res = simplify_binary_operation (MULT, mode, mult1, mult2);
if (! mult_res)
- mult_res = gen_rtx (MULT, mode, mult1, mult2);
+ mult_res = gen_rtx_MULT (mode, mult1, mult2);
/* Again, put the constant second. */
if (GET_CODE (add1) == CONST_INT)
result = simplify_binary_operation (PLUS, mode, add1, mult_res);
if (! result)
- result = gen_rtx (PLUS, mode, add1, mult_res);
+ result = gen_rtx_PLUS (mode, add1, mult_res);
return result;
}
instruction on machines with complex addressing modes.
If we can't recognize it, then delete it and emit insns
to calculate the value from scratch. */
- emit_insn_before (gen_rtx (SET, VOIDmode, tem,
- copy_rtx (v->new_reg)),
+ emit_insn_before (gen_rtx_SET (VOIDmode, tem,
+ copy_rtx (v->new_reg)),
loop_start);
if (recog_memoized (PREV_INSN (loop_start)) < 0)
{
/* HACK: Must also search the loop fall through exit, create a label_ref
here which points to the loop_end, and append the loop_number_exit_labels
list to it. */
- label = gen_rtx (LABEL_REF, VOIDmode, loop_end);
+ label = gen_rtx_LABEL_REF (VOIDmode, loop_end);
LABEL_NEXTREF (label) = loop_number_exit_labels[this_loop_num];
for ( ; label; label = LABEL_NEXTREF (label))
if (DECL_RTL (decl) == 0)
{
DECL_RTL (decl)
- = gen_rtx (MEM, DECL_MODE (decl),
- gen_rtx (SYMBOL_REF, Pmode, name));
+ = gen_rtx_MEM (DECL_MODE (decl),
+ gen_rtx_SYMBOL_REF (Pmode, name));
/* Optionally set flags or add text to the name to record information
such as that it is a function name. If the name is changed, the macro
kludge to avoid setting DECL_RTL to frame_pointer_rtx. */
DECL_RTL (decl)
- = gen_rtx (REG, DECL_MODE (decl), FIRST_PSEUDO_REGISTER);
+ = gen_rtx_REG (DECL_MODE (decl), FIRST_PSEUDO_REGISTER);
REGNO (DECL_RTL (decl)) = reg_number;
REG_USERVAR_P (DECL_RTL (decl)) = 1;
new_name, strlen (new_name));
}
- DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl),
- gen_rtx (SYMBOL_REF, Pmode, name));
+ DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl),
+ gen_rtx_SYMBOL_REF (Pmode, name));
/* If this variable is to be treated as volatile, show its
tree node has side effects. If it has side effects, either
if (output_bytecode)
x = bc_gen_rtx (namestring, 0, (struct bc_label *) 0);
else
- x = gen_rtx (SYMBOL_REF, Pmode, namestring);
+ x = gen_rtx_SYMBOL_REF (Pmode, namestring);
if (output_bytecode)
{
ASM_GENERATE_INTERNAL_LABEL (label, "LTRAMP", 0);
name
= (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
- return gen_rtx (SYMBOL_REF, Pmode, name);
+ return gen_rtx_SYMBOL_REF (Pmode, name);
}
#endif
\f
push_obstacks_nochange ();
rtl_in_saveable_obstack ();
- r = gen_rtx (CONST_DOUBLE, mode, NULL_RTX, i0, i1);
+ r = gen_rtx_CONST_DOUBLE (mode, NULL_RTX, i0, i1);
pop_obstacks ();
/* Don't touch const_double_chain in nested function; see force_const_mem.
/* FIXME: this may not be correct, check it */
x = bc_gen_rtx (TREE_STRING_POINTER (target), 0, (struct bc_label *) 0);
else
- x = gen_rtx (MEM, FUNCTION_MODE,
- gen_rtx (LABEL_REF, VOIDmode,
- label_rtx (TREE_OPERAND (exp, 0))));
+ x = gen_rtx_MEM (FUNCTION_MODE,
+ gen_rtx_LABEL_REF (VOIDmode,
+ label_rtx (TREE_OPERAND (exp, 0))));
break;
case REAL_CST:
if (TREE_PERMANENT (exp))
end_temporary_allocation ();
- def = gen_rtx (SYMBOL_REF, Pmode, desc->label);
+ def = gen_rtx_SYMBOL_REF (Pmode, desc->label);
TREE_CST_RTL (exp)
- = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)), def);
+ = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), def);
RTX_UNCHANGING_P (TREE_CST_RTL (exp)) = 1;
if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
MEM_IN_STRUCT_P (TREE_CST_RTL (exp)) = 1;
rtl_in_saveable_obstack ();
if (GET_CODE (x) == CONST)
- x = gen_rtx (CONST, GET_MODE (x),
- gen_rtx (PLUS, GET_MODE (x),
- XEXP (XEXP (x, 0), 0),
- XEXP (XEXP (x, 0), 1)));
+ x = gen_rtx_CONST (GET_MODE (x),
+ gen_rtx_PLUS (GET_MODE (x),
+ XEXP (XEXP (x, 0), 0),
+ XEXP (XEXP (x, 0), 1)));
else
x = GEN_INT (INTVAL (x));
/* We have a symbol name; construct the SYMBOL_REF and the MEM. */
- def = gen_rtx (MEM, mode, gen_rtx (SYMBOL_REF, Pmode, found));
+ def = gen_rtx_MEM (mode, gen_rtx_SYMBOL_REF (Pmode, found));
RTX_UNCHANGING_P (def) = 1;
/* Mark the symbol_ref as belonging to this constants pool. */