+2005-05-11 Richard Henderson <rth@redhat.com>
+
+ PR target/21412
+ * config/ia64/ia64.c (TARGET_CANNOT_FORCE_CONST_MEM): New.
+ (ia64_cannot_force_const_mem): New.
+ (tls_symbolic_operand_type): New.
+ (ia64_legitimate_constant_p): New.
+ (ia64_expand_load_address): Return true on success. Improve
+ checks for when we should not split.
+ (ia64_expand_tls_address): New addend operand. Distribute it
+ as appropriate to the tls_kind. Delay referencing gp.
+ (ia64_expand_move): Split symbolic addend as necessary. Handle
+ tls symbols with addends.
+ * config/ia64/ia64-protos.h: Update.
+ * config/ia64/ia64.h (CALL_REALLY_USED_REGISTERS): False for r0,
+ p0, f0, f1, and r13.
+ (LEGITIMATE_CONSTANT_P): Move to ia64_legitimate_constant_p.
+ * config/ia64/ia64.md (UNSPEC_DTPMOD): New.
+ (symbolic_operand splitter): Pass everything through
+ ia64_expand_load_address and FAIL or DONE as appropriate.
+ (load_fptr): Only accept after reload.
+ (load_fptr_internal1, gprel64_offset, load_gprel64, load_symptr_high,
+ load_symptr_low, load_ltoff_dtpmod): Likewise.
+ (load_dtpmod): New.
+ (load_dtprel): Only accept tls symbols.
+ (load_dtprel64, load_dtprel22): Likewise.
+ (load_tprel, load_tprel64, load_tprel22): Likewise.
+ (load_dtprel_gd, load_ltoff_dtprel, load_tprel_ie): New.
+ (add_dtprel): Only accept tls symbols. Canonicalize PLUS.
+ (add_dtprel14, add_dtprel22): Likewise.
+ (add_tprel, add_tprel14, add_tprel22): Likewise.
+ * config/ia64/predicates.md (small_addr_symbolic_operand): New.
+ (any_offset_symbol_operand, aligned_offset_symbol_operand): New.
+ (got_symbolic_operand): Check CONST offsets.
+ (tls_symbolic_operand, ld_tls_symbolic_operand): New.
+ (ie_tls_symbolic_operand, le_tls_symbolic_operand): New.
+ (move_operand): Don't handle tls here. Check CONST offsets.
+
2005-05-11 Richard Sandiford <rsandifo@redhat.com>
* config/mips/7000.md (rm7_impy_si_mult): Just match imul and imadd.
extern bool ia64_const_ok_for_letter_p (HOST_WIDE_INT, char);
extern bool ia64_const_double_ok_for_letter_p (rtx, char);
extern bool ia64_extra_constraint (rtx, char);
+extern bool ia64_legitimate_constant_p (rtx);
extern rtx ia64_expand_move (rtx, rtx);
extern int ia64_move_ok (rtx, rtx);
extern void ia64_expand_epilogue (int);
extern int ia64_direct_return (void);
-extern void ia64_expand_load_address (rtx, rtx);
+extern bool ia64_expand_load_address (rtx, rtx);
extern int ia64_hard_regno_rename_ok (int, int);
extern void ia64_initialize_trampoline (rtx, rtx, rtx);
static int ia64_dfa_new_cycle (FILE *, int, rtx, int, int, int *);
static rtx gen_tls_get_addr (void);
static rtx gen_thread_pointer (void);
-static rtx ia64_expand_tls_address (enum tls_model, rtx, rtx);
static int find_gr_spill (int);
static int next_scratch_gr_reg (void);
static void mark_reg_gr_used_mask (rtx, void *);
static tree ia64_gimplify_va_arg (tree, tree, tree *, tree *);
static bool ia64_scalar_mode_supported_p (enum machine_mode mode);
static bool ia64_vector_mode_supported_p (enum machine_mode mode);
-
+static bool ia64_cannot_force_const_mem (rtx);
\f
/* Table of valid machine attributes. */
static const struct attribute_spec ia64_attribute_table[] =
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION ia64_handle_option
+#undef TARGET_CANNOT_FORCE_CONST_MEM
+#define TARGET_CANNOT_FORCE_CONST_MEM ia64_cannot_force_const_mem
+
struct gcc_target targetm = TARGET_INITIALIZER;
\f
typedef enum
return exact_log2 (op + 1);
}
+/* Return the TLS model to use for ADDR. */
+
+static enum tls_model
+tls_symbolic_operand_type (rtx addr)
+{
+ enum tls_model tls_kind = 0;
+
+ if (GET_CODE (addr) == CONST)
+ {
+ if (GET_CODE (XEXP (addr, 0)) == PLUS
+ && GET_CODE (XEXP (XEXP (addr, 0), 0)) == SYMBOL_REF)
+ tls_kind = SYMBOL_REF_TLS_MODEL (XEXP (XEXP (addr, 0), 0));
+ }
+ else if (GET_CODE (addr) == SYMBOL_REF)
+ tls_kind = SYMBOL_REF_TLS_MODEL (addr);
+
+ return tls_kind;
+}
+
+/* Return true if X is a constant that is valid for some immediate
+ field in an instruction. */
+
+bool
+ia64_legitimate_constant_p (rtx x)
+{
+ switch (GET_CODE (x))
+ {
+ case CONST_INT:
+ case LABEL_REF:
+ return true;
+
+ case CONST_DOUBLE:
+ if (GET_MODE (x) == VOIDmode)
+ return true;
+ return CONST_DOUBLE_OK_FOR_G (x);
+
+ case CONST:
+ case SYMBOL_REF:
+ return tls_symbolic_operand_type (x) == 0;
+
+ default:
+ return false;
+ }
+}
+
+/* Don't allow TLS addresses to get spilled to memory. */
+
+static bool
+ia64_cannot_force_const_mem (rtx x)
+{
+ return tls_symbolic_operand_type (x) != 0;
+}
+
/* Expand a symbolic constant load. */
-void
+bool
ia64_expand_load_address (rtx dest, rtx src)
{
- gcc_assert (GET_CODE (src) != SYMBOL_REF || !SYMBOL_REF_TLS_MODEL (src));
gcc_assert (GET_CODE (dest) == REG);
/* ILP32 mode still loads 64-bits of data from the GOT. This avoids
computation below are also more natural to compute as 64-bit quantities.
If we've been given an SImode destination register, change it. */
if (GET_MODE (dest) != Pmode)
- dest = gen_rtx_REG (Pmode, REGNO (dest));
+ dest = gen_rtx_REG_offset (dest, Pmode, REGNO (dest), 0);
- if (GET_CODE (src) == SYMBOL_REF && SYMBOL_REF_SMALL_ADDR_P (src))
- {
- emit_insn (gen_rtx_SET (VOIDmode, dest, src));
- return;
- }
- else if (TARGET_AUTO_PIC)
- {
- emit_insn (gen_load_gprel64 (dest, src));
- return;
- }
+ if (TARGET_NO_PIC)
+ return false;
+ if (small_addr_symbolic_operand (src, VOIDmode))
+ return false;
+
+ if (TARGET_AUTO_PIC)
+ emit_insn (gen_load_gprel64 (dest, src));
else if (GET_CODE (src) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (src))
- {
- emit_insn (gen_load_fptr (dest, src));
- return;
- }
+ emit_insn (gen_load_fptr (dest, src));
else if (sdata_symbolic_operand (src, VOIDmode))
+ emit_insn (gen_load_gprel (dest, src));
+ else
{
- emit_insn (gen_load_gprel (dest, src));
- return;
- }
+ HOST_WIDE_INT addend = 0;
+ rtx tmp;
- if (GET_CODE (src) == CONST
- && GET_CODE (XEXP (src, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
- && (INTVAL (XEXP (XEXP (src, 0), 1)) & 0x3fff) != 0)
- {
- rtx sym = XEXP (XEXP (src, 0), 0);
- HOST_WIDE_INT ofs, hi, lo;
+ /* We did split constant offsets in ia64_expand_move, and we did try
+ to keep them split in move_operand, but we also allowed reload to
+ rematerialize arbitrary constants rather than spill the value to
+ the stack and reload it. So we have to be prepared here to split
+ them apart again. */
+ if (GET_CODE (src) == CONST)
+ {
+ HOST_WIDE_INT hi, lo;
- /* Split the offset into a sign extended 14-bit low part
- and a complementary high part. */
- ofs = INTVAL (XEXP (XEXP (src, 0), 1));
- lo = ((ofs & 0x3fff) ^ 0x2000) - 0x2000;
- hi = ofs - lo;
+ hi = INTVAL (XEXP (XEXP (src, 0), 1));
+ lo = ((hi & 0x3fff) ^ 0x2000) - 0x2000;
+ hi = hi - lo;
- ia64_expand_load_address (dest, plus_constant (sym, hi));
- emit_insn (gen_adddi3 (dest, dest, GEN_INT (lo)));
- }
- else
- {
- rtx tmp;
+ if (lo != 0)
+ {
+ addend = lo;
+ src = plus_constant (XEXP (XEXP (src, 0), 0), hi);
+ }
+ }
tmp = gen_rtx_HIGH (Pmode, src);
tmp = gen_rtx_PLUS (Pmode, tmp, pic_offset_table_rtx);
emit_insn (gen_rtx_SET (VOIDmode, dest, tmp));
- tmp = gen_rtx_LO_SUM (GET_MODE (dest), dest, src);
+ tmp = gen_rtx_LO_SUM (Pmode, dest, src);
emit_insn (gen_rtx_SET (VOIDmode, dest, tmp));
+
+ if (addend)
+ {
+ tmp = gen_rtx_PLUS (Pmode, dest, GEN_INT (addend));
+ emit_insn (gen_rtx_SET (VOIDmode, dest, tmp));
+ }
}
+
+ return true;
}
static GTY(()) rtx gen_tls_tga;
}
static rtx
-ia64_expand_tls_address (enum tls_model tls_kind, rtx op0, rtx op1)
+ia64_expand_tls_address (enum tls_model tls_kind, rtx op0, rtx op1,
+ HOST_WIDE_INT addend)
{
rtx tga_op1, tga_op2, tga_ret, tga_eqv, tmp, insns;
- rtx orig_op0 = op0;
+ rtx orig_op0 = op0, orig_op1 = op1;
+ HOST_WIDE_INT addend_lo, addend_hi;
+
+ addend_lo = ((addend & 0x3fff) ^ 0x2000) - 0x2000;
+ addend_hi = addend - addend_lo;
switch (tls_kind)
{
start_sequence ();
tga_op1 = gen_reg_rtx (Pmode);
- emit_insn (gen_load_ltoff_dtpmod (tga_op1, op1));
- tga_op1 = gen_const_mem (Pmode, tga_op1);
+ emit_insn (gen_load_dtpmod (tga_op1, op1));
tga_op2 = gen_reg_rtx (Pmode);
- emit_insn (gen_load_ltoff_dtprel (tga_op2, op1));
- tga_op2 = gen_const_mem (Pmode, tga_op2);
+ emit_insn (gen_load_dtprel (tga_op2, op1));
tga_ret = emit_library_call_value (gen_tls_get_addr (), NULL_RTX,
LCT_CONST, Pmode, 2, tga_op1,
start_sequence ();
tga_op1 = gen_reg_rtx (Pmode);
- emit_insn (gen_load_ltoff_dtpmod (tga_op1, op1));
+ emit_insn (gen_load_dtpmod (tga_op1, op1));
tga_op1 = gen_const_mem (Pmode, tga_op1);
tga_op2 = const0_rtx;
emit_insn (gen_adddi3 (op0, tmp, op0));
}
else
- emit_insn (gen_add_dtprel (op0, tmp, op1));
+ emit_insn (gen_add_dtprel (op0, op1, tmp));
break;
case TLS_MODEL_INITIAL_EXEC:
+ op1 = plus_constant (op1, addend_hi);
+ addend = addend_lo;
+
tmp = gen_reg_rtx (Pmode);
- emit_insn (gen_load_ltoff_tprel (tmp, op1));
- tmp = gen_const_mem (Pmode, tmp);
- tmp = force_reg (Pmode, tmp);
+ emit_insn (gen_load_tprel (tmp, op1));
if (!register_operand (op0, Pmode))
op0 = gen_reg_rtx (Pmode);
case TLS_MODEL_LOCAL_EXEC:
if (!register_operand (op0, Pmode))
op0 = gen_reg_rtx (Pmode);
+
+ op1 = orig_op1;
+ addend = 0;
if (TARGET_TLS64)
{
emit_insn (gen_load_tprel (op0, op1));
- emit_insn (gen_adddi3 (op0, gen_thread_pointer (), op0));
+ emit_insn (gen_adddi3 (op0, op0, gen_thread_pointer ()));
}
else
- emit_insn (gen_add_tprel (op0, gen_thread_pointer (), op1));
+ emit_insn (gen_add_tprel (op0, op1, gen_thread_pointer ()));
break;
default:
gcc_unreachable ();
}
+ if (addend)
+ op0 = expand_simple_binop (Pmode, PLUS, op0, GEN_INT (addend),
+ orig_op0, 1, OPTAB_DIRECT);
if (orig_op0 == op0)
return NULL_RTX;
if (GET_MODE (orig_op0) == Pmode)
if ((mode == Pmode || mode == ptr_mode) && symbolic_operand (op1, VOIDmode))
{
+ HOST_WIDE_INT addend = 0;
enum tls_model tls_kind;
- if (GET_CODE (op1) == SYMBOL_REF
- && (tls_kind = SYMBOL_REF_TLS_MODEL (op1)))
- return ia64_expand_tls_address (tls_kind, op0, op1);
+ rtx sym = op1;
+
+ if (GET_CODE (op1) == CONST
+ && GET_CODE (XEXP (op1, 0)) == PLUS
+ && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT)
+ {
+ addend = INTVAL (XEXP (XEXP (op1, 0), 1));
+ sym = XEXP (XEXP (op1, 0), 0);
+ }
+
+ tls_kind = tls_symbolic_operand_type (sym);
+ if (tls_kind)
+ return ia64_expand_tls_address (tls_kind, op0, sym, addend);
+
+ if (any_offset_symbol_operand (sym, mode))
+ addend = 0;
+ else if (aligned_offset_symbol_operand (sym, mode))
+ {
+ HOST_WIDE_INT addend_lo, addend_hi;
+
+ addend_lo = ((addend & 0x3fff) ^ 0x2000) - 0x2000;
+ addend_hi = addend - addend_lo;
+
+ if (addend_lo != 0)
+ {
+ op1 = plus_constant (sym, addend_hi);
+ addend = addend_lo;
+ }
+ }
+ else
+ op1 = sym;
- if (!TARGET_NO_PIC && reload_completed)
+ if (reload_completed)
{
- ia64_expand_load_address (op0, op1);
- return NULL_RTX;
+ /* We really should have taken care of this offset earlier. */
+ gcc_assert (addend == 0);
+ if (ia64_expand_load_address (op0, op1))
+ return NULL_RTX;
+ }
+
+ if (addend)
+ {
+ rtx subtarget = no_new_pseudos ? op0 : gen_reg_rtx (mode);
+
+ emit_insn (gen_rtx_SET (VOIDmode, subtarget, op1));
+
+ op1 = expand_simple_binop (mode, PLUS, subtarget,
+ GEN_INT (addend), op0, 1, OPTAB_DIRECT);
+ if (op0 == op1)
+ return NULL_RTX;
}
}
#define CALL_REALLY_USED_REGISTERS \
{ /* General registers. */ \
- 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, \
+ 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, \
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, \
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, \
/* Floating-point registers. */ \
- 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, \
+ 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, \
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, \
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, \
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, \
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, \
/* Predicate registers. */ \
- 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, \
+ 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, \
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \
/* A C expression that is nonzero if X is a legitimate constant for an
immediate operand on the target machine. */
-#define LEGITIMATE_CONSTANT_P(X) \
- (GET_CODE (X) != CONST_DOUBLE || GET_MODE (X) == VOIDmode \
- || GET_MODE (X) == DImode || CONST_DOUBLE_OK_FOR_G (X)) \
-
+#define LEGITIMATE_CONSTANT_P(X) ia64_legitimate_constant_p (X)
\f
/* Condition Code Status */
(UNSPEC_DTPREL 2)
(UNSPEC_LTOFF_TPREL 3)
(UNSPEC_TPREL 4)
+ (UNSPEC_DTPMOD 5)
(UNSPEC_LD_BASE 9)
(UNSPEC_GR_SPILL 10)
(define_split
[(set (match_operand 0 "register_operand" "")
(match_operand 1 "symbolic_operand" ""))]
- "reload_completed && ! TARGET_NO_PIC"
+ "reload_completed"
[(const_int 0)]
{
- ia64_expand_load_address (operands[0], operands[1]);
- DONE;
+ if (ia64_expand_load_address (operands[0], operands[1]))
+ DONE;
+ else
+ FAIL;
})
(define_expand "load_fptr"
- [(set (match_dup 2)
- (plus:DI (reg:DI 1) (match_operand 1 "function_operand" "")))
- (set (match_operand:DI 0 "register_operand" "") (match_dup 3))]
- ""
+ [(set (match_operand:DI 0 "register_operand" "")
+ (plus:DI (match_dup 2) (match_operand 1 "function_operand" "")))
+ (set (match_dup 0) (match_dup 3))]
+ "reload_completed"
{
- operands[2] = no_new_pseudos ? operands[0] : gen_reg_rtx (DImode);
- operands[3] = gen_const_mem (DImode, operands[2]);
+ operands[2] = pic_offset_table_rtx;
+ operands[3] = gen_const_mem (DImode, operands[0]);
})
(define_insn "*load_fptr_internal1"
[(set (match_operand:DI 0 "register_operand" "=r")
(plus:DI (reg:DI 1) (match_operand 1 "function_operand" "s")))]
- ""
+ "reload_completed"
"addl %0 = @ltoff(@fptr(%1)), gp"
[(set_attr "itanium_class" "ialu")])
(define_insn "load_gprel"
[(set (match_operand:DI 0 "register_operand" "=r")
(plus:DI (reg:DI 1) (match_operand 1 "sdata_symbolic_operand" "s")))]
- ""
+ "reload_completed"
"addl %0 = @gprel(%1), gp"
[(set_attr "itanium_class" "ialu")])
-(define_insn "gprel64_offset"
+(define_insn "*gprel64_offset"
[(set (match_operand:DI 0 "register_operand" "=r")
(minus:DI (match_operand:DI 1 "symbolic_operand" "") (reg:DI 1)))]
- ""
+ "reload_completed"
"movl %0 = @gprel(%1)"
[(set_attr "itanium_class" "long_i")])
(define_expand "load_gprel64"
- [(set (match_dup 2)
- (minus:DI (match_operand:DI 1 "symbolic_operand" "") (match_dup 3)))
- (set (match_operand:DI 0 "register_operand" "")
- (plus:DI (match_dup 3) (match_dup 2)))]
- ""
+ [(set (match_operand:DI 0 "register_operand" "")
+ (minus:DI (match_operand:DI 1 "symbolic_operand" "") (match_dup 2)))
+ (set (match_dup 0)
+ (plus:DI (match_dup 2) (match_dup 0)))]
+ "reload_completed"
{
- operands[2] = no_new_pseudos ? operands[0] : gen_reg_rtx (DImode);
- operands[3] = pic_offset_table_rtx;
+ operands[2] = pic_offset_table_rtx;
})
;; This is used as a placeholder for the return address during early
[(set (match_operand:DI 0 "register_operand" "=r")
(plus:DI (high:DI (match_operand 1 "got_symbolic_operand" "s"))
(match_operand:DI 2 "register_operand" "a")))]
- ""
+ "reload_completed"
{
if (HAVE_AS_LTOFFX_LDXMOV_RELOCS)
return "%,addl %0 = @ltoffx(%1), %2";
[(set (match_operand:DI 0 "register_operand" "=r")
(lo_sum:DI (match_operand:DI 1 "register_operand" "r")
(match_operand 2 "got_symbolic_operand" "s")))]
- ""
+ "reload_completed"
{
if (HAVE_AS_LTOFFX_LDXMOV_RELOCS)
return "%,ld8.mov %0 = [%1], %2";
}
[(set_attr "itanium_class" "ld")])
-(define_insn "load_ltoff_dtpmod"
+(define_insn_and_split "load_dtpmod"
[(set (match_operand:DI 0 "register_operand" "=r")
- (plus:DI (reg:DI 1)
- (unspec:DI [(match_operand:DI 1 "symbolic_operand" "")]
- UNSPEC_LTOFF_DTPMOD)))]
+ (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")]
+ UNSPEC_DTPMOD))]
""
- "addl %0 = @ltoff(@dtpmod(%1)), gp"
- [(set_attr "itanium_class" "ialu")])
+ "#"
+ "reload_completed"
+ [(set (match_dup 0)
+ (plus:DI (unspec:DI [(match_dup 1)] UNSPEC_LTOFF_DTPMOD)
+ (match_dup 2)))
+ (set (match_dup 0) (match_dup 3))]
+{
+ operands[2] = pic_offset_table_rtx;
+ operands[3] = gen_const_mem (DImode, operands[0]);
+})
-(define_insn "load_ltoff_dtprel"
+(define_insn "*load_ltoff_dtpmod"
[(set (match_operand:DI 0 "register_operand" "=r")
- (plus:DI (reg:DI 1)
- (unspec:DI [(match_operand:DI 1 "symbolic_operand" "")]
- UNSPEC_LTOFF_DTPREL)))]
- ""
- "addl %0 = @ltoff(@dtprel(%1)), gp"
+ (plus:DI (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")]
+ UNSPEC_LTOFF_DTPMOD)
+ (match_operand:DI 2 "register_operand" "a")))]
+ "reload_completed"
+ "addl %0 = @ltoff(@dtpmod(%1)), %2"
[(set_attr "itanium_class" "ialu")])
(define_expand "load_dtprel"
[(set (match_operand:DI 0 "register_operand" "")
- (unspec:DI [(match_operand:DI 1 "symbolic_operand" "")]
+ (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")]
UNSPEC_DTPREL))]
""
"")
(define_insn "*load_dtprel64"
[(set (match_operand:DI 0 "register_operand" "=r")
- (unspec:DI [(match_operand:DI 1 "symbolic_operand" "")]
+ (unspec:DI [(match_operand:DI 1 "ld_tls_symbolic_operand" "")]
UNSPEC_DTPREL))]
"TARGET_TLS64"
"movl %0 = @dtprel(%1)"
(define_insn "*load_dtprel22"
[(set (match_operand:DI 0 "register_operand" "=r")
- (unspec:DI [(match_operand:DI 1 "symbolic_operand" "")]
+ (unspec:DI [(match_operand:DI 1 "ld_tls_symbolic_operand" "")]
UNSPEC_DTPREL))]
""
"addl %0 = @dtprel(%1), r0"
[(set_attr "itanium_class" "ialu")])
+(define_insn_and_split "*load_dtprel_gd"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")]
+ UNSPEC_DTPREL))]
+ ""
+ "#"
+ "reload_completed"
+ [(set (match_dup 0)
+ (plus:DI (unspec:DI [(match_dup 1)] UNSPEC_LTOFF_DTPREL)
+ (match_dup 2)))
+ (set (match_dup 0) (match_dup 3))]
+{
+ operands[2] = pic_offset_table_rtx;
+ operands[3] = gen_const_mem (DImode, operands[0]);
+})
+
+(define_insn "*load_ltoff_dtprel"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (plus:DI (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")]
+ UNSPEC_LTOFF_DTPREL)
+ (match_operand:DI 2 "register_operand" "a")))]
+ ""
+ "addl %0 = @ltoff(@dtprel(%1)), %2"
+ [(set_attr "itanium_class" "ialu")])
+
(define_expand "add_dtprel"
[(set (match_operand:DI 0 "register_operand" "")
- (plus:DI (match_operand:DI 1 "register_operand" "")
- (unspec:DI [(match_operand:DI 2 "symbolic_operand" "")]
- UNSPEC_DTPREL)))]
+ (plus:DI (unspec:DI [(match_operand:DI 1 "ld_tls_symbolic_operand" "")]
+ UNSPEC_DTPREL)
+ (match_operand:DI 2 "register_operand" "")))]
"!TARGET_TLS64"
"")
(define_insn "*add_dtprel14"
[(set (match_operand:DI 0 "register_operand" "=r")
- (plus:DI (match_operand:DI 1 "register_operand" "r")
- (unspec:DI [(match_operand:DI 2 "symbolic_operand" "")]
- UNSPEC_DTPREL)))]
+ (plus:DI (unspec:DI [(match_operand:DI 1 "ld_tls_symbolic_operand" "")]
+ UNSPEC_DTPREL)
+ (match_operand:DI 2 "register_operand" "r")))]
"TARGET_TLS14"
- "adds %0 = @dtprel(%2), %1"
+ "adds %0 = @dtprel(%1), %2"
[(set_attr "itanium_class" "ialu")])
(define_insn "*add_dtprel22"
[(set (match_operand:DI 0 "register_operand" "=r")
- (plus:DI (match_operand:DI 1 "register_operand" "a")
- (unspec:DI [(match_operand:DI 2 "symbolic_operand" "")]
- UNSPEC_DTPREL)))]
+ (plus:DI (unspec:DI [(match_operand:DI 1 "ld_tls_symbolic_operand" "")]
+ UNSPEC_DTPREL)
+ (match_operand:DI 2 "register_operand" "a")))]
"TARGET_TLS22"
- "addl %0 = @dtprel(%2), %1"
- [(set_attr "itanium_class" "ialu")])
-
-(define_insn "load_ltoff_tprel"
- [(set (match_operand:DI 0 "register_operand" "=r")
- (plus:DI (reg:DI 1)
- (unspec:DI [(match_operand:DI 1 "symbolic_operand" "")]
- UNSPEC_LTOFF_TPREL)))]
- ""
- "addl %0 = @ltoff(@tprel(%1)), gp"
+ "addl %0 = @dtprel(%1), %2"
[(set_attr "itanium_class" "ialu")])
(define_expand "load_tprel"
[(set (match_operand:DI 0 "register_operand" "")
- (unspec:DI [(match_operand:DI 1 "symbolic_operand" "")]
+ (unspec:DI [(match_operand:DI 1 "tls_symbolic_operand" "")]
UNSPEC_TPREL))]
""
"")
(define_insn "*load_tprel64"
[(set (match_operand:DI 0 "register_operand" "=r")
- (unspec:DI [(match_operand:DI 1 "symbolic_operand" "")]
+ (unspec:DI [(match_operand:DI 1 "le_tls_symbolic_operand" "")]
UNSPEC_TPREL))]
"TARGET_TLS64"
"movl %0 = @tprel(%1)"
(define_insn "*load_tprel22"
[(set (match_operand:DI 0 "register_operand" "=r")
- (unspec:DI [(match_operand:DI 1 "symbolic_operand" "")]
+ (unspec:DI [(match_operand:DI 1 "le_tls_symbolic_operand" "")]
UNSPEC_TPREL))]
""
"addl %0 = @tprel(%1), r0"
[(set_attr "itanium_class" "ialu")])
+(define_insn_and_split "*load_tprel_ie"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (unspec:DI [(match_operand:DI 1 "ie_tls_symbolic_operand" "")]
+ UNSPEC_TPREL))]
+ ""
+ "#"
+ "reload_completed"
+ [(set (match_dup 0)
+ (plus:DI (unspec:DI [(match_dup 1)] UNSPEC_LTOFF_TPREL)
+ (match_dup 2)))
+ (set (match_dup 0) (match_dup 3))]
+{
+ operands[2] = pic_offset_table_rtx;
+ operands[3] = gen_const_mem (DImode, operands[0]);
+})
+
+(define_insn "*load_ltoff_tprel"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (plus:DI (unspec:DI [(match_operand:DI 1 "ie_tls_symbolic_operand" "")]
+ UNSPEC_LTOFF_TPREL)
+ (match_operand:DI 2 "register_operand" "a")))]
+ ""
+ "addl %0 = @ltoff(@tprel(%1)), %2"
+ [(set_attr "itanium_class" "ialu")])
+
(define_expand "add_tprel"
[(set (match_operand:DI 0 "register_operand" "")
- (plus:DI (match_operand:DI 1 "register_operand" "")
- (unspec:DI [(match_operand:DI 2 "symbolic_operand" "")]
- UNSPEC_TPREL)))]
+ (plus:DI (unspec:DI [(match_operand:DI 1 "le_tls_symbolic_operand" "")]
+ UNSPEC_TPREL)
+ (match_operand:DI 2 "register_operand" "")))]
"!TARGET_TLS64"
"")
(define_insn "*add_tprel14"
[(set (match_operand:DI 0 "register_operand" "=r")
- (plus:DI (match_operand:DI 1 "register_operand" "r")
- (unspec:DI [(match_operand:DI 2 "symbolic_operand" "")]
- UNSPEC_TPREL)))]
+ (plus:DI (unspec:DI [(match_operand:DI 1 "le_tls_symbolic_operand" "")]
+ UNSPEC_TPREL)
+ (match_operand:DI 2 "register_operand" "r")))]
"TARGET_TLS14"
- "adds %0 = @tprel(%2), %1"
+ "adds %0 = @tprel(%1), %2"
[(set_attr "itanium_class" "ialu")])
(define_insn "*add_tprel22"
[(set (match_operand:DI 0 "register_operand" "=r")
- (plus:DI (match_operand:DI 1 "register_operand" "a")
- (unspec:DI [(match_operand:DI 2 "symbolic_operand" "")]
- UNSPEC_TPREL)))]
+ (plus:DI (unspec:DI [(match_operand:DI 1 "le_tls_symbolic_operand" "")]
+ UNSPEC_TPREL)
+ (match_operand:DI 2 "register_operand" "a")))]
"TARGET_TLS22"
- "addl %0 = @tprel(%2), %1"
+ "addl %0 = @tprel(%1), %2"
[(set_attr "itanium_class" "ialu")])
;; With no offsettable memory references, we've got to have a scratch
(and (match_code "symbol_ref")
(match_test "SYMBOL_REF_FUNCTION_P (op)")))
-;; True if OP refers to a symbol, and is appropriate for a GOT load.
-(define_predicate "got_symbolic_operand"
- (match_operand 0 "symbolic_operand" "")
-{
- switch (GET_CODE (op))
- {
- case LABEL_REF:
- return true;
-
- case SYMBOL_REF:
- /* This sort of load should not be used for things in sdata. */
- return !SYMBOL_REF_SMALL_ADDR_P (op);
-
- case CONST:
- /* Accept only (plus (symbol_ref) (const_int)). */
- op = XEXP (op, 0);
- if (GET_CODE (op) != PLUS
- || GET_CODE (XEXP (op, 0)) != SYMBOL_REF
- || GET_CODE (XEXP (op, 1)) != CONST_INT)
- return false;
-
- /* Ok if we're not using GOT entries at all. */
- if (TARGET_NO_PIC || TARGET_AUTO_PIC)
- return true;
-
- /* The low 14 bits of the constant have been forced to zero
- by ia64_expand_load_address, so that we do not use up so
- many GOT entries. Prevent cse from undoing this. */
- op = XEXP (op, 1);
- return (INTVAL (op) & 0x3fff) == 0;
-
- default:
- gcc_unreachable ();
- }
-})
-
;; True if OP refers to a symbol in the sdata section.
(define_predicate "sdata_symbolic_operand"
(match_code "symbol_ref,const")
}
})
+;; True if OP refers to a symbol in the small address area.
+(define_predicate "small_addr_symbolic_operand"
+ (match_code "symbol_ref,const")
+{
+ switch (GET_CODE (op))
+ {
+ case CONST:
+ op = XEXP (op, 0);
+ if (GET_CODE (op) != PLUS
+ || GET_CODE (XEXP (op, 0)) != SYMBOL_REF
+ || GET_CODE (XEXP (op, 1)) != CONST_INT)
+ return false;
+ op = XEXP (op, 0);
+ /* FALLTHRU */
+
+ case SYMBOL_REF:
+ return SYMBOL_REF_SMALL_ADDR_P (op);
+
+ default:
+ gcc_unreachable ();
+ }
+})
+
+;; True if OP refers to a symbol with which we may use any offset.
+(define_predicate "any_offset_symbol_operand"
+ (match_code "symbol_ref")
+{
+ if (TARGET_NO_PIC || TARGET_AUTO_PIC)
+ return true;
+ if (SYMBOL_REF_SMALL_ADDR_P (op))
+ return true;
+ if (SYMBOL_REF_FUNCTION_P (op))
+ return false;
+ if (sdata_symbolic_operand (op, mode))
+ return true;
+ return false;
+})
+
+;; True if OP refers to a symbol with which we may use 14-bit aligned offsets.
+;; False if OP refers to a symbol with which we may not use any offset at any
+;; time.
+(define_predicate "aligned_offset_symbol_operand"
+ (and (match_code "symbol_ref")
+ (match_test "! SYMBOL_REF_FUNCTION_P (op)")))
+
+;; True if OP refers to a symbol, and is appropriate for a GOT load.
+(define_predicate "got_symbolic_operand"
+ (match_operand 0 "symbolic_operand" "")
+{
+ HOST_WIDE_INT addend = 0;
+
+ switch (GET_CODE (op))
+ {
+ case LABEL_REF:
+ return true;
+
+ case CONST:
+ /* Accept only (plus (symbol_ref) (const_int)). */
+ op = XEXP (op, 0);
+ if (GET_CODE (op) != PLUS
+ || GET_CODE (XEXP (op, 0)) != SYMBOL_REF
+ || GET_CODE (XEXP (op, 1)) != CONST_INT)
+ return false;
+
+ addend = INTVAL (XEXP (op, 1));
+ op = XEXP (op, 0);
+ /* FALLTHRU */
+
+ case SYMBOL_REF:
+ /* These symbols shouldn't be used with got loads. */
+ if (SYMBOL_REF_SMALL_ADDR_P (op))
+ return false;
+ if (SYMBOL_REF_TLS_MODEL (op) != 0)
+ return false;
+
+ if (any_offset_symbol_operand (op, mode))
+ return true;
+
+ /* The low 14 bits of the constant have been forced to zero
+ so that we do not use up so many GOT entries. Prevent cse
+ from undoing this. */
+ if (aligned_offset_symbol_operand (op, mode))
+ return (addend & 0x3fff) == 0;
+
+ return addend == 0;
+
+ default:
+ gcc_unreachable ();
+ }
+})
+
+;; Return true if OP is a valid thread local storage symbolic operand.
+(define_predicate "tls_symbolic_operand"
+ (match_code "symbol_ref,const")
+{
+ switch (GET_CODE (op))
+ {
+ case SYMBOL_REF:
+ return SYMBOL_REF_TLS_MODEL (op) != 0;
+
+ case CONST:
+ op = XEXP (op, 0);
+ if (GET_CODE (op) != PLUS
+ || GET_CODE (XEXP (op, 0)) != SYMBOL_REF
+ || GET_CODE (XEXP (op, 1)) != CONST_INT)
+ return false;
+
+ /* We only allow certain offsets for certain tls models. */
+ switch (SYMBOL_REF_TLS_MODEL (XEXP (op, 0)))
+ {
+ case TLS_MODEL_GLOBAL_DYNAMIC:
+ case TLS_MODEL_LOCAL_DYNAMIC:
+ return false;
+
+ case TLS_MODEL_INITIAL_EXEC:
+ return (INTVAL (XEXP (op, 1)) & 0x3fff) == 0;
+
+ case TLS_MODEL_LOCAL_EXEC:
+ return true;
+
+ default:
+ return false;
+ }
+
+ default:
+ gcc_unreachable ();
+ }
+})
+
+;; Return true if OP is a local-dynamic thread local storage symbolic operand.
+(define_predicate "ld_tls_symbolic_operand"
+ (and (match_code "symbol_ref")
+ (match_test "SYMBOL_REF_TLS_MODEL (op) == TLS_MODEL_LOCAL_DYNAMIC")))
+
+;; Return true if OP is an initial-exec thread local storage symbolic operand.
+(define_predicate "ie_tls_symbolic_operand"
+ (match_code "symbol_ref,const")
+{
+ switch (GET_CODE (op))
+ {
+ case CONST:
+ op = XEXP (op, 0);
+ if (GET_CODE (op) != PLUS
+ || GET_CODE (XEXP (op, 0)) != SYMBOL_REF
+ || GET_CODE (XEXP (op, 1)) != CONST_INT
+ || (INTVAL (XEXP (op, 1)) & 0x3fff) != 0)
+ return false;
+ op = XEXP (op, 0);
+ /* FALLTHRU */
+
+ case SYMBOL_REF:
+ return SYMBOL_REF_TLS_MODEL (op) == TLS_MODEL_INITIAL_EXEC;
+
+ default:
+ gcc_unreachable ();
+ }
+})
+
+;; Return true if OP is a local-exec thread local storage symbolic operand.
+(define_predicate "le_tls_symbolic_operand"
+ (match_code "symbol_ref,const")
+{
+ switch (GET_CODE (op))
+ {
+ case CONST:
+ op = XEXP (op, 0);
+ if (GET_CODE (op) != PLUS
+ || GET_CODE (XEXP (op, 0)) != SYMBOL_REF
+ || GET_CODE (XEXP (op, 1)) != CONST_INT)
+ return false;
+ op = XEXP (op, 0);
+ /* FALLTHRU */
+
+ case SYMBOL_REF:
+ return SYMBOL_REF_TLS_MODEL (op) == TLS_MODEL_LOCAL_EXEC;
+
+ default:
+ gcc_unreachable ();
+ }
+})
+
;; Like nonimmediate_operand, but don't allow MEMs that try to use a
;; POST_MODIFY with a REG as displacement.
(define_predicate "destination_operand"
(and (match_operand 0 "memory_operand")
(match_test "GET_RTX_CLASS (GET_CODE (XEXP (op, 0))) != RTX_AUTOINC")))
-;; True if OP is a general operand, excluding tls symbolic operands.
+;; True if OP is a general operand, with some restrictions on symbols.
(define_predicate "move_operand"
- (and (match_operand 0 "general_operand")
- (not (match_test
- "GET_CODE (op) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (op)"))))
+ (match_operand 0 "general_operand")
+{
+ switch (GET_CODE (op))
+ {
+ case CONST:
+ {
+ HOST_WIDE_INT addend;
+
+ /* Accept only (plus (symbol_ref) (const_int)). */
+ op = XEXP (op, 0);
+ if (GET_CODE (op) != PLUS
+ || GET_CODE (XEXP (op, 0)) != SYMBOL_REF
+ || GET_CODE (XEXP (op, 1)) != CONST_INT)
+ return false;
+
+ addend = INTVAL (XEXP (op, 1));
+ op = XEXP (op, 0);
+
+ /* After reload, we want to allow any offset whatsoever. This
+ allows reload the opportunity to avoid spilling addresses to
+ the stack, and instead simply substitute in the value from a
+ REG_EQUIV. We'll split this up again when splitting the insn. */
+ if (reload_in_progress || reload_completed)
+ return true;
+
+ /* Some symbol types we allow to use with any offset. */
+ if (any_offset_symbol_operand (op, mode))
+ return true;
+
+ /* Some symbol types we allow offsets with the low 14 bits of the
+ constant forced to zero so that we do not use up so many GOT
+ entries. We want to prevent cse from undoing this. */
+ if (aligned_offset_symbol_operand (op, mode))
+ return (addend & 0x3fff) == 0;
+
+ /* The remaining symbol types may never be used with an offset. */
+ return false;
+ }
+
+ default:
+ return true;
+ }
+})
;; True if OP is a register operand that is (or could be) a GR reg.
(define_predicate "gr_register_operand"