\f
;; Move instructions.
+;; Reload patterns to support multi-word load/store
+;; with non-offsetable address.
+;; Secondary-reload helper for multi-word stores whose address is not
+;; offsetable: copy the full address into the DImode scratch (operand 2),
+;; re-point the MEM at the scratch, and emit a plain store that the
+;; multi-word move splitters can then break into offsetable word accesses.
+;; Operand 2 is earlyclobbered ("=&r") because it is written (with the
+;; address) before operand 1 -- the data being stored -- is consumed, so
+;; the two must not share a hard register.
+(define_expand "reload_noff_store"
+ [(parallel [(match_operand 0 "memory_operand" "=m")
+ (match_operand 1 "register_operand" "r")
+ (match_operand:DI 2 "register_operand" "=&r")])]
+ "TARGET_64BIT"
+{
+ rtx mem = operands[0];
+ rtx addr = XEXP (mem, 0);
+
+ /* Materialize the non-offsetable address in the scratch, then rewrite
+    the MEM to address through it (non-validating variant: reload has
+    already committed to this register).  */
+ emit_move_insn (operands[2], addr);
+ mem = replace_equiv_address_nv (mem, operands[2]);
+
+ emit_insn (gen_rtx_SET (VOIDmode, mem, operands[1]));
+ DONE;
+})
+
+;; Secondary-reload helper for multi-word loads whose address is not
+;; offsetable: copy the address into the DImode scratch (operand 2),
+;; re-point the MEM at it, and emit a plain load for the splitters.
+;; NOTE(review): unlike reload_noff_store, the scratch is plain "=r",
+;; i.e. it may overlap the multi-word destination -- presumably safe
+;; because ix86_split_long_move handles the address register colliding
+;; with a destination word; TODO confirm against ix86_split_long_move.
+(define_expand "reload_noff_load"
+ [(parallel [(match_operand 0 "register_operand" "=r")
+ (match_operand 1 "memory_operand" "m")
+ (match_operand:DI 2 "register_operand" "=r")])]
+ "TARGET_64BIT"
+{
+ rtx mem = operands[1];
+ rtx addr = XEXP (mem, 0);
+
+ /* Materialize the non-offsetable address in the scratch, then rewrite
+    the MEM to address through it before emitting the load.  */
+ emit_move_insn (operands[2], addr);
+ mem = replace_equiv_address_nv (mem, operands[2]);
+
+ emit_insn (gen_rtx_SET (VOIDmode, operands[0], mem));
+ DONE;
+})
+
(define_expand "movoi"
[(set (match_operand:OI 0 "nonimmediate_operand")
(match_operand:OI 1 "general_operand"))]
]
(const_string "OI")))])
-(define_insn "*movti_internal_rex64"
+(define_insn "*movti_internal"
[(set (match_operand:TI 0 "nonimmediate_operand" "=!r ,o ,x,x ,m")
(match_operand:TI 1 "general_operand" "riFo,re,C,xm,x"))]
- "TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
+ "(TARGET_64BIT || TARGET_SSE)
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
switch (which_alternative)
{
gcc_unreachable ();
}
}
- [(set_attr "type" "*,*,sselog1,ssemov,ssemov")
+ [(set_attr "isa" "x64,x64,*,*,*")
+ (set_attr "type" "*,*,sselog1,ssemov,ssemov")
(set_attr "prefix" "*,*,maybe_vex,maybe_vex,maybe_vex")
(set (attr "mode")
(cond [(eq_attr "alternative" "0,1")
(const_string "DI")
- (match_test "TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL")
+ (ior (not (match_test "TARGET_SSE2"))
+ (match_test "TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL"))
(const_string "V4SF")
(and (eq_attr "alternative" "4")
(match_test "TARGET_SSE_TYPELESS_STORES"))
[(const_int 0)]
"ix86_split_long_move (operands); DONE;")
-(define_insn "*movti_internal_sse"
- [(set (match_operand:TI 0 "nonimmediate_operand" "=x,x ,m")
- (match_operand:TI 1 "vector_move_operand" "C ,xm,x"))]
- "TARGET_SSE && !TARGET_64BIT
- && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
-{
- switch (which_alternative)
- {
- case 0:
- return standard_sse_constant_opcode (insn, operands[1]);
- case 1:
- case 2:
- /* TDmode values are passed as TImode on the stack. Moving them
- to stack may result in unaligned memory access. */
- if (misaligned_operand (operands[0], TImode)
- || misaligned_operand (operands[1], TImode))
- {
- if (get_attr_mode (insn) == MODE_V4SF)
- return "%vmovups\t{%1, %0|%0, %1}";
- else
- return "%vmovdqu\t{%1, %0|%0, %1}";
- }
- else
- {
- if (get_attr_mode (insn) == MODE_V4SF)
- return "%vmovaps\t{%1, %0|%0, %1}";
- else
- return "%vmovdqa\t{%1, %0|%0, %1}";
- }
- default:
- gcc_unreachable ();
- }
-}
- [(set_attr "type" "sselog1,ssemov,ssemov")
- (set_attr "prefix" "maybe_vex")
- (set (attr "mode")
- (cond [(match_test "TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL")
- (const_string "V4SF")
- (and (eq_attr "alternative" "2")
- (match_test "TARGET_SSE_TYPELESS_STORES"))
- (const_string "V4SF")
- (match_test "TARGET_AVX")
- (const_string "TI")
- (ior (not (match_test "TARGET_SSE2"))
- (match_test "optimize_function_for_size_p (cfun)"))
- (const_string "V4SF")
- ]
- (const_string "TI")))])
-
-(define_insn "*movdi_internal_rex64"
+(define_insn "*movdi_internal"
[(set (match_operand:DI 0 "nonimmediate_operand"
- "=r,r ,r,m ,*y,m*y,?*y,?r ,?*Ym,*x,m ,*x,*x,?r ,?*Yi,?*x,?*Ym")
+ "=r ,o ,r,r ,r,m ,*y,m*y,*y,?*y,?r ,?*Ym,*x,m ,*x,*x,?r ,?*Yi,?*x,?*Ym")
(match_operand:DI 1 "general_operand"
- "Z ,rem,i,re,C ,*y ,m ,*Ym,r ,C ,*x,*x,m ,*Yi,r ,*Ym,*x"))]
- "TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
+ "riFo,riF,Z,rem,i,re,C ,*y ,m ,m ,*Ym,r ,C ,*x,*x,m ,*Yi,r ,*Ym,*x"))]
+ "!(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
switch (get_attr_type (insn))
{
- case TYPE_SSECVT:
- if (SSE_REG_P (operands[0]))
- return "movq2dq\t{%1, %0|%0, %1}";
- else
- return "movdq2q\t{%1, %0|%0, %1}";
-
- case TYPE_SSEMOV:
- if (get_attr_mode (insn) == MODE_V4SF)
- return "%vmovaps\t{%1, %0|%0, %1}";
- else if (get_attr_mode (insn) == MODE_TI)
- return "%vmovdqa\t{%1, %0|%0, %1}";
+ case TYPE_MULTI:
+ return "#";
- /* Handle broken assemblers that require movd instead of movq. */
- if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
- return "%vmovd\t{%1, %0|%0, %1}";
- else
- return "%vmovq\t{%1, %0|%0, %1}";
+ case TYPE_MMX:
+ return "pxor\t%0, %0";
case TYPE_MMXMOV:
/* Handle broken assemblers that require movd instead of movq. */
case TYPE_SSELOG1:
return standard_sse_constant_opcode (insn, operands[1]);
- case TYPE_MMX:
- return "pxor\t%0, %0";
+ case TYPE_SSEMOV:
+ switch (get_attr_mode (insn))
+ {
+ case MODE_DI:
+ return "%vmovq\t{%1, %0|%0, %1}";
+ case MODE_TI:
+ return "%vmovdqa\t{%1, %0|%0, %1}";
+ case MODE_V4SF:
+ return "%vmovaps\t{%1, %0|%0, %1}";
+ case MODE_V2SF:
+ gcc_assert (!TARGET_AVX);
+ return "movlps\t{%1, %0|%0, %1}";
+ default:
+ gcc_unreachable ();
+ }
+
+ case TYPE_SSECVT:
+ if (SSE_REG_P (operands[0]))
+ return "movq2dq\t{%1, %0|%0, %1}";
+ else
+ return "movdq2q\t{%1, %0|%0, %1}";
case TYPE_LEA:
return "lea{q}\t{%E1, %0|%0, %E1}";
gcc_assert (!flag_pic || LEGITIMATE_PIC_OPERAND_P (operands[1]));
if (get_attr_mode (insn) == MODE_SI)
return "mov{l}\t{%k1, %k0|%k0, %k1}";
- else if (which_alternative == 2)
+ else if (which_alternative == 4)
return "movabs{q}\t{%1, %0|%0, %1}";
else if (ix86_use_lea_for_mov (insn, operands))
return "lea{q}\t{%E1, %0|%0, %E1}";
return "mov{q}\t{%1, %0|%0, %1}";
}
}
- [(set (attr "type")
- (cond [(eq_attr "alternative" "4")
+ [(set (attr "isa")
+ (cond [(eq_attr "alternative" "0,1,8")
+ (const_string "nox64")
+ (eq_attr "alternative" "2,3,4,5,9,10,11,16,17")
+ (const_string "x64")
+ (eq_attr "alternative" "18,19")
+ (const_string "sse2")
+ ]
+ (const_string "*")))
+ (set (attr "type")
+ (cond [(eq_attr "alternative" "0,1")
+ (const_string "multi")
+ (eq_attr "alternative" "6")
(const_string "mmx")
- (eq_attr "alternative" "5,6,7,8")
+ (eq_attr "alternative" "7,8,9,10,11")
(const_string "mmxmov")
- (eq_attr "alternative" "9")
+ (eq_attr "alternative" "12")
(const_string "sselog1")
- (eq_attr "alternative" "10,11,12,13,14")
+ (eq_attr "alternative" "13,14,15,16,17")
(const_string "ssemov")
- (eq_attr "alternative" "15,16")
+ (eq_attr "alternative" "18,19")
(const_string "ssecvt")
(match_operand 1 "pic_32bit_operand")
(const_string "lea")
(const_string "imov")))
(set (attr "modrm")
(if_then_else
- (and (eq_attr "alternative" "2") (eq_attr "type" "imov"))
+ (and (eq_attr "alternative" "4") (eq_attr "type" "imov"))
(const_string "0")
(const_string "*")))
(set (attr "length_immediate")
(if_then_else
- (and (eq_attr "alternative" "2") (eq_attr "type" "imov"))
+ (and (eq_attr "alternative" "4") (eq_attr "type" "imov"))
(const_string "8")
(const_string "*")))
(set (attr "prefix_rex")
- (if_then_else (eq_attr "alternative" "7,8")
- (const_string "1")
- (const_string "*")))
- (set (attr "prefix_data16")
- (if_then_else (eq_attr "alternative" "10")
+ (if_then_else (eq_attr "alternative" "10,11")
(const_string "1")
(const_string "*")))
(set (attr "prefix")
- (if_then_else (eq_attr "alternative" "9,10,11,12,13,14")
+ (if_then_else (eq_attr "type" "sselog1,ssemov")
(const_string "maybe_vex")
(const_string "orig")))
+ (set (attr "prefix_data16")
+ (if_then_else (and (eq_attr "type" "ssemov") (eq_attr "mode" "DI"))
+ (const_string "1")
+ (const_string "*")))
(set (attr "mode")
- (cond [(eq_attr "alternative" "0")
- (const_string "SI")
- (eq_attr "alternative" "9,11")
- (cond [(match_test "TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL")
- (const_string "V4SF")
- (match_test "TARGET_AVX")
- (const_string "TI")
- (match_test "optimize_function_for_size_p (cfun)")
- (const_string "V4SF")
- ]
- (const_string "TI"))
- ]
- (const_string "DI")))])
-
-;; Reload patterns to support multi-word load/store
-;; with non-offsetable address.
-(define_expand "reload_noff_store"
- [(parallel [(match_operand 0 "memory_operand" "=m")
- (match_operand 1 "register_operand" "r")
- (match_operand:DI 2 "register_operand" "=&r")])]
- "TARGET_64BIT"
-{
- rtx mem = operands[0];
- rtx addr = XEXP (mem, 0);
-
- emit_move_insn (operands[2], addr);
- mem = replace_equiv_address_nv (mem, operands[2]);
-
- emit_insn (gen_rtx_SET (VOIDmode, mem, operands[1]));
- DONE;
-})
-
-(define_expand "reload_noff_load"
- [(parallel [(match_operand 0 "register_operand" "=r")
- (match_operand 1 "memory_operand" "m")
- (match_operand:DI 2 "register_operand" "=r")])]
- "TARGET_64BIT"
-{
- rtx mem = operands[1];
- rtx addr = XEXP (mem, 0);
-
- emit_move_insn (operands[2], addr);
- mem = replace_equiv_address_nv (mem, operands[2]);
-
- emit_insn (gen_rtx_SET (VOIDmode, operands[0], mem));
- DONE;
-})
-
-(define_insn "*movdi_internal"
- [(set (match_operand:DI 0 "nonimmediate_operand"
- "=r ,o ,*y,m*y,*y,*x,m ,*x,*x,*x,m ,*x,*x,?*x,?*Ym")
- (match_operand:DI 1 "general_operand"
- "riFo,riF,C ,*y ,m ,C ,*x,*x,m ,C ,*x,*x,m ,*Ym,*x"))]
- "!TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
-{
- switch (get_attr_type (insn))
- {
- case TYPE_SSECVT:
- if (SSE_REG_P (operands[0]))
- return "movq2dq\t{%1, %0|%0, %1}";
- else
- return "movdq2q\t{%1, %0|%0, %1}";
-
- case TYPE_SSEMOV:
- switch (get_attr_mode (insn))
- {
- case MODE_TI:
- return "%vmovdqa\t{%1, %0|%0, %1}";
- case MODE_DI:
- return "%vmovq\t{%1, %0|%0, %1}";
- case MODE_V4SF:
- return "%vmovaps\t{%1, %0|%0, %1}";
- case MODE_V2SF:
- return "movlps\t{%1, %0|%0, %1}";
- default:
- gcc_unreachable ();
- }
-
- case TYPE_MMXMOV:
- return "movq\t{%1, %0|%0, %1}";
-
- case TYPE_SSELOG1:
- return standard_sse_constant_opcode (insn, operands[1]);
-
- case TYPE_MMX:
- return "pxor\t%0, %0";
-
- case TYPE_MULTI:
- return "#";
+ (cond [(eq_attr "alternative" "2")
+ (const_string "SI")
+ (eq_attr "alternative" "12,14")
+ (cond [(ior (not (match_test "TARGET_SSE2"))
+ (match_test "TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL"))
+ (const_string "V4SF")
+ (match_test "TARGET_AVX")
+ (const_string "TI")
+ (match_test "optimize_function_for_size_p (cfun)")
+ (const_string "V4SF")
+ ]
+ (const_string "TI"))
- default:
- gcc_unreachable ();
- }
-}
- [(set (attr "isa")
- (cond [(eq_attr "alternative" "5,6,7,8,13,14")
- (const_string "sse2")
- (eq_attr "alternative" "9,10,11,12")
- (const_string "noavx")
- ]
- (const_string "*")))
- (set (attr "type")
- (cond [(eq_attr "alternative" "0,1")
- (const_string "multi")
- (eq_attr "alternative" "2")
- (const_string "mmx")
- (eq_attr "alternative" "3,4")
- (const_string "mmxmov")
- (eq_attr "alternative" "5,9")
- (const_string "sselog1")
- (eq_attr "alternative" "13,14")
- (const_string "ssecvt")
- ]
- (const_string "ssemov")))
- (set (attr "prefix")
- (if_then_else (eq_attr "alternative" "5,6,7,8")
- (const_string "maybe_vex")
- (const_string "orig")))
- (set (attr "mode")
- (cond [(eq_attr "alternative" "9,11")
- (const_string "V4SF")
- (eq_attr "alternative" "10,12")
- (const_string "V2SF")
- (eq_attr "alternative" "5,7")
- (cond [(match_test "TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL")
- (const_string "V4SF")
- (match_test "TARGET_AVX")
- (const_string "TI")
- (match_test "optimize_function_for_size_p (cfun)")
- (const_string "V4SF")
- ]
- (const_string "TI"))
+ (and (eq_attr "alternative" "13,15")
+ (not (match_test "TARGET_SSE2")))
+ (const_string "V2SF")
]
(const_string "DI")))])
(const_string "V4SF")
]
(const_string "TI"))
+
(and (eq_attr "alternative" "8,9,10,11")
(not (match_test "TARGET_SSE2")))
(const_string "SF")
case MODE_V1DF:
return "%vmovlpd\t{%1, %d0|%d0, %1}";
case MODE_V2SF:
- return "%vmovlps\t{%1, %d0|%d0, %1}";
+ gcc_assert (!TARGET_AVX);
+ return "movlps\t{%1, %0|%0, %1}";
default:
gcc_unreachable ();
}
]
(const_string "DF"))
- (eq_attr "alternative" "12,16")
- (if_then_else (match_test "TARGET_SSE2")
- (const_string "DF")
- (const_string "V2SF"))
+ (and (eq_attr "alternative" "12,16")
+ (not (match_test "TARGET_SSE2")))
+ (const_string "V2SF")
]
(const_string "DF")))])