;; Generic LVX load instruction.
(define_insn "altivec_lvx_<mode>"
[(set (match_operand:V 0 "altivec_register_operand" "=v")
- (match_operand:V 1 "memory_operand" "m"))]
+ (match_operand:V 1 "memory_operand" "Z"))]
"TARGET_ALTIVEC"
"lvx %0,%y1"
[(set_attr "type" "vecload")])
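
The "m" to "Z" change is the point of this hunk: lvx has no displacement
form, so the memory operand must be restricted to indirect (0,rB) or
indexed (rA,rB) addresses, which is what "Z" (backed by the
indexed_or_indirect_operand predicate updated below) accepts and what
the %y output modifier knows how to print.  Illustrative pseudo-RTL for
the two accepted shapes (register numbers are made up):

  (mem:V4SI (reg:SI 3))                       ;; lvx v2,0,r3
  (mem:V4SI (plus:SI (reg:SI 3) (reg:SI 4)))  ;; lvx v2,r3,r4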
;; Generic STVX store instruction.
(define_insn "altivec_stvx_<mode>"
- [(set (match_operand:V 0 "memory_operand" "=m")
+ [(set (match_operand:V 0 "memory_operand" "=Z")
(match_operand:V 1 "altivec_register_operand" "v"))]
"TARGET_ALTIVEC"
"stvx %1,%y0"
[(set_attr "type" "vecstore")])
(define_insn "*mov<mode>_internal"
- [(set (match_operand:V 0 "nonimmediate_operand" "=m,v,v,o,r,r,v")
- (match_operand:V 1 "input_operand" "v,m,v,r,o,r,W"))]
+ [(set (match_operand:V 0 "nonimmediate_operand" "=Z,v,v,o,r,r,v")
+ (match_operand:V 1 "input_operand" "v,Z,v,r,o,r,W"))]
"TARGET_ALTIVEC
&& (register_operand (operands[0], <MODE>mode)
|| register_operand (operands[1], <MODE>mode))"
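
The same reasoning applies to the generic vector move: alternatives 0
and 1 are the AltiVec store and load, and with plain "m" reload could
hand them a base+offset stack-slot address that stvx/lvx cannot encode.
A sketch of the problem case (offset is illustrative):

  ;; Matched by "m" but has no lvx/stvx encoding:
  (mem:V4SI (plus:SI (reg:SI 1) (const_int 64)))

With "Z", reload instead moves such an address into a register first,
yielding an indexed form the instructions can take.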
(define_insn "altivec_lvsl"
[(set (match_operand:V16QI 0 "register_operand" "=v")
- (unspec:V16QI [(match_operand 1 "memory_operand" "m")] UNSPEC_LVSL))]
+ (unspec:V16QI [(match_operand 1 "memory_operand" "Z")] UNSPEC_LVSL))]
"TARGET_ALTIVEC"
"lvsl %0,%y1"
[(set_attr "type" "vecload")])
(define_insn "altivec_lvsr"
[(set (match_operand:V16QI 0 "register_operand" "=v")
- (unspec:V16QI [(match_operand 1 "memory_operand" "m")] UNSPEC_LVSR))]
+ (unspec:V16QI [(match_operand 1 "memory_operand" "Z")] UNSPEC_LVSR))]
"TARGET_ALTIVEC"
"lvsr %0,%y1"
[(set_attr "type" "vecload")])
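
lvsl and lvsr derive a permute-control vector from the low four bits of
the address; together with two aligned loads and vperm they implement
the classic AltiVec misaligned load.  A minimal sketch with the
standard <altivec.h> intrinsics (the function name is made up, and the
usual caveat applies: the idiom reads up to 15 bytes past the last
element, which must be accessible):

  #include <altivec.h>

  /* Load 16 bytes from a possibly misaligned pointer using the
     lvsl + vperm realignment idiom.  */
  vector unsigned char
  load_misaligned (const unsigned char *p)
  {
    vector unsigned char msq  = vec_ld (0, p);    /* aligned quad holding p[0] */
    vector unsigned char lsq  = vec_ld (15, p);   /* next aligned quadword */
    vector unsigned char mask = vec_lvsl (0, p);  /* permute control from low bits */
    return vec_perm (msq, lsq, mask);
  }

The build_vector_mask_for_load expander below builds such a mask with
lvsr, the variant consumed by GCC's vectorizer realignment scheme.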
(define_expand "build_vector_mask_for_load"
- [(set (match_operand:V16QI 0 "register_operand" "=v")
- (unspec:V16QI [(match_operand 1 "memory_operand" "m")] UNSPEC_LVSR))]
+ [(set (match_operand:V16QI 0 "register_operand" "")
+ (unspec:V16QI [(match_operand 1 "memory_operand" "")] UNSPEC_LVSR))]
"TARGET_ALTIVEC"
"
{
(define_insn "altivec_lvxl"
[(parallel
[(set (match_operand:V4SI 0 "register_operand" "=v")
- (match_operand:V4SI 1 "memory_operand" "m"))
+ (match_operand:V4SI 1 "memory_operand" "Z"))
(unspec [(const_int 0)] UNSPEC_SET_VSCR)])]
"TARGET_ALTIVEC"
"lvxl %0,%y1"
(define_insn "altivec_lvx"
[(set (match_operand:V4SI 0 "register_operand" "=v")
- (match_operand:V4SI 1 "memory_operand" "m"))]
+ (match_operand:V4SI 1 "memory_operand" "Z"))]
"TARGET_ALTIVEC"
"lvx %0,%y1"
[(set_attr "type" "vecload")])
(define_insn "altivec_stvx"
[(parallel
- [(set (match_operand:V4SI 0 "memory_operand" "=m")
+ [(set (match_operand:V4SI 0 "memory_operand" "=Z")
(match_operand:V4SI 1 "register_operand" "v"))
(unspec [(const_int 0)] UNSPEC_STVX)])]
"TARGET_ALTIVEC"
(define_insn "altivec_stvxl"
[(parallel
- [(set (match_operand:V4SI 0 "memory_operand" "=m")
+ [(set (match_operand:V4SI 0 "memory_operand" "=Z")
(match_operand:V4SI 1 "register_operand" "v"))
(unspec [(const_int 0)] UNSPEC_STVXL)])]
"TARGET_ALTIVEC"
(define_insn "altivec_stve<VI_char>x"
[(parallel
- [(set (match_operand:VI 0 "memory_operand" "=m")
+ [(set (match_operand:VI 0 "memory_operand" "=Z")
(match_operand:VI 1 "register_operand" "v"))
(unspec [(const_int 0)] UNSPEC_STVE)])]
"TARGET_ALTIVEC"
(define_insn "*altivec_stvesfx"
[(parallel
- [(set (match_operand:V4SF 0 "memory_operand" "=m")
+ [(set (match_operand:V4SF 0 "memory_operand" "=Z")
(match_operand:V4SF 1 "register_operand" "v"))
(unspec [(const_int 0)] UNSPEC_STVE)])]
"TARGET_ALTIVEC"
;; Return 1 if the operand is an indexed or indirect memory operand.
(define_predicate "indexed_or_indirect_operand"
- (and (match_operand 0 "memory_operand")
- (match_test "REG_P (XEXP (op, 0))
- || (GET_CODE (XEXP (op, 0)) == PLUS
- && REG_P (XEXP (XEXP (op, 0), 0))
- && REG_P (XEXP (XEXP (op, 0), 1)))")))
+ (match_operand 0 "memory_operand")
+{
+ rtx tmp = XEXP (op, 0);
+
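+  /* AltiVec loads and stores ignore the low four bits of the address,
+     so a vector address may arrive wrapped in an AND with -16; look
+     through the mask before classifying the address.  */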
+ if (TARGET_ALTIVEC
+ && ALTIVEC_VECTOR_MODE (mode)
+ && GET_CODE (tmp) == AND
+ && GET_CODE (XEXP (tmp, 1)) == CONST_INT
+ && INTVAL (XEXP (tmp, 1)) == -16)
+ tmp = XEXP (tmp, 0);
+
+ return REG_P (tmp)
+ || (GET_CODE (tmp) == PLUS
+ && REG_P (XEXP (tmp, 0))
+ && REG_P (XEXP (tmp, 1)));
+})
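
With the AND case handled, the predicate (and therefore the "Z"
constraint that uses it) also accepts the masked addresses the AltiVec
expanders generate.  Illustrative pseudo-RTL (register numbers made up):

  (mem:V4SI (and:SI (reg:SI 3) (const_int -16)))       ;; accepted
  (mem:V4SI (and:SI (plus:SI (reg:SI 3) (reg:SI 4))
                    (const_int -16)))                  ;; accepted
  (mem:V4SI (plus:SI (reg:SI 3) (const_int 16)))       ;; rejected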
;; Return 1 if the operand is a memory operand with an address divisible by 4.
(define_predicate "word_offset_memref_operand"
}
#endif
+ /* Reload an offset address wrapped by an AND that represents the
+ masking of the lower bits. Strip the outer AND and let reload
+ convert the offset address into an indirect address. */
+ if (TARGET_ALTIVEC
+ && ALTIVEC_VECTOR_MODE (mode)
+ && GET_CODE (x) == AND
+ && GET_CODE (XEXP (x, 0)) == PLUS
+ && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
+ && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
+ && GET_CODE (XEXP (x, 1)) == CONST_INT
+ && INTVAL (XEXP (x, 1)) == -16)
+ {
+ x = XEXP (x, 0);
+ *win = 1;
+ return x;
+ }
+
if (TARGET_TOC
&& constant_pool_expr_p (x)
&& ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
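
The reload hook strips the AND and hands the inner PLUS back, letting
reload convert the offset address into an indirect one as usual;
dropping the mask is safe because lvx/stvx ignore the low four address
bits anyway.  A before/after sketch (register and offset are
illustrative):

  ;; incoming reload address:
  (and:SI (plus:SI (reg:SI 9) (const_int 32)) (const_int -16))
  ;; returned with *win = 1; reload then legitimizes the offset form:
  (plus:SI (reg:SI 9) (const_int 32))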