+2004-11-11 Nathan Sidwell <nathan@codesourcery.com>
+
+ PR target/16457
+ * config/rs6000/rs6000.c (mask64_2_operand): Stub to call
+ mask64_1or2_operand.
+ (mask64_1or2_operand): Broken out of mask64_2_operand, add flag
+ to spot rlwinm opportunities.
+ (and64_2_operand): Use mask_1or2_operand.
+ * config/rs6000/rs6000.md (anddi3): Use rlwinm when possible.
+
2004-11-11 Dorit Naishlos <dorit@il.ibm.com>
* tree-vectorizer.c (update_phi_nodes_for_guard): Call reverse_phis.
return 0;
}
-/* Like mask64_operand, but allow up to three transitions. This
- predicate is used by insn patterns that generate two rldicl or
- rldicr machine insns. */
-
-int
-mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
+static int
+mask64_1or2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED,
+ bool allow_one)
{
if (GET_CODE (op) == CONST_INT)
{
HOST_WIDE_INT c, lsb;
-
+ bool one_ok;
+
c = INTVAL (op);
/* Disallow all zeros. */
if (c == 0)
return 0;
+ /* We can use a single rlwinm insn if no upper bits of C are set
+ AND there are zero, one or two transitions in the _whole_ of
+ C. */
+ one_ok = !(c & ~(HOST_WIDE_INT)0xffffffff);
+
/* We don't change the number of transitions by inverting,
so make sure we start with the LS bit zero. */
if (c & 1)
/* Erase second transition. */
c &= -lsb;
+ if (one_ok && !(allow_one || c))
+ return 0;
+
/* Find the third transition (if any). */
lsb = c & -c;
return 0;
}
+/* Like mask64_operand, but allow up to three transitions. This
+ predicate is used by insn patterns that generate two rldicl or
+ rldicr machine insns. */
+int mask64_2_operand (rtx op, enum machine_mode mode)
+{
+ return mask64_1or2_operand (op, mode, false);
+}
+
/* Generates shifts and masks for a pair of rldicl or rldicr insns to
implement ANDing by the mask IN. */
void
and64_2_operand (rtx op, enum machine_mode mode)
{
if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
- return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
+ return gpc_reg_operand (op, mode) || mask64_1or2_operand (op, mode, true);
- return logical_operand (op, mode) || mask64_2_operand (op, mode);
+ return logical_operand (op, mode) || mask64_1or2_operand (op, mode, true);
}
/* Return 1 if the operand is either a non-special register or a
"")
(define_insn "anddi3"
- [(set (match_operand:DI 0 "gpc_reg_operand" "=r,r,r,r,r")
- (and:DI (match_operand:DI 1 "gpc_reg_operand" "%r,r,r,r,r")
- (match_operand:DI 2 "and64_2_operand" "?r,S,K,J,t")))
- (clobber (match_scratch:CC 3 "=X,X,x,x,X"))]
+ [(set (match_operand:DI 0 "gpc_reg_operand" "=r,r,r,r,r,r")
+ (and:DI (match_operand:DI 1 "gpc_reg_operand" "%r,r,r,r,r,r")
+ (match_operand:DI 2 "and64_2_operand" "?r,S,T,K,J,t")))
+ (clobber (match_scratch:CC 3 "=X,X,X,x,x,X"))]
"TARGET_POWERPC64"
"@
and %0,%1,%2
rldic%B2 %0,%1,0,%S2
+ rlwinm %0,%1,0,%m2,%M2
andi. %0,%1,%b2
andis. %0,%1,%u2
#"
- [(set_attr "type" "*,*,compare,compare,*")
- (set_attr "length" "4,4,4,4,8")])
+ [(set_attr "type" "*,*,*,compare,compare,*")
+ (set_attr "length" "4,4,4,4,4,8")])
(define_split
[(set (match_operand:DI 0 "gpc_reg_operand" "")
(and:DI (rotate:DI (match_dup 0)
(match_dup 6))
(match_dup 7)))]
- "
{
build_mask64_2_operands (operands[2], &operands[4]);
-}")
+})
(define_insn "*anddi3_internal2"
[(set (match_operand:CC 0 "cc_reg_operand" "=x,x,x,x,x,?y,?y,??y,??y,?y")
2004-11-11 Nathan Sidwell <nathan@codesourcery.com>
+ PR target/16457
+	* gcc.dg/ppc-and-1.c: New.
+
PR target/16796
* gcc.dg/ppc-mov-1.c: New.
--- /dev/null
+/* { dg-do compile { target powerpc64-*-* } } */
+/* { dg-options "-m64 -O2" } */
+
+/* { dg-final { scan-assembler "rlwinm \[0-9\]+,\[0-9\]+,0,0,30" } } */
+/* { dg-final { scan-assembler "rlwinm \[0-9\]+,\[0-9\]+,0,29,30" } } */
+/* { dg-final { scan-assembler-not "rldicr" } } */
+
+/* Origin: Pete Steinmetz <steinmtz@us.ibm.com> */
+
+/* PR 16457 - use rlwinm insn. */
+
+char *foo1 (char *p, unsigned int x)
+{
+ return p - (x & ~1);
+}
+
+char *foo2 (char *p, unsigned int x)
+{
+ return p - (x & 6);
+}