From: Jeff Law Date: Thu, 28 Mar 1996 17:14:41 +0000 (-0700) Subject: pa.c (hppa_legitimize_address): Don't lose for (plus (plus (mult (A) (shadd_const... X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=31d4f31fd7f5bfc432ffad4341d0e97eeb201e66;p=platform%2Fupstream%2Fgcc.git pa.c (hppa_legitimize_address): Don't lose for (plus (plus (mult (A) (shadd_const)) (B)) (C)) if... * pa.c (hppa_legitimize_address): Don't lose for (plus (plus (mult (A) (shadd_const)) (B)) (C)) if B + C isn't a valid address for indexing. (basereg_operand): Only accept base registers after cse has completed. Don't accept the frame pointer if it's likely to be eliminated. * pa.md (unscaled indexing patterns): Add variants with basereg and index register reversed. (HImode and QImode loads): Add zero extended variants. From-SVN: r11625 --- diff --git a/gcc/config/pa/pa.c b/gcc/config/pa/pa.c index 1bce85f..c27b222 100644 --- a/gcc/config/pa/pa.c +++ b/gcc/config/pa/pa.c @@ -767,19 +767,19 @@ hppa_legitimize_address (x, oldx, mode) { rtx regx1, regx2; - /* Add the two unscaled terms B and C; only force them into registers - if it's absolutely necessary. */ + /* Add the two unscaled terms B and C; if either B or C isn't + a register or small constant int, then fail. */ regx1 = XEXP (XEXP (x, 0), 1); if (! (GET_CODE (regx1) == REG || (GET_CODE (regx1) == CONST_INT && INT_14_BITS (regx1)))) - regx1 = force_reg (Pmode, force_operand (XEXP (XEXP (x, 0), 1), 0)); + return orig; regx2 = XEXP (x, 1); if (! (GET_CODE (regx2) == REG || (GET_CODE (regx2) == CONST_INT && INT_14_BITS (regx2)))) - regx2 = force_reg (Pmode, force_operand (XEXP (x, 1), 0)); + return orig; /* Add them, make sure the result is in canonical form. */ if (GET_CODE (regx1) == REG) @@ -4941,6 +4941,12 @@ basereg_operand (op, mode) rtx op; enum machine_mode mode; { + /* cse will create some unscaled indexed addresses, however; it + generally isn't a win on the PA, so avoid creating unscaled + indexed addresses until after cse is finished. */ + if (!cse_not_expected) + return 0; + /* Once reload has started everything is considered valid. Reload should only create indexed addresses using the stack/frame pointer, and any others were checked for validity when created by the combine pass. @@ -4952,8 +4958,14 @@ basereg_operand (op, mode) if (TARGET_NO_SPACE_REGS || reload_in_progress || reload_completed) return (GET_CODE (op) == REG || GET_CODE (op) == CONST_INT); - /* Stack and frame pointers are always OK for indexing. */ - if (op == stack_pointer_rtx || op == frame_pointer_rtx) + /* Stack is always OK for indexing. */ + if (op == stack_pointer_rtx) + return 1; + + /* While it's always safe to index off the frame pointer, it's not + always profitable, particularly when the frame pointer is being + eliminated. */ + if (! flag_omit_frame_pointer && op == frame_pointer_rtx) return 1; /* The only other valid OPs are pseudo registers with diff --git a/gcc/config/pa/pa.md b/gcc/config/pa/pa.md index 116f372..a96cca4 100644 --- a/gcc/config/pa/pa.md +++ b/gcc/config/pa/pa.md @@ -1529,6 +1529,25 @@ (define_insn "" [(set (match_operand:SI 0 "register_operand" "=r") + (mem:SI (plus:SI (match_operand:SI 1 "basereg_operand" "r") + (match_operand:SI 2 "register_operand" "r"))))] + "! TARGET_DISABLE_INDEXING" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. 
*/ + if (operands[2] == hard_frame_pointer_rtx + || operands[2] == stack_pointer_rtx) + return \"ldwx %1(0,%2),%0\"; + else + return \"ldwx %2(0,%1),%0\"; +}" + [(set_attr "type" "load") + (set_attr "length" "4")]) + +(define_insn "" + [(set (match_operand:SI 0 "register_operand" "=r") (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r") (match_operand:SI 2 "basereg_operand" "r"))))] "! TARGET_DISABLE_INDEXING" @@ -1851,7 +1870,20 @@ [(set_attr "type" "load") (set_attr "length" "4")]) -;; This variant of the above insn can occur if the second operand +; Same thing with zero extension. +(define_insn "" + [(set (match_operand:SI 0 "register_operand" "=r") + (zero_extend:SI (mem:HI + (plus:SI + (mult:SI (match_operand:SI 2 "register_operand" "r") + (const_int 2)) + (match_operand:SI 1 "register_operand" "r")))))] + "! TARGET_DISABLE_INDEXING" + "ldhx,s %2(0,%1),%0" + [(set_attr "type" "load") + (set_attr "length" "4")]) + +;; These variants of the above insns can occur if the second operand ;; is the frame pointer. This is a kludge, but there doesn't ;; seem to be a way around it. Only recognize it while reloading. ;; Note how operand 3 uses a predicate of "const_int_operand", but @@ -1877,6 +1909,44 @@ [(set_attr "type" "load") (set_attr "length" "8")]) +; Now the zero extended variant. +(define_insn "" + [(set (match_operand:SI 0 "register_operand" "=&r") + (zero_extend:SI (mem:HI (plus:SI (plus:SI + (mult:SI (match_operand:SI 2 "register_operand" "r") + (const_int 2)) + (match_operand:SI 1 "register_operand" "r")) + (match_operand:SI 3 "const_int_operand" "rI")))))] + "! TARGET_DISABLE_INDEXING && reload_in_progress" + "* +{ + if (GET_CODE (operands[3]) == CONST_INT) + return \"sh1addl %2,%1,%0\;ldh %3(0,%0),%0\"; + else + return \"sh1addl %2,%1,%0\;ldhx %3(0,%0),%0\"; +}" + [(set_attr "type" "load") + (set_attr "length" "8")]) + +(define_insn "" + [(set (match_operand:HI 0 "register_operand" "=r") + (mem:HI (plus:SI (match_operand:SI 1 "basereg_operand" "r") + (match_operand:SI 2 "register_operand" "r"))))] + "! TARGET_DISABLE_INDEXING" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. */ + if (operands[2] == hard_frame_pointer_rtx + || operands[2] == stack_pointer_rtx) + return \"ldhx %1(0,%2),%0\"; + else + return \"ldhx %2(0,%1),%0\"; +}" + [(set_attr "type" "load") + (set_attr "length" "4")]) + (define_insn "" [(set (match_operand:HI 0 "register_operand" "=r") (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "r") @@ -1896,6 +1966,49 @@ [(set_attr "type" "load") (set_attr "length" "4")]) +; Now zero extended variants. +(define_insn "" + [(set (match_operand:SI 0 "register_operand" "=r") + (zero_extend:SI (mem:HI + (plus:SI + (match_operand:SI 1 "basereg_operand" "r") + (match_operand:SI 2 "register_operand" "r")))))] + "! TARGET_DISABLE_INDEXING" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. 
*/ + if (operands[2] == hard_frame_pointer_rtx + || operands[2] == stack_pointer_rtx) + return \"ldhx %1(0,%2),%0\"; + else + return \"ldhx %2(0,%1),%0\"; +}" + [(set_attr "type" "load") + (set_attr "length" "4")]) + +(define_insn "" + [(set (match_operand:SI 0 "register_operand" "=r") + (zero_extend:SI (mem:HI + (plus:SI + (match_operand:SI 1 "register_operand" "r") + (match_operand:SI 2 "basereg_operand" "r")))))] + "! TARGET_DISABLE_INDEXING" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. */ + if (operands[1] == hard_frame_pointer_rtx + || operands[1] == stack_pointer_rtx) + return \"ldhx %2(0,%1),%0\"; + else + return \"ldhx %1(0,%2),%0\"; +}" + [(set_attr "type" "load") + (set_attr "length" "4")]) + (define_insn "" [(set (match_operand:HI 0 "register_operand" "=r") (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "=r") @@ -1907,6 +2020,20 @@ [(set_attr "type" "load") (set_attr "length" "4")]) +; And a zero extended variant. +(define_insn "" + [(set (match_operand:SI 0 "register_operand" "=r") + (zero_extend:SI (mem:HI + (plus:SI + (match_operand:SI 1 "register_operand" "=r") + (match_operand:SI 2 "int5_operand" "L"))))) + (set (match_dup 1) + (plus:SI (match_dup 1) (match_dup 2)))] + "" + "ldhs,mb %2(0,%1),%0" + [(set_attr "type" "load") + (set_attr "length" "4")]) + (define_insn "" [(set (mem:HI (plus:SI (match_operand:SI 0 "register_operand" "=r") (match_operand:SI 1 "int5_operand" "L"))) @@ -1964,6 +2091,25 @@ (define_insn "" [(set (match_operand:QI 0 "register_operand" "=r") + (mem:QI (plus:SI (match_operand:SI 1 "basereg_operand" "r") + (match_operand:SI 2 "register_operand" "r"))))] + "! TARGET_DISABLE_INDEXING" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. */ + if (operands[2] == hard_frame_pointer_rtx + || operands[2] == stack_pointer_rtx) + return \"ldbx %1(0,%2),%0\"; + else + return \"ldbx %2(0,%1),%0\"; +}" + [(set_attr "type" "load") + (set_attr "length" "4")]) + +(define_insn "" + [(set (match_operand:QI 0 "register_operand" "=r") (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "r") (match_operand:SI 2 "basereg_operand" "r"))))] "! TARGET_DISABLE_INDEXING" @@ -1981,6 +2127,91 @@ [(set_attr "type" "load") (set_attr "length" "4")]) +; Indexed byte load with zero extension to SImode or HImode. +(define_insn "" + [(set (match_operand:SI 0 "register_operand" "=r") + (zero_extend:SI (mem:QI + (plus:SI + (match_operand:SI 1 "basereg_operand" "r") + (match_operand:SI 2 "register_operand" "r")))))] + "! TARGET_DISABLE_INDEXING" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. */ + if (operands[2] == hard_frame_pointer_rtx + || operands[2] == stack_pointer_rtx) + return \"ldbx %1(0,%2),%0\"; + else + return \"ldbx %2(0,%1),%0\"; +}" + [(set_attr "type" "load") + (set_attr "length" "4")]) + +(define_insn "" + [(set (match_operand:SI 0 "register_operand" "=r") + (zero_extend:SI (mem:QI + (plus:SI + (match_operand:SI 1 "register_operand" "r") + (match_operand:SI 2 "basereg_operand" "r")))))] + "! 
TARGET_DISABLE_INDEXING" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. */ + if (operands[1] == hard_frame_pointer_rtx + || operands[1] == stack_pointer_rtx) + return \"ldbx %2(0,%1),%0\"; + else + return \"ldbx %1(0,%2),%0\"; +}" + [(set_attr "type" "load") + (set_attr "length" "4")]) + +(define_insn "" + [(set (match_operand:HI 0 "register_operand" "=r") + (zero_extend:HI (mem:QI + (plus:SI + (match_operand:SI 1 "basereg_operand" "r") + (match_operand:SI 2 "register_operand" "r")))))] + "! TARGET_DISABLE_INDEXING" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. */ + if (operands[2] == hard_frame_pointer_rtx + || operands[2] == stack_pointer_rtx) + return \"ldbx %1(0,%2),%0\"; + else + return \"ldbx %2(0,%1),%0\"; +}" + [(set_attr "type" "load") + (set_attr "length" "4")]) + +(define_insn "" + [(set (match_operand:HI 0 "register_operand" "=r") + (zero_extend:HI (mem:QI + (plus:SI + (match_operand:SI 1 "register_operand" "r") + (match_operand:SI 2 "basereg_operand" "r")))))] + "! TARGET_DISABLE_INDEXING" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. */ + if (operands[1] == hard_frame_pointer_rtx + || operands[1] == stack_pointer_rtx) + return \"ldbx %2(0,%1),%0\"; + else + return \"ldbx %1(0,%2),%0\"; +}" + [(set_attr "type" "load") + (set_attr "length" "4")]) + (define_insn "" [(set (match_operand:QI 0 "register_operand" "=r") (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "=r") @@ -1991,6 +2222,29 @@ [(set_attr "type" "load") (set_attr "length" "4")]) +; Now the same thing with zero extensions. +(define_insn "" + [(set (match_operand:SI 0 "register_operand" "=r") + (zero_extend:SI (mem:QI (plus:SI + (match_operand:SI 1 "register_operand" "=r") + (match_operand:SI 2 "int5_operand" "L"))))) + (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))] + "" + "ldbs,mb %2(0,%1),%0" + [(set_attr "type" "load") + (set_attr "length" "4")]) + +(define_insn "" + [(set (match_operand:HI 0 "register_operand" "=r") + (zero_extend:HI (mem:QI (plus:SI + (match_operand:SI 1 "register_operand" "=r") + (match_operand:SI 2 "int5_operand" "L"))))) + (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))] + "" + "ldbs,mb %2(0,%1),%0" + [(set_attr "type" "load") + (set_attr "length" "4")]) + (define_insn "" [(set (mem:QI (plus:SI (match_operand:SI 0 "register_operand" "=r") (match_operand:SI 1 "int5_operand" "L"))) @@ -2227,6 +2481,25 @@ (define_insn "" [(set (match_operand:DF 0 "register_operand" "=fx") + (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r") + (match_operand:SI 2 "register_operand" "r"))))] + "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. 
*/ + if (operands[2] == hard_frame_pointer_rtx + || operands[2] == stack_pointer_rtx) + return \"flddx %1(0,%2),%0\"; + else + return \"flddx %2(0,%1),%0\"; +}" + [(set_attr "type" "fpload") + (set_attr "length" "4")]) + +(define_insn "" + [(set (match_operand:DF 0 "register_operand" "=fx") (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r") (match_operand:SI 2 "basereg_operand" "r"))))] "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT" @@ -2284,6 +2557,25 @@ (set_attr "length" "8")]) (define_insn "" + [(set (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r") + (match_operand:SI 2 "register_operand" "r"))) + (match_operand:DF 0 "register_operand" "fx"))] + "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. */ + if (operands[2] == hard_frame_pointer_rtx + || operands[2] == stack_pointer_rtx) + return \"fstdx %0,%1(0,%2)\"; + else + return \"fstdx %0,%2(0,%1)\"; +}" + [(set_attr "type" "fpstore") + (set_attr "length" "4")]) + +(define_insn "" [(set (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r") (match_operand:SI 2 "basereg_operand" "r"))) (match_operand:DF 0 "register_operand" "fx"))] @@ -2570,6 +2862,25 @@ (define_insn "" [(set (match_operand:SF 0 "register_operand" "=fx") + (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r") + (match_operand:SI 2 "register_operand" "r"))))] + "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. */ + if (operands[2] == hard_frame_pointer_rtx + || operands[2] == stack_pointer_rtx) + return \"fldwx %1(0,%2),%0\"; + else + return \"fldwx %2(0,%1),%0\"; +}" + [(set_attr "type" "fpload") + (set_attr "length" "4")]) + +(define_insn "" + [(set (match_operand:SF 0 "register_operand" "=fx") (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r") (match_operand:SI 2 "basereg_operand" "r"))))] "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT" @@ -2627,6 +2938,25 @@ (set_attr "length" "8")]) (define_insn "" + [(set (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r") + (match_operand:SI 2 "register_operand" "r"))) + (match_operand:SF 0 "register_operand" "fx"))] + "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT" + "* +{ + /* Reload can create backwards (relative to cse) unscaled index + address modes when eliminating registers and possibly for + pseudos that don't get hard registers. Deal with it. */ + if (operands[2] == hard_frame_pointer_rtx + || operands[2] == stack_pointer_rtx) + return \"fstwx %0,%1(0,%2)\"; + else + return \"fstwx %0,%2(0,%1)\"; +}" + [(set_attr "type" "fpstore") + (set_attr "length" "4")]) + +(define_insn "" [(set (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r") (match_operand:SI 2 "basereg_operand" "r"))) (match_operand:SF 0 "register_operand" "fx"))]
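
---

A few illustrative notes on the changes above. The hppa_legitimize_address hunk now gives up on the (plus (plus (mult (A) (shadd_const)) (B)) (C)) form unless B and C are each already a register or a "small constant int", where small means the value fits the PA's signed 14-bit load/store displacement field (that is what INT_14_BITS tests, which should correspond to -8192 .. 8191). The sketch below is a standalone toy model of that decision, not GCC code: the struct, the helper names, and the explicit -8192 .. 8191 range are assumptions made for illustration.

#include <stdio.h>

/* Toy stand-in for an RTL operand: a register or an integer constant.
   GCC works on rtx nodes instead; these names are invented.  */
enum toy_kind { TOY_REG, TOY_CONST_INT };
struct toy_op { enum toy_kind kind; long value; };

/* Assumed meaning of INT_14_BITS: the constant fits the PA's signed
   14-bit displacement field, i.e. -8192 .. 8191.  */
static int
fits_14bit_disp (long v)
{
  return v >= -8192 && v <= 8191;
}

/* Mirrors the new early return: only keep building the scaled-index
   address when both unscaled terms B and C are already cheap; the
   patch returns `orig' instead of forcing them into registers.  */
static int
keep_scaled_index (struct toy_op b, struct toy_op c)
{
  struct toy_op ops[2];
  int i;

  ops[0] = b;
  ops[1] = c;
  for (i = 0; i < 2; i++)
    if (! (ops[i].kind == TOY_REG
	   || (ops[i].kind == TOY_CONST_INT && fits_14bit_disp (ops[i].value))))
      return 0;
  return 1;
}

int
main (void)
{
  struct toy_op reg = { TOY_REG, 0 };
  struct toy_op small = { TOY_CONST_INT, 4000 };
  struct toy_op large = { TOY_CONST_INT, 100000 };

  printf ("%d %d\n",
	  keep_scaled_index (reg, small),   /* 1: keep transforming */
	  keep_scaled_index (reg, large));  /* 0: bail out          */
  return 0;
}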
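
The new pattern variants with base and index register reversed all share the same output logic: reload can hand the operands over in either order, so the template checks whether the second register is the stack pointer or hard frame pointer and, if so, prints it in the base position of the indexed load (the base slot is the one that participates in space-register selection when space registers are in use). Below is a small standalone model of that choice; is_sp_or_fp, emit_ldwx, and the register names are invented for illustration, while the "ldwx %index(0,%base),%dest" syntax is copied from the patch.

#include <stdio.h>
#include <string.h>

/* Toy stand-in for the comparison the new templates make against
   stack_pointer_rtx and hard_frame_pointer_rtx.  */
static int
is_sp_or_fp (const char *reg)
{
  return strcmp (reg, "%r30") == 0      /* stack pointer on the PA */
	 || strcmp (reg, "%r3") == 0;   /* typical frame pointer   */
}

/* Mirror of the reversed unscaled-index templates: whichever operand
   turns out to be the stack/frame pointer is printed in the base
   position of "ldwx index(0,base),dest"; the other register becomes
   the index.  */
static void
emit_ldwx (const char *op1, const char *op2, const char *dest)
{
  if (is_sp_or_fp (op2))
    printf ("ldwx %s(0,%s),%s\n", op1, op2, dest);
  else
    printf ("ldwx %s(0,%s),%s\n", op2, op1, dest);
}

int
main (void)
{
  emit_ldwx ("%r19", "%r30", "%r28");   /* sp arrived as operand 2 */
  emit_ldwx ("%r30", "%r19", "%r28");   /* sp arrived as operand 1 */
  return 0;
}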
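
The zero-extended HImode and QImode variants reuse the plain ldhx / ldhx,s / ldbx mnemonics because the PA's sub-word loads already clear the upper bits of the destination register; the new patterns simply let an unsigned short or unsigned char access through an indexed address be recognized as one insn with the widening folded in. Here is a small C example of the kind of code that should now match the scaled zero-extend pattern; the assembly in the comment is illustrative only (register choices depend on the allocator).

/* Each halfword access below has the shape
   (zero_extend:SI (mem:HI (plus (mult idx 2) base))), which is exactly
   what the new pa.md pattern accepts, e.g.:

       ldhx,s %r25(0,%r26),%r28    ; p[i], zero-extended by hardware
       ldhx,s %r24(0,%r26),%r19    ; p[j]
       addl %r19,%r28,%r28
*/
unsigned int
sum_halfwords (unsigned short *p, int i, int j)
{
  return (unsigned int) p[i] + (unsigned int) p[j];
}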
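
The ldhs,mb / ldbs,mb additions pair a zero-extending load with an update of the base register. Since the recorded address and the new base value are both base + disp, this is the PA's "modify before" form, i.e. a pre-increment; a walk like the one below is the shape it is aimed at. Whether a particular compilation really combines the load and the pointer update into one insn is not guaranteed, and the assembly comment is illustrative.

/* A pre-incremented unsigned-char read is the sort of access the new
   "ldbs,mb" zero-extend variant covers: the load and the pointer
   update can share one instruction, roughly

       ldbs,mb 1(0,%r26),%r28     ; base updated before the access
*/
unsigned int
next_byte (unsigned char **pp)
{
  unsigned char *p = *pp;
  unsigned int v = *++p;
  *pp = p;
  return v;
}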