expmed.c (store_bit_field_1): Be prepared for the movstrict expander to fail.
authorJan Hubicka <jh@suse.cz>
Thu, 28 Aug 2008 20:12:57 +0000 (22:12 +0200)
committerJan Hubicka <hubicka@gcc.gnu.org>
Thu, 28 Aug 2008 20:12:57 +0000 (20:12 +0000)
* expmed.c (store_bit_field_1): Be prepared for the movstrict expander to fail.
* predict.c (always_optimize_for_size_p): Rename to ...
(optimize_function_for_size_p): ... this one; make extern.
(optimize_function_for_speed_p): New.
(optimize_bb_for_size_p, optimize_bb_for_speed_p,
optimize_edge_for_size_p, optimize_edge_for_speed_p,
optimize_insn_for_size_p, optimize_insn_for_speed_p): Update.
* basic-block.h (optimize_function_for_size_p,
optimize_function_for_speed_p): Declare.
* i386.md (optimize_size checks): Replace them by appropriate predicates.
* i386.c (standard_80387_constant_p, ix86_compute_frame_layout,
ix86_expand_epilogue, ix86_decompose_address,
print_operand, emit_i387_cw_initialization,
inline_memory_move_cost, ix86_pad_returns,
ix86_reorg): Replace optimize_size checks.

From-SVN: r139737
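
The predict.c hunk itself is not included in the excerpt below, so the following
is only a sketch of what the renamed and new function-level predicates
presumably look like, assuming they combine the global -Os flag with the
per-function profile information kept in struct function (the
function_frequency field and enum value used here are assumptions, not taken
from this patch):

/* Sketch only -- not the actual predict.c hunk.  Return true if FUN should
   be optimized for size, either because -Os is in effect or because the
   function is known (e.g. from profile feedback) to be cold.  */
bool
optimize_function_for_size_p (struct function *fun)
{
  return (optimize_size
          || (fun && fun->function_frequency
                     == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED));
}

/* Sketch: the speed predicate is simply the complement.  */
bool
optimize_function_for_speed_p (struct function *fun)
{
  return !optimize_function_for_size_p (fun);
}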

gcc/ChangeLog
gcc/basic-block.h
gcc/config/i386/i386.c
gcc/config/i386/i386.md
gcc/config/i386/sse.md
gcc/expmed.c
gcc/predict.c

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 22f639b..7ab08ac 100644
@@ -1,3 +1,21 @@
+2008-08-28  Jan Hubicka  <jh@suse.cz>
+
+       * expmed.c (store_bit_field_1): Be prepared for the movstrict expander to fail.
+       * predict.c (always_optimize_for_size_p): Rename to ...
+       (optimize_function_for_size_p): ... this one; make extern.
+       (optimize_function_for_speed_p): New.
+       (optimize_bb_for_size_p, optimize_bb_for_speed_p,
+       optimize_edge_for_size_p, optimize_edge_for_speed_p,
+       optimize_insn_for_size_p, optimize_insn_for_speed_p): Update.
+       * basic-block.h (optimize_function_for_size_p,
+       optimize_function_for_speed_p): Declare.
+       * i386.md (optimize_size checks): Replace them by appropriate predicates.
+       * i386.c (standard_80387_constant_p, ix86_compute_frame_layout,
+       ix86_expand_epilogue, ix86_decompose_address,
+       print_operand, emit_i387_cw_initialization,
+       inline_memory_move_cost, ix86_pad_returns,
+       ix86_reorg): Replace optimize_size checks.
+
 2008-08-28  Richard Sandiford  <rdsandiford@googlemail.com>
 
        * rtl.h (simplify_subreg_regno): Declare.
diff --git a/gcc/basic-block.h b/gcc/basic-block.h
index caaf22e..c3cb76b 100644
@@ -837,6 +837,8 @@ extern bool optimize_edge_for_size_p (edge);
 extern bool optimize_edge_for_speed_p (edge);
 extern bool optimize_insn_for_size_p (void);
 extern bool optimize_insn_for_speed_p (void);
+extern bool optimize_function_for_size_p (struct function *);
+extern bool optimize_function_for_speed_p (struct function *);
 extern bool gimple_predicted_by_p (const_basic_block, enum br_predictor);
 extern bool rtl_predicted_by_p (const_basic_block, enum br_predictor);
 extern void gimple_predict_edge (edge, enum br_predictor, int);
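
Several of the i386.md split conditions later in this patch use
optimize_insn_for_speed_p () rather than the function-level predicate.  The
bodies of the insn-level predicates are not part of this excerpt; below is a
rough sketch of how they presumably relate to the function-level ones,
assuming the RTL profile machinery records whether the insns currently being
processed sit in a maybe-hot block (crtl->maybe_hot_insn_p here is an
assumption, not shown by this patch):

/* Sketch only.  An insn is optimized for size if the whole function is,
   or if the block the current RTL traversal is working on is known cold.  */
bool
optimize_insn_for_size_p (void)
{
  return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
}

bool
optimize_insn_for_speed_p (void)
{
  return !optimize_insn_for_size_p ();
}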
diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index 4d45c84..9ef95b5 100644
@@ -6843,7 +6843,7 @@ standard_80387_constant_p (rtx x)
   /* For XFmode constants, try to find a special 80387 instruction when
      optimizing for size or on those CPUs that benefit from them.  */
   if (mode == XFmode
-      && (optimize_size || TARGET_EXT_80387_CONSTANTS))
+      && (optimize_function_for_size_p (cfun) || TARGET_EXT_80387_CONSTANTS))
     {
       int i;
 
@@ -7429,7 +7429,7 @@ ix86_compute_frame_layout (struct ix86_frame *frame)
      Recompute the value as needed.  Do not recompute when amount of registers
      didn't change as reload does multiple calls to the function and does not
      expect the decision to change within single iteration.  */
-  if (!optimize_size
+  if (!optimize_function_for_size_p (cfun)
       && cfun->machine->use_fast_prologue_epilogue_nregs != frame->nregs)
     {
       int count = frame->nregs;
@@ -8176,7 +8176,7 @@ ix86_expand_epilogue (int style)
                                            + frame.nregs * UNITS_PER_WORD),
                                   style);
       /* If not an i386, mov & pop is faster than "leave".  */
-      else if (TARGET_USE_LEAVE || optimize_size
+      else if (TARGET_USE_LEAVE || optimize_function_for_size_p (cfun)
               || !cfun->machine->use_fast_prologue_epilogue)
        emit_insn ((*ix86_gen_leave) ());
       else
@@ -8435,8 +8435,10 @@ ix86_decompose_address (rtx addr, struct ix86_address *out)
     disp = const0_rtx;
 
   /* Special case: on K6, [%esi] makes the instruction vector decoded.
-     Avoid this by transforming to [%esi+0].  */
-  if (TARGET_K6 && !optimize_size
+     Avoid this by transforming to [%esi+0].
+     Reload calls address legitimization without cfun defined, so we need
+     to test cfun for being non-NULL. */
+  if (TARGET_K6 && cfun && optimize_function_for_speed_p (cfun)
       && base_reg && !index_reg && !disp
       && REG_P (base_reg)
       && REGNO_REG_CLASS (REGNO (base_reg)) == SIREG)
@@ -10736,7 +10738,8 @@ print_operand (FILE *file, rtx x, int code)
          {
            rtx x;
 
-           if (!optimize || optimize_size || !TARGET_BRANCH_PREDICTION_HINTS)
+           if (!optimize
+               || optimize_function_for_size_p (cfun) || !TARGET_BRANCH_PREDICTION_HINTS)
              return;
 
            x = find_reg_note (current_output_insn, REG_BR_PROB, 0);
@@ -11503,7 +11506,8 @@ emit_i387_cw_initialization (int mode)
   emit_insn (gen_x86_fnstcw_1 (stored_mode));
   emit_move_insn (reg, copy_rtx (stored_mode));
 
-  if (TARGET_64BIT || TARGET_PARTIAL_REG_STALL || optimize_size)
+  if (TARGET_64BIT || TARGET_PARTIAL_REG_STALL
+      || optimize_function_for_size_p (cfun))
     {
       switch (mode)
        {
@@ -24914,7 +24918,8 @@ inline_memory_move_cost (enum machine_mode mode, enum reg_class regclass,
          {
            if (!in)
              return ix86_cost->int_store[0];
-           if (TARGET_PARTIAL_REG_DEPENDENCY && !optimize_size)
+           if (TARGET_PARTIAL_REG_DEPENDENCY
+               && optimize_function_for_speed_p (cfun))
              cost = ix86_cost->movzbl_load;
            else
              cost = ix86_cost->int_load[0];
@@ -26150,9 +26155,11 @@ ix86_pad_returns (void)
 static void
 ix86_reorg (void)
 {
-  if (TARGET_PAD_RETURNS && optimize && !optimize_size)
+  if (TARGET_PAD_RETURNS && optimize
+      && optimize_function_for_speed_p (cfun))
     ix86_pad_returns ();
-  if (TARGET_FOUR_JUMP_LIMIT && optimize && !optimize_size)
+  if (TARGET_FOUR_JUMP_LIMIT && optimize
+      && optimize_function_for_speed_p (cfun))
     ix86_avoid_jump_misspredicts ();
 }
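
The i386.md hunks below let the movstricthi and movstrictqi expanders FAIL
when TARGET_PARTIAL_REG_STALL is set and the function is being optimized for
speed, which is why the (not shown) expmed.c change has to be prepared for a
movstrict expander to fail.  A hedged sketch of the kind of guard
store_bit_field_1 needs; the surrounding variables and subreg offset are
illustrative, not copied from the patch:

/* Sketch only -- not the actual expmed.c hunk.  Try the movstrict pattern,
   but be prepared for the target expander to FAIL, in which case the
   generator returns NULL and nothing has been emitted.  */
if (icode != CODE_FOR_nothing)
  {
    rtx insn = GEN_FCN (icode) (gen_rtx_SUBREG (fieldmode, op0, 0), value);
    if (insn)
      {
        emit_insn (insn);
        return true;
      }
    /* Otherwise fall back to the generic bit-field store code.  */
  }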
 
diff --git a/gcc/config/i386/i386.md b/gcc/config/i386/i386.md
index 0a13751..d952847 100644
               [(match_operand:X87MODEI12 2 "memory_operand" "m")]))]
          UNSPEC_FNSTSW))]
   "X87_FLOAT_MODE_P (GET_MODE (operands[1]))
-   && (TARGET_USE_<MODE>MODE_FIOP || optimize_size)
+   && (TARGET_USE_<MODE>MODE_FIOP || optimize_function_for_size_p (cfun))
    && (GET_MODE (operands [3]) == GET_MODE (operands[1]))"
   "* return output_fp_compare (insn, operands, 0, 0);"
   [(set_attr "type" "multi")
    (clobber (match_operand:HI 0 "register_operand" "=a"))]
   "X87_FLOAT_MODE_P (GET_MODE (operands[1]))
    && TARGET_SAHF && !TARGET_CMOVE
-   && (TARGET_USE_<MODE>MODE_FIOP || optimize_size)
+   && (TARGET_USE_<MODE>MODE_FIOP || optimize_function_for_size_p (cfun))
    && (GET_MODE (operands [3]) == GET_MODE (operands[1]))"
   "#"
   "&& reload_completed"
     }
 }
   [(set (attr "type")
-     (cond [(ne (symbol_ref "optimize_size") (const_int 0))
+     (cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)") (const_int 0))
              (const_string "imov")
            (and (eq_attr "alternative" "0")
                 (ior (eq (symbol_ref "TARGET_PARTIAL_REG_STALL")
        (match_operand:HI 1 "register_operand" "+r"))
    (set (match_dup 1)
        (match_dup 0))]
-  "!TARGET_PARTIAL_REG_STALL || optimize_size"
+  "!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun)"
   "xchg{l}\t%k1, %k0"
   [(set_attr "type" "imov")
    (set_attr "mode" "SI")
 (define_expand "movstricthi"
   [(set (strict_low_part (match_operand:HI 0 "nonimmediate_operand" ""))
        (match_operand:HI 1 "general_operand" ""))]
-  "! TARGET_PARTIAL_REG_STALL || optimize_size"
+  ""
 {
+  if (TARGET_PARTIAL_REG_STALL && optimize_function_for_speed_p (cfun))
+    FAIL;
   /* Don't generate memory->memory moves, go through a register */
   if (MEM_P (operands[0]) && MEM_P (operands[1]))
     operands[1] = force_reg (HImode, operands[1]);
 (define_insn "*movstricthi_1"
   [(set (strict_low_part (match_operand:HI 0 "nonimmediate_operand" "+rm,r"))
        (match_operand:HI 1 "general_operand" "rn,m"))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "mov{w}\t{%1, %0|%0, %1}"
   [(set_attr "type" "imov")
      (cond [(and (eq_attr "alternative" "5")
                 (not (match_operand:QI 1 "aligned_operand" "")))
              (const_string "imovx")
-           (ne (symbol_ref "optimize_size") (const_int 0))
+           (ne (symbol_ref "optimize_function_for_size_p (cfun)") (const_int 0))
              (const_string "imov")
            (and (eq_attr "alternative" "3")
                 (ior (eq (symbol_ref "TARGET_PARTIAL_REG_STALL")
                  (and (eq_attr "alternative" "0,1")
                       (and (ne (symbol_ref "TARGET_PARTIAL_REG_DEPENDENCY")
                                (const_int 0))
-                           (and (eq (symbol_ref "optimize_size")
+                           (and (eq (symbol_ref "optimize_function_for_size_p (cfun)")
                                     (const_int 0))
                                 (eq (symbol_ref "TARGET_PARTIAL_REG_STALL")
                                     (const_int 0))))))
        (match_operand:QI 1 "register_operand" "+r"))
    (set (match_dup 1)
        (match_dup 0))]
-  "!TARGET_PARTIAL_REG_STALL || optimize_size"
+  "!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun)"
   "xchg{l}\t%k1, %k0"
   [(set_attr "type" "imov")
    (set_attr "mode" "SI")
 (define_expand "movstrictqi"
   [(set (strict_low_part (match_operand:QI 0 "nonimmediate_operand" ""))
        (match_operand:QI 1 "general_operand" ""))]
-  "! TARGET_PARTIAL_REG_STALL || optimize_size"
+  ""
 {
+  if (TARGET_PARTIAL_REG_STALL && optimize_function_for_speed_p (cfun))
+    FAIL;
   /* Don't generate memory->memory moves, go through a register.  */
   if (MEM_P (operands[0]) && MEM_P (operands[1]))
     operands[1] = force_reg (QImode, operands[1]);
 (define_insn "*movstrictqi_1"
   [(set (strict_low_part (match_operand:QI 0 "nonimmediate_operand" "+qm,q"))
        (match_operand:QI 1 "general_operand" "*qn,m"))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "mov{b}\t{%1, %0|%0, %1}"
   [(set_attr "type" "imov")
    (set_attr "prefix" "maybe_vex")
    (set (attr "mode")
        (cond [(ior (eq (symbol_ref "TARGET_SSE2") (const_int 0))
-                   (ne (symbol_ref "optimize_size") (const_int 0)))
+                   (ne (symbol_ref "optimize_function_for_size_p (cfun)") (const_int 0)))
                 (const_string "V4SF")
               (and (eq_attr "alternative" "2")
                    (ne (symbol_ref "TARGET_SSE_TYPELESS_STORES")
    (set (attr "mode")
         (cond [(eq_attr "alternative" "2,3")
                 (if_then_else
-                  (ne (symbol_ref "optimize_size")
+                  (ne (symbol_ref "optimize_function_for_size_p (cfun)")
                       (const_int 0))
                   (const_string "V4SF")
                   (const_string "TI"))
                 (if_then_else
                   (ior (ne (symbol_ref "TARGET_SSE_TYPELESS_STORES")
                            (const_int 0))
-                       (ne (symbol_ref "optimize_size")
+                       (ne (symbol_ref "optimize_function_for_size_p (cfun)")
                            (const_int 0)))
                   (const_string "V4SF")
                   (const_string "TI"))]
   "!(MEM_P (operands[0]) && MEM_P (operands[1]))
    && (reload_in_progress || reload_completed
        || (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
-       || (!TARGET_SSE_MATH && optimize_size
+       || (!TARGET_SSE_MATH && optimize_function_for_size_p (cfun)
           && standard_80387_constant_p (operands[1]))
        || GET_CODE (operands[1]) != CONST_DOUBLE
        || memory_operand (operands[0], SFmode))"
                                 (const_int 0))
                             (ne (symbol_ref "TARGET_SSE2")
                                 (const_int 0)))
-                       (eq (symbol_ref "optimize_size")
+                       (eq (symbol_ref "optimize_function_for_size_p (cfun)")
                            (const_int 0)))
                   (const_string "TI")
                   (const_string "V4SF"))
        (match_operand:DF 1 "general_operand"
                        "fm,f,G,*roF,*Fr,C   ,Y2*x,mY2*x,Y2*x"))]
   "!(MEM_P (operands[0]) && MEM_P (operands[1]))
-   && ((optimize_size || !TARGET_INTEGER_DFMODE_MOVES) && !TARGET_64BIT)
+   && ((optimize_function_for_size_p (cfun)
+       || !TARGET_INTEGER_DFMODE_MOVES) && !TARGET_64BIT)
    && (reload_in_progress || reload_completed
        || (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
-       || (!(TARGET_SSE2 && TARGET_SSE_MATH) && optimize_size
+       || (!(TARGET_SSE2 && TARGET_SSE_MATH)
+           && optimize_function_for_size_p (cfun)
            && !memory_operand (operands[0], DFmode)
           && standard_80387_constant_p (operands[1]))
        || GET_CODE (operands[1]) != CONST_DOUBLE
-       || ((optimize_size
+       || ((optimize_function_for_size_p (cfun)
             || !TARGET_MEMORY_MISMATCH_STALL
            || reload_in_progress || reload_completed)
           && memory_operand (operands[0], DFmode)))"
 
               /* xorps is one byte shorter.  */
               (eq_attr "alternative" "5")
-                (cond [(ne (symbol_ref "optimize_size")
+                (cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
                            (const_int 0))
                          (const_string "V4SF")
                        (ne (symbol_ref "TARGET_SSE_LOAD0_BY_PXOR")
                  movaps encodes one byte shorter.  */
               (eq_attr "alternative" "6")
                 (cond
-                  [(ne (symbol_ref "optimize_size")
+                  [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
                        (const_int 0))
                      (const_string "V4SF")
                    (ne (symbol_ref "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
   "TARGET_64BIT && !(MEM_P (operands[0]) && MEM_P (operands[1]))
    && (reload_in_progress || reload_completed
        || (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
-       || (!(TARGET_SSE2 && TARGET_SSE_MATH) && optimize_size
+       || (!(TARGET_SSE2 && TARGET_SSE_MATH)
+           && optimize_function_for_size_p (cfun)
           && standard_80387_constant_p (operands[1]))
        || GET_CODE (operands[1]) != CONST_DOUBLE
        || memory_operand (operands[0], DFmode))"
 
               /* xorps is one byte shorter.  */
               (eq_attr "alternative" "5")
-                (cond [(ne (symbol_ref "optimize_size")
+                (cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
                            (const_int 0))
                          (const_string "V4SF")
                        (ne (symbol_ref "TARGET_SSE_LOAD0_BY_PXOR")
                  movaps encodes one byte shorter.  */
               (eq_attr "alternative" "6")
                 (cond
-                  [(ne (symbol_ref "optimize_size")
+                  [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
                        (const_int 0))
                      (const_string "V4SF")
                    (ne (symbol_ref "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
        (match_operand:DF 1 "general_operand"
                "fm,f,G,roF,Fr,C   ,Y2*x,m   ,Y2*x"))]
   "!(MEM_P (operands[0]) && MEM_P (operands[1]))
-   && !optimize_size && TARGET_INTEGER_DFMODE_MOVES
+   && optimize_function_for_speed_p (cfun)
+   && TARGET_INTEGER_DFMODE_MOVES
    && (reload_in_progress || reload_completed
        || (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
-       || (!(TARGET_SSE2 && TARGET_SSE_MATH) && optimize_size
+       || (!(TARGET_SSE2 && TARGET_SSE_MATH)
+           && optimize_function_for_size_p (cfun)
           && standard_80387_constant_p (operands[1]))
        || GET_CODE (operands[1]) != CONST_DOUBLE
        || memory_operand (operands[0], DFmode))"
 
               /* xorps is one byte shorter.  */
               (eq_attr "alternative" "5")
-                (cond [(ne (symbol_ref "optimize_size")
+                (cond [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
                            (const_int 0))
                          (const_string "V4SF")
                        (ne (symbol_ref "TARGET_SSE_LOAD0_BY_PXOR")
                  movaps encodes one byte shorter.  */
               (eq_attr "alternative" "6")
                 (cond
-                  [(ne (symbol_ref "optimize_size")
+                  [(ne (symbol_ref "optimize_function_for_size_p (cfun)")
                        (const_int 0))
                      (const_string "V4SF")
                    (ne (symbol_ref "TARGET_SSE_PARTIAL_REG_DEPENDENCY")
 (define_insn "*pushxf_nointeger"
   [(set (match_operand:XF 0 "push_operand" "=X,X,X")
        (match_operand:XF 1 "general_no_elim_operand" "f,Fo,*r"))]
-  "optimize_size"
+  "optimize_function_for_size_p (cfun)"
 {
   /* This insn should be already split before reg-stack.  */
   gcc_unreachable ();
 (define_insn "*pushxf_integer"
   [(set (match_operand:XF 0 "push_operand" "=<,<")
        (match_operand:XF 1 "general_no_elim_operand" "f,ro"))]
-  "!optimize_size"
+  "optimize_function_for_speed_p (cfun)"
 {
   /* This insn should be already split before reg-stack.  */
   gcc_unreachable ();
 (define_insn "*movxf_nointeger"
   [(set (match_operand:XF 0 "nonimmediate_operand" "=f,m,f,*r,o")
        (match_operand:XF 1 "general_operand" "fm,f,G,*roF,F*r"))]
-  "optimize_size
+  "optimize_function_for_size_p (cfun)
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))
    && (reload_in_progress || reload_completed
-       || (optimize_size && standard_80387_constant_p (operands[1]))
+       || standard_80387_constant_p (operands[1])
        || GET_CODE (operands[1]) != CONST_DOUBLE
        || memory_operand (operands[0], XFmode))"
 {
 (define_insn "*movxf_integer"
   [(set (match_operand:XF 0 "nonimmediate_operand" "=f,m,f,r,o")
        (match_operand:XF 1 "general_operand" "fm,f,G,roF,Fr"))]
-  "!optimize_size
+  "optimize_function_for_speed_p (cfun)
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))
    && (reload_in_progress || reload_completed
-       || (optimize_size && standard_80387_constant_p (operands[1]))
        || GET_CODE (operands[1]) != CONST_DOUBLE
        || memory_operand (operands[0], XFmode))"
 {
    (set (attr "mode")
         (cond [(eq_attr "alternative" "0,2")
                 (if_then_else
-                  (ne (symbol_ref "optimize_size")
+                  (ne (symbol_ref "optimize_function_for_size_p (cfun)")
                       (const_int 0))
                   (const_string "V4SF")
                   (const_string "TI"))
                 (if_then_else
                   (ior (ne (symbol_ref "TARGET_SSE_TYPELESS_STORES")
                            (const_int 0))
-                       (ne (symbol_ref "optimize_size")
+                       (ne (symbol_ref "optimize_function_for_size_p (cfun)")
                            (const_int 0)))
                   (const_string "V4SF")
                   (const_string "TI"))]
      (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "")))]
   ""
 {
-  if (TARGET_ZERO_EXTEND_WITH_AND && !optimize_size)
+  if (TARGET_ZERO_EXTEND_WITH_AND && optimize_function_for_speed_p (cfun))
     {
       operands[1] = force_reg (HImode, operands[1]);
       emit_insn (gen_zero_extendhisi2_and (operands[0], operands[1]));
   [(set (match_operand:SI 0 "register_operand" "=r")
      (zero_extend:SI (match_operand:HI 1 "register_operand" "0")))
    (clobber (reg:CC FLAGS_REG))]
-  "TARGET_ZERO_EXTEND_WITH_AND && !optimize_size"
+  "TARGET_ZERO_EXTEND_WITH_AND && optimize_function_for_speed_p (cfun)"
   "#"
   [(set_attr "type" "alu1")
    (set_attr "mode" "SI")])
   [(set (match_operand:SI 0 "register_operand" "")
        (zero_extend:SI (match_operand:HI 1 "register_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "reload_completed && TARGET_ZERO_EXTEND_WITH_AND && !optimize_size"
+  "reload_completed && TARGET_ZERO_EXTEND_WITH_AND
+   && optimize_function_for_speed_p (cfun)"
   [(parallel [(set (match_dup 0) (and:SI (match_dup 0) (const_int 65535)))
              (clobber (reg:CC FLAGS_REG))])]
   "")
 (define_insn "*zero_extendhisi2_movzwl"
   [(set (match_operand:SI 0 "register_operand" "=r")
      (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "rm")))]
-  "!TARGET_ZERO_EXTEND_WITH_AND || optimize_size"
+  "!TARGET_ZERO_EXTEND_WITH_AND
+   || optimize_function_for_size_p (cfun)"
   "movz{wl|x}\t{%1, %0|%0, %1}"
   [(set_attr "type" "imovx")
    (set_attr "mode" "SI")])
   [(set (match_operand:HI 0 "register_operand" "=r,?&q")
      (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "0,qm")))
    (clobber (reg:CC FLAGS_REG))]
-  "TARGET_ZERO_EXTEND_WITH_AND && !optimize_size"
+  "TARGET_ZERO_EXTEND_WITH_AND && optimize_function_for_speed_p (cfun)"
   "#"
   [(set_attr "type" "alu1")
    (set_attr "mode" "HI")])
   [(set (match_operand:HI 0 "register_operand" "=r,r")
      (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "qm,0")))
    (clobber (reg:CC FLAGS_REG))]
-  "!TARGET_ZERO_EXTEND_WITH_AND || optimize_size"
+  "!TARGET_ZERO_EXTEND_WITH_AND || optimize_function_for_size_p (cfun)"
   "#"
   [(set_attr "type" "imovx,alu1")
    (set_attr "mode" "HI")])
 (define_insn "*zero_extendqihi2_movzbl"
   [(set (match_operand:HI 0 "register_operand" "=r")
      (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "qm")))]
-  "(!TARGET_ZERO_EXTEND_WITH_AND || optimize_size) && reload_completed"
+  "(!TARGET_ZERO_EXTEND_WITH_AND || optimize_function_for_speed_p (cfun))
+   && reload_completed"
   "movz{bl|x}\t{%1, %k0|%k0, %1}"
   [(set_attr "type" "imovx")
    (set_attr "mode" "SI")])
        (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
   "reload_completed
-   && (!TARGET_ZERO_EXTEND_WITH_AND || optimize_size)
+   && (!TARGET_ZERO_EXTEND_WITH_AND
+       || optimize_function_for_size_p (cfun))
    && (!REG_P (operands[1]) || ANY_QI_REG_P (operands[1]))"
   [(set (match_operand:HI 0 "register_operand" "")
        (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "")))])
    (clobber (reg:CC FLAGS_REG))]
   "reload_completed
    && ANY_QI_REG_P (operands[0])
-   && (TARGET_ZERO_EXTEND_WITH_AND && !optimize_size)
+   && (TARGET_ZERO_EXTEND_WITH_AND
+       && optimize_function_for_speed_p (cfun))
    && !reg_overlap_mentioned_p (operands[0], operands[1])"
   [(set (match_dup 0) (const_int 0))
    (set (strict_low_part (match_dup 2)) (match_dup 1))]
   [(set (match_operand:SI 0 "register_operand" "=r,?&q")
      (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "0,qm")))
    (clobber (reg:CC FLAGS_REG))]
-  "TARGET_ZERO_EXTEND_WITH_AND && !optimize_size"
+  "TARGET_ZERO_EXTEND_WITH_AND && optimize_function_for_speed_p (cfun)"
   "#"
   [(set_attr "type" "alu1")
    (set_attr "mode" "SI")])
   [(set (match_operand:SI 0 "register_operand" "=r,r")
      (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "qm,0")))
    (clobber (reg:CC FLAGS_REG))]
-  "!TARGET_ZERO_EXTEND_WITH_AND || optimize_size"
+  "!TARGET_ZERO_EXTEND_WITH_AND || optimize_function_for_size_p (cfun)"
   "#"
   [(set_attr "type" "imovx,alu1")
    (set_attr "mode" "SI")])
 (define_insn "*zero_extendqisi2_movzbw"
   [(set (match_operand:SI 0 "register_operand" "=r")
      (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "qm")))]
-  "(!TARGET_ZERO_EXTEND_WITH_AND || optimize_size) && reload_completed"
+  "(!TARGET_ZERO_EXTEND_WITH_AND || optimize_function_for_size_p (cfun))
+   && reload_completed"
   "movz{bl|x}\t{%1, %0|%0, %1}"
   [(set_attr "type" "imovx")
    (set_attr "mode" "SI")])
        (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
   "reload_completed
-   && (!TARGET_ZERO_EXTEND_WITH_AND || optimize_size)
+   && (!TARGET_ZERO_EXTEND_WITH_AND || optimize_function_for_size_p (cfun))
    && (!REG_P (operands[1]) || ANY_QI_REG_P (operands[1]))"
   [(set (match_dup 0)
        (zero_extend:SI (match_dup 1)))])
   "reload_completed
    && ANY_QI_REG_P (operands[0])
    && (ANY_QI_REG_P (operands[1]) || MEM_P (operands[1]))
-   && (TARGET_ZERO_EXTEND_WITH_AND && !optimize_size)
+   && (TARGET_ZERO_EXTEND_WITH_AND && optimize_function_for_speed_p (cfun))
    && !reg_overlap_mentioned_p (operands[0], operands[1])"
   [(set (match_dup 0) (const_int 0))
    (set (strict_low_part (match_dup 2)) (match_dup 1))]
   emit_move_insn (operands[3], operands[1]);
 
   /* Generate a cltd if possible and doing so it profitable.  */
-  if ((optimize_size || TARGET_USE_CLTD)
+  if ((optimize_function_for_size_p (cfun) || TARGET_USE_CLTD)
       && true_regnum (operands[1]) == AX_REG
       && true_regnum (operands[2]) == DX_REG)
     {
     emit_move_insn (operands[3], operands[1]);
 
   /* Generate a cltd if possible and doing so it profitable.  */
-  if ((optimize_size || TARGET_USE_CLTD)
+  if ((optimize_function_for_size_p (cfun) || TARGET_USE_CLTD)
       && true_regnum (operands[3]) == AX_REG)
     {
       emit_insn (gen_ashrsi3_31 (operands[4], operands[3], GEN_INT (31)));
   [(set (match_operand:DF 0 "register_operand" "")
         (float_extend:DF
          (match_operand:SF 1 "nonimmediate_operand" "")))]
-  "(TARGET_USE_VECTOR_CONVERTS || TARGET_GENERIC) && !optimize_size
+  "(TARGET_USE_VECTOR_CONVERTS || TARGET_GENERIC)
+   && optimize_insn_for_speed_p ()
    && reload_completed && SSE_REG_P (operands[0])"
    [(set (match_dup 2)
         (float_extend:V2DF
   [(set (match_operand:SF 0 "register_operand" "")
         (float_truncate:SF
          (match_operand:DF 1 "nonimmediate_operand" "")))]
-  "(TARGET_USE_VECTOR_CONVERTS || TARGET_GENERIC) && !optimize_size
+  "(TARGET_USE_VECTOR_CONVERTS || TARGET_GENERIC)
+   && optimize_insn_for_speed_p ()
    && reload_completed && SSE_REG_P (operands[0])"
    [(set (match_dup 2)
         (vec_concat:V4SF
    (use (match_operand:<ssevecmode> 4  "nonimmediate_operand" "m,x"))
    (clobber (match_scratch:<ssevecmode> 1 "=x,&x"))
    (clobber (match_scratch:<ssevecmode> 2 "=x,x"))]
-  "!TARGET_64BIT && TARGET_SSE2 && TARGET_SSE_MATH && !optimize_size"
+  "!TARGET_64BIT && TARGET_SSE2 && TARGET_SSE_MATH
+   && optimize_function_for_speed_p (cfun)"
   "#"
   "&& reload_completed"
   [(const_int 0)]
   [(match_scratch:DF 2 "Y2")
    (set (match_operand:SSEMODEI24 0 "register_operand" "")
        (fix:SSEMODEI24 (match_operand:DF 1 "memory_operand" "")))]
-  "TARGET_AVOID_VECTOR_DECODE && !optimize_size"
+  "TARGET_AVOID_VECTOR_DECODE && optimize_insn_for_speed_p ()"
   [(set (match_dup 2) (match_dup 1))
    (set (match_dup 0) (fix:SSEMODEI24 (match_dup 2)))]
   "")
   [(match_scratch:SF 2 "x")
    (set (match_operand:SSEMODEI24 0 "register_operand" "")
        (fix:SSEMODEI24 (match_operand:SF 1 "memory_operand" "")))]
-  "TARGET_AVOID_VECTOR_DECODE && !optimize_size"
+  "TARGET_AVOID_VECTOR_DECODE && optimize_insn_for_speed_p ()"
   [(set (match_dup 2) (match_dup 1))
    (set (match_dup 0) (fix:SSEMODEI24 (match_dup 2)))]
   "")
     || ((<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
        && SSE_FLOAT_MODE_P (<X87MODEF:MODE>mode) && TARGET_SSE_MATH
        && ((<SSEMODEI24:MODE>mode == SImode
-            && TARGET_SSE2 && TARGET_USE_VECTOR_CONVERTS && !optimize_size
+            && TARGET_SSE2 && TARGET_USE_VECTOR_CONVERTS
+            && optimize_function_for_speed_p (cfun)
             && flag_trapping_math)
-           || !(TARGET_INTER_UNIT_CONVERSIONS || optimize_size))))
+           || !(TARGET_INTER_UNIT_CONVERSIONS
+                || optimize_function_for_size_p (cfun)))))
    && !(reload_completed || reload_in_progress)"
   "#"
   "&& 1"
      by passing DImode value through XMM registers.  */
   if (<SSEMODEI24:MODE>mode == DImode && !TARGET_64BIT 
       && TARGET_80387 && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES 
-      && !optimize_size)
+      && optimize_function_for_speed_p (cfun))
     {
       emit_insn (gen_floatdi<X87MODEF:mode>2_i387_with_xmm (operands[0],
                                                            operands[1],
          (match_operand:SI 1 "nonimmediate_operand" "m,?r,r,m,!x")))
    (clobber (match_operand:SI 2 "memory_operand" "=X,m,m,X,m"))]
   "TARGET_SSE2 && TARGET_MIX_SSE_I387
-   && TARGET_USE_VECTOR_CONVERTS && !optimize_size"
+   && TARGET_USE_VECTOR_CONVERTS && optimize_function_for_speed_p (cfun)"
   "#"
   [(set_attr "type" "fmov,multi,sseicvt,sseicvt,sseicvt")
    (set_attr "mode" "<MODE>,<MODE>,<MODE>,<MODE>,<ssevecmode>")
   [(set (match_operand:MODEF 0 "register_operand" "=f,x")
        (float:MODEF (match_operand:SI 1 "memory_operand" "m,m")))]
   "TARGET_SSE2 && TARGET_MIX_SSE_I387
-   && TARGET_USE_VECTOR_CONVERTS && !optimize_size"
+   && TARGET_USE_VECTOR_CONVERTS && optimize_function_for_speed_p (cfun)"
   "@
    fild%z1\t%1
    #"
    (clobber (match_operand:SSEMODEI24 2 "memory_operand" ""))]
   "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
    && SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_MIX_SSE_I387
-   && !(TARGET_INTER_UNIT_CONVERSIONS || optimize_size)
+   && !(TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))
    && reload_completed
    && (SSE_REG_P (operands[0])
        || (GET_CODE (operands[0]) == SUBREG
          (match_operand:SSEMODEI24 1 "nonimmediate_operand" "m,r,m")))]
   "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
    && SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_MIX_SSE_I387
-   && (TARGET_INTER_UNIT_CONVERSIONS || optimize_size)"
+   && (TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))"
   "@
    fild%z1\t%1
    %vcvtsi2s<MODEF:ssemodefsuffix><SSEMODEI24:rex64suffix>\t{%1, %d0|%d0, %1}
          (match_operand:SSEMODEI24 1 "memory_operand" "m,m")))]
   "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
    && SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_MIX_SSE_I387
-   && !(TARGET_INTER_UNIT_CONVERSIONS || optimize_size)"
+   && !(TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))"
   "@
    fild%z1\t%1
    %vcvtsi2s<MODEF:ssemodefsuffix><SSEMODEI24:rex64suffix>\t{%1, %d0|%d0, %1}"
          (match_operand:SI 1 "nonimmediate_operand" "r,m,!x")))
    (clobber (match_operand:SI 2 "memory_operand" "=m,X,m"))]
   "TARGET_SSE2 && TARGET_SSE_MATH
-   && TARGET_USE_VECTOR_CONVERTS && !optimize_size"
+   && TARGET_USE_VECTOR_CONVERTS && optimize_function_for_speed_p (cfun)"
   "#"
   [(set_attr "type" "sseicvt")
    (set_attr "mode" "<MODE>,<MODE>,<ssevecmode>")
   [(set (match_operand:MODEF 0 "register_operand" "=x")
        (float:MODEF (match_operand:SI 1 "memory_operand" "m")))]
   "TARGET_SSE2 && TARGET_SSE_MATH
-   && TARGET_USE_VECTOR_CONVERTS && !optimize_size"
+   && TARGET_USE_VECTOR_CONVERTS && optimize_function_for_speed_p (cfun)"
   "#"
   [(set_attr "type" "sseicvt")
    (set_attr "mode" "<MODE>")
        (float:MODEF (match_operand:SI 1 "register_operand" "")))
    (clobber (match_operand:SI 2 "memory_operand" ""))]
   "TARGET_SSE2 && TARGET_SSE_MATH
-   && TARGET_USE_VECTOR_CONVERTS && !optimize_size
+   && TARGET_USE_VECTOR_CONVERTS && optimize_function_for_speed_p (cfun)
    && reload_completed
    && (SSE_REG_P (operands[0])
        || (GET_CODE (operands[0]) == SUBREG
        (float:MODEF (match_operand:SI 1 "memory_operand" "")))
    (clobber (match_operand:SI 2 "memory_operand" ""))]
   "TARGET_SSE2 && TARGET_SSE_MATH
-   && TARGET_USE_VECTOR_CONVERTS && !optimize_size
+   && TARGET_USE_VECTOR_CONVERTS && optimize_function_for_speed_p (cfun)
    && reload_completed
    && (SSE_REG_P (operands[0])
        || (GET_CODE (operands[0]) == SUBREG
   [(set (match_operand:MODEF 0 "register_operand" "")
        (float:MODEF (match_operand:SI 1 "register_operand" "")))]
   "TARGET_SSE2 && TARGET_SSE_MATH
-   && TARGET_USE_VECTOR_CONVERTS && !optimize_size
+   && TARGET_USE_VECTOR_CONVERTS && optimize_function_for_speed_p (cfun)
    && reload_completed
    && (SSE_REG_P (operands[0])
        || (GET_CODE (operands[0]) == SUBREG
   [(set (match_operand:MODEF 0 "register_operand" "")
        (float:MODEF (match_operand:SI 1 "memory_operand" "")))]
   "TARGET_SSE2 && TARGET_SSE_MATH
-   && TARGET_USE_VECTOR_CONVERTS && !optimize_size
+   && TARGET_USE_VECTOR_CONVERTS && optimize_function_for_speed_p (cfun)
    && reload_completed
    && (SSE_REG_P (operands[0])
        || (GET_CODE (operands[0]) == SUBREG
          (match_operand:SSEMODEI24 1 "nonimmediate_operand" "r,m")))]
   "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
    && SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH
-   && (TARGET_INTER_UNIT_CONVERSIONS || optimize_size)"
+   && (TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))"
   "%vcvtsi2s<MODEF:ssemodefsuffix><SSEMODEI24:rex64suffix>\t{%1, %d0|%d0, %1}"
   [(set_attr "type" "sseicvt")
    (set_attr "prefix" "maybe_vex")
    (clobber (match_operand:SSEMODEI24 2 "memory_operand" ""))]
   "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
    && SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH
-   && (TARGET_INTER_UNIT_CONVERSIONS || optimize_size)
+   && (TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))
    && reload_completed
    && (SSE_REG_P (operands[0])
        || (GET_CODE (operands[0]) == SUBREG
          (match_operand:SSEMODEI24 1 "memory_operand" "m")))]
   "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
    && SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH
-   && !(TARGET_INTER_UNIT_CONVERSIONS || optimize_size)"
+   && !(TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))"
   "%vcvtsi2s<MODEF:ssemodefsuffix><SSEMODEI24:rex64suffix>\t{%1, %d0|%d0, %1}"
   [(set_attr "type" "sseicvt")
    (set_attr "prefix" "maybe_vex")
    (clobber (match_operand:SSEMODEI24 2 "memory_operand" ""))]
   "(<SSEMODEI24:MODE>mode != DImode || TARGET_64BIT)
    && SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH
-   && !(TARGET_INTER_UNIT_CONVERSIONS || optimize_size)
+   && !(TARGET_INTER_UNIT_CONVERSIONS || optimize_function_for_size_p (cfun))
    && reload_completed
    && (SSE_REG_P (operands[0])
        || (GET_CODE (operands[0]) == SUBREG
    (clobber (match_scratch:V4SI 4 "=X,x"))
    (clobber (match_operand:DI 2 "memory_operand" "=X,m"))]
   "TARGET_80387 && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES
-   && !TARGET_64BIT && !optimize_size"
+   && !TARGET_64BIT && optimize_function_for_speed_p (cfun)"
   "#"
   [(set_attr "type" "multi")
    (set_attr "mode" "<X87MODEF:MODE>")
    (clobber (match_scratch:V4SI 4 ""))
    (clobber (match_operand:DI 2 "memory_operand" ""))]
   "TARGET_80387 && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES
-   && !TARGET_64BIT && !optimize_size
+   && !TARGET_64BIT && optimize_function_for_speed_p (cfun)
    && reload_completed
    && FP_REG_P (operands[0])"
   [(set (match_dup 2) (match_dup 3))
    (clobber (match_scratch:V4SI 4 ""))
    (clobber (match_operand:DI 2 "memory_operand" ""))]
   "TARGET_80387 && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES
-   && !TARGET_64BIT && !optimize_size
+   && !TARGET_64BIT && optimize_function_for_speed_p (cfun)
    && reload_completed
    && FP_REG_P (operands[0])"
   [(set (match_dup 0) (float:X87MODEF (match_dup 1)))]
              (match_operand 3 "immediate_operand" "i")))]
   "(GET_MODE (operands[0]) == QImode || GET_MODE (operands[0]) == HImode
     || (TARGET_64BIT && GET_MODE (operands[0]) == SImode))
-   && (!TARGET_PARTIAL_REG_STALL || optimize_size)
+   && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && GET_MODE (operands[0]) == GET_MODE (operands[1])
    && GET_MODE (operands[0]) == GET_MODE (operands[2])
    && (GET_MODE (operands[0]) == GET_MODE (operands[3])
              (match_operand 3 "nonmemory_operand" "ri")))]
   "(GET_MODE (operands[0]) == QImode || GET_MODE (operands[0]) == HImode
     || (TARGET_64BIT && GET_MODE (operands[0]) == SImode))
-   && (!TARGET_PARTIAL_REG_STALL || optimize_size)
+   && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && GET_MODE (operands[0]) == GET_MODE (operands[1])
    && (GET_MODE (operands[0]) == GET_MODE (operands[3])
        || GET_MODE (operands[3]) == VOIDmode)"
              (match_operand 4 "immediate_operand" "i")))]
   "(GET_MODE (operands[0]) == QImode || GET_MODE (operands[0]) == HImode
     || (TARGET_64BIT && GET_MODE (operands[0]) == SImode))
-   && (!TARGET_PARTIAL_REG_STALL || optimize_size)
+   && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && GET_MODE (operands[0]) == GET_MODE (operands[1])
    && GET_MODE (operands[0]) == GET_MODE (operands[3])"
   "#"
        (plus:QI (match_dup 0)
                 (match_operand:QI 1 "general_operand" "qn,qnm")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
 {
   switch (get_attr_type (insn))
        (minus:QI (match_dup 0)
                  (match_operand:QI 1 "general_operand" "qn,qm")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "sub{b}\t{%1, %0|%0, %1}"
   [(set_attr "type" "alu1")
    (set (match_operand:DI 1 "register_operand" "=&d,&d")
        (mod:DI (match_dup 2) (match_dup 3)))
    (clobber (reg:CC FLAGS_REG))]
-  "TARGET_64BIT && !optimize_size && !TARGET_USE_CLTD"
+  "TARGET_64BIT && optimize_function_for_speed_p (cfun) && !TARGET_USE_CLTD"
   "#"
   [(set_attr "type" "multi")])
 
    (set (match_operand:DI 1 "register_operand" "=&d")
        (mod:DI (match_dup 2) (match_dup 3)))
    (clobber (reg:CC FLAGS_REG))]
-  "TARGET_64BIT && (optimize_size || TARGET_USE_CLTD)"
+  "TARGET_64BIT && (optimize_function_for_size_p (cfun) || TARGET_USE_CLTD)"
   "#"
   [(set_attr "type" "multi")])
 
              (clobber (reg:CC FLAGS_REG))])]
 {
   /* Avoid use of cltd in favor of a mov+shift.  */
-  if (!TARGET_USE_CLTD && !optimize_size)
+  if (!TARGET_USE_CLTD && optimize_function_for_speed_p (cfun))
     {
       if (true_regnum (operands[1]))
         emit_move_insn (operands[0], operands[1]);
    (set (match_operand:SI 1 "register_operand" "=&d,&d")
        (mod:SI (match_dup 2) (match_dup 3)))
    (clobber (reg:CC FLAGS_REG))]
-  "!optimize_size && !TARGET_USE_CLTD"
+  "optimize_function_for_speed_p (cfun) && !TARGET_USE_CLTD"
   "#"
   [(set_attr "type" "multi")])
 
    (set (match_operand:SI 1 "register_operand" "=&d")
        (mod:SI (match_dup 2) (match_dup 3)))
    (clobber (reg:CC FLAGS_REG))]
-  "optimize_size || TARGET_USE_CLTD"
+  "optimize_function_for_size_p (cfun) || TARGET_USE_CLTD"
   "#"
   [(set_attr "type" "multi")])
 
              (clobber (reg:CC FLAGS_REG))])]
 {
   /* Avoid use of cltd in favor of a mov+shift.  */
-  if (!TARGET_USE_CLTD && !optimize_size)
+  if (!TARGET_USE_CLTD && optimize_function_for_speed_p (cfun))
     {
       if (true_regnum (operands[1]))
         emit_move_insn (operands[0], operands[1]);
        (and (match_dup 0)
             (const_int -65536)))
    (clobber (reg:CC FLAGS_REG))]
-  "optimize_size || (TARGET_FAST_PREFIX && !TARGET_PARTIAL_REG_STALL)"
+  "optimize_function_for_size_p (cfun) || (TARGET_FAST_PREFIX && !TARGET_PARTIAL_REG_STALL)"
   [(set (strict_low_part (match_dup 1)) (const_int 0))]
   "operands[1] = gen_lowpart (HImode, operands[0]);")
 
        (and (match_dup 0)
             (const_int -256)))
    (clobber (reg:CC FLAGS_REG))]
-  "(optimize_size || !TARGET_PARTIAL_REG_STALL) && reload_completed"
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_REG_STALL) && reload_completed"
   [(set (strict_low_part (match_dup 1)) (const_int 0))]
   "operands[1] = gen_lowpart (QImode, operands[0]);")
 
        (and (match_dup 0)
             (const_int -65281)))
    (clobber (reg:CC FLAGS_REG))]
-  "(optimize_size || !TARGET_PARTIAL_REG_STALL) && reload_completed"
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_REG_STALL) && reload_completed"
   [(parallel [(set (zero_extract:SI (match_dup 0)
                                    (const_int 8)
                                    (const_int 8))
        (and:QI (match_dup 0)
                (match_operand:QI 1 "general_operand" "qn,qmn")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "and{b}\t{%1, %0|%0, %1}"
   [(set_attr "type" "alu1")
                 (const_int 0)))
    (set (strict_low_part (match_dup 0))
        (and:QI (match_dup 0) (match_dup 1)))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCNOmode)
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "and{b}\t{%1, %0|%0, %1}"
    (clobber (reg:CC FLAGS_REG))]
    "reload_completed
     && QI_REG_P (operands[0])
-    && (!TARGET_PARTIAL_REG_STALL || optimize_size)
+    && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
     && !(~INTVAL (operands[2]) & ~(255 << 8))
     && GET_MODE (operands[0]) != QImode"
   [(parallel [(set (zero_extract:SI (match_dup 0) (const_int 8) (const_int 8))
    (clobber (reg:CC FLAGS_REG))]
    "reload_completed
     && ANY_QI_REG_P (operands[0])
-    && (!TARGET_PARTIAL_REG_STALL || optimize_size)
+    && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
     && !(~INTVAL (operands[2]) & ~255)
     && !(INTVAL (operands[2]) & 128)
     && GET_MODE (operands[0]) != QImode"
        (ior:QI (match_dup 0)
                (match_operand:QI 1 "general_operand" "qmn,qn")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "or{b}\t{%1, %0|%0, %1}"
   [(set_attr "type" "alu1")
                 (const_int 0)))
    (set (strict_low_part (match_dup 0))
        (ior:QI (match_dup 0) (match_dup 1)))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCNOmode)
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "or{b}\t{%1, %0|%0, %1}"
   [(set_attr "type" "alu")
    (set_attr "mode" "QI")])
 
-(define_insn "iorqi_ext_0"
+(define_insn "*iorqi_ext_0"
   [(set (zero_extract:SI (match_operand 0 "ext_register_operand" "=Q")
                         (const_int 8)
                         (const_int 8))
            (const_int 8))
          (match_operand 2 "const_int_operand" "n")))
    (clobber (reg:CC FLAGS_REG))]
-  "(!TARGET_PARTIAL_REG_STALL || optimize_size)"
+  "(!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))"
   "or{b}\t{%2, %h0|%h0, %2}"
   [(set_attr "type" "alu")
    (set_attr "length_immediate" "1")
            (match_operand:QI 2 "general_operand" "Qm"))))
    (clobber (reg:CC FLAGS_REG))]
   "!TARGET_64BIT
-   && (!TARGET_PARTIAL_REG_STALL || optimize_size)"
+   && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))"
   "or{b}\t{%2, %h0|%h0, %2}"
   [(set_attr "type" "alu")
    (set_attr "length_immediate" "0")
            (match_operand 2 "ext_register_operand" "Q"))))
    (clobber (reg:CC FLAGS_REG))]
   "TARGET_64BIT
-   && (!TARGET_PARTIAL_REG_STALL || optimize_size)"
+   && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))"
   "or{b}\t{%2, %h0|%h0, %2}"
   [(set_attr "type" "alu")
    (set_attr "length_immediate" "0")
                           (const_int 8)
                           (const_int 8))))
    (clobber (reg:CC FLAGS_REG))]
-  "(!TARGET_PARTIAL_REG_STALL || optimize_size)"
+  "(!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))"
   "ior{b}\t{%h2, %h0|%h0, %h2}"
   [(set_attr "type" "alu")
    (set_attr "length_immediate" "0")
    (clobber (reg:CC FLAGS_REG))]
    "reload_completed
     && QI_REG_P (operands[0])
-    && (!TARGET_PARTIAL_REG_STALL || optimize_size)
+    && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
     && !(INTVAL (operands[2]) & ~(255 << 8))
     && GET_MODE (operands[0]) != QImode"
   [(parallel [(set (zero_extract:SI (match_dup 0) (const_int 8) (const_int 8))
    (clobber (reg:CC FLAGS_REG))]
    "reload_completed
     && ANY_QI_REG_P (operands[0])
-    && (!TARGET_PARTIAL_REG_STALL || optimize_size)
+    && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
     && !(INTVAL (operands[2]) & ~255)
     && (INTVAL (operands[2]) & 128)
     && GET_MODE (operands[0]) != QImode"
        (xor:QI (match_dup 0)
                (match_operand:QI 1 "general_operand" "qn,qmn")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "xor{b}\t{%1, %0|%0, %1}"
   [(set_attr "type" "alu1")
    (set_attr "mode" "QI")])
 
-(define_insn "xorqi_ext_0"
+(define_insn "*xorqi_ext_0"
   [(set (zero_extract:SI (match_operand 0 "ext_register_operand" "=Q")
                         (const_int 8)
                         (const_int 8))
            (const_int 8))
          (match_operand 2 "const_int_operand" "n")))
    (clobber (reg:CC FLAGS_REG))]
-  "(!TARGET_PARTIAL_REG_STALL || optimize_size)"
+  "(!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))"
   "xor{b}\t{%2, %h0|%h0, %2}"
   [(set_attr "type" "alu")
    (set_attr "length_immediate" "1")
            (match_operand:QI 2 "general_operand" "Qm"))))
    (clobber (reg:CC FLAGS_REG))]
   "!TARGET_64BIT
-   && (!TARGET_PARTIAL_REG_STALL || optimize_size)"
+   && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))"
   "xor{b}\t{%2, %h0|%h0, %2}"
   [(set_attr "type" "alu")
    (set_attr "length_immediate" "0")
            (match_operand 2 "ext_register_operand" "Q"))))
    (clobber (reg:CC FLAGS_REG))]
   "TARGET_64BIT
-   && (!TARGET_PARTIAL_REG_STALL || optimize_size)"
+   && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))"
   "xor{b}\t{%2, %h0|%h0, %2}"
   [(set_attr "type" "alu")
    (set_attr "length_immediate" "0")
                           (const_int 8)
                           (const_int 8))))
    (clobber (reg:CC FLAGS_REG))]
-  "(!TARGET_PARTIAL_REG_STALL || optimize_size)"
+  "(!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))"
   "xor{b}\t{%h2, %h0|%h0, %h2}"
   [(set_attr "type" "alu")
    (set_attr "length_immediate" "0")
                 (const_int 0)))
    (set (strict_low_part (match_dup 0))
        (xor:QI (match_dup 0) (match_dup 1)))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCNOmode)
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "xor{b}\t{%1, %0|%0, %1}"
    (clobber (reg:CC FLAGS_REG))]
    "reload_completed
     && QI_REG_P (operands[0])
-    && (!TARGET_PARTIAL_REG_STALL || optimize_size)
+    && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
     && !(INTVAL (operands[2]) & ~(255 << 8))
     && GET_MODE (operands[0]) != QImode"
   [(parallel [(set (zero_extract:SI (match_dup 0) (const_int 8) (const_int 8))
    (clobber (reg:CC FLAGS_REG))]
    "reload_completed
     && ANY_QI_REG_P (operands[0])
-    && (!TARGET_PARTIAL_REG_STALL || optimize_size)
+    && (!TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
     && !(INTVAL (operands[2]) & ~255)
     && (INTVAL (operands[2]) & 128)
     && GET_MODE (operands[0]) != QImode"
       if (REG_P (operands[2]))
        return "sal{q}\t{%b2, %0|%0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{q}\t%0";
       else
        return "sal{q}\t{%2, %0|%0, %2}";
    (set (match_operand:DI 0 "nonimmediate_operand" "=rm")
        (ashift:DI (match_dup 1) (match_dup 2)))]
   "TARGET_64BIT
-   && (optimize_size
+   && (optimize_function_for_size_p (cfun)
        || !TARGET_PARTIAL_FLAG_REG_STALL
        || (operands[2] == const1_rtx
           && (TARGET_SHIFT1
       if (REG_P (operands[2]))
        return "sal{q}\t{%b2, %0|%0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{q}\t%0";
       else
        return "sal{q}\t{%2, %0|%0, %2}";
          (const_int 0)))
    (clobber (match_scratch:DI 0 "=r"))]
   "TARGET_64BIT
-   && (optimize_size
+   && (optimize_function_for_size_p (cfun)
        || !TARGET_PARTIAL_FLAG_REG_STALL
        || (operands[2] == const1_rtx
           && (TARGET_SHIFT1
       if (REG_P (operands[2]))
        return "sal{q}\t{%b2, %0|%0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{q}\t%0";
       else
        return "sal{q}\t{%2, %0|%0, %2}";
       if (REG_P (operands[2]))
        return "sal{l}\t{%b2, %0|%0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{l}\t%0";
       else
        return "sal{l}\t{%2, %0|%0, %2}";
       if (REG_P (operands[2]))
        return "sal{l}\t{%b2, %k0|%k0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{l}\t%k0";
       else
        return "sal{l}\t{%2, %k0|%k0, %2}";
          (const_int 0)))
    (set (match_operand:SI 0 "nonimmediate_operand" "=rm")
        (ashift:SI (match_dup 1) (match_dup 2)))]
-   "(optimize_size
+   "(optimize_function_for_size_p (cfun)
      || !TARGET_PARTIAL_FLAG_REG_STALL
      || (operands[2] == const1_rtx
         && (TARGET_SHIFT1
       if (REG_P (operands[2]))
        return "sal{l}\t{%b2, %0|%0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{l}\t%0";
       else
        return "sal{l}\t{%2, %0|%0, %2}";
                     (match_operand:QI 2 "const_1_to_31_operand" "I"))
          (const_int 0)))
    (clobber (match_scratch:SI 0 "=r"))]
-  "(optimize_size
+  "(optimize_function_for_size_p (cfun)
     || !TARGET_PARTIAL_FLAG_REG_STALL
     || (operands[2] == const1_rtx
        && (TARGET_SHIFT1
       if (REG_P (operands[2]))
        return "sal{l}\t{%b2, %0|%0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{l}\t%0";
       else
        return "sal{l}\t{%2, %0|%0, %2}";
    (set (match_operand:DI 0 "register_operand" "=r")
        (zero_extend:DI (ashift:SI (match_dup 1) (match_dup 2))))]
   "TARGET_64BIT
-   && (optimize_size
+   && (optimize_function_for_size_p (cfun)
        || !TARGET_PARTIAL_FLAG_REG_STALL
        || (operands[2] == const1_rtx
           && (TARGET_SHIFT1
       if (REG_P (operands[2]))
        return "sal{l}\t{%b2, %k0|%k0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{l}\t%k0";
       else
        return "sal{l}\t{%2, %k0|%k0, %2}";
       if (REG_P (operands[2]))
        return "sal{w}\t{%b2, %0|%0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{w}\t%0";
       else
        return "sal{w}\t{%2, %0|%0, %2}";
       if (REG_P (operands[2]))
        return "sal{w}\t{%b2, %0|%0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{w}\t%0";
       else
        return "sal{w}\t{%2, %0|%0, %2}";
          (const_int 0)))
    (set (match_operand:HI 0 "nonimmediate_operand" "=rm")
        (ashift:HI (match_dup 1) (match_dup 2)))]
-  "(optimize_size
+  "(optimize_function_for_size_p (cfun)
     || !TARGET_PARTIAL_FLAG_REG_STALL
     || (operands[2] == const1_rtx
        && (TARGET_SHIFT1
       if (REG_P (operands[2]))
        return "sal{w}\t{%b2, %0|%0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{w}\t%0";
       else
        return "sal{w}\t{%2, %0|%0, %2}";
                     (match_operand:QI 2 "const_1_to_31_operand" "I"))
          (const_int 0)))
    (clobber (match_scratch:HI 0 "=r"))]
-  "(optimize_size
+  "(optimize_function_for_size_p (cfun)
     || !TARGET_PARTIAL_FLAG_REG_STALL
     || (operands[2] == const1_rtx
        && (TARGET_SHIFT1
       if (REG_P (operands[2]))
        return "sal{w}\t{%b2, %0|%0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{w}\t%0";
       else
        return "sal{w}\t{%2, %0|%0, %2}";
            return "sal{b}\t{%b2, %0|%0, %b2}";
        }
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        {
          if (get_attr_mode (insn) == MODE_SI)
            return "sal{l}\t%0";
            return "sal{b}\t{%b2, %0|%0, %b2}";
        }
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        {
          if (get_attr_mode (insn) == MODE_SI)
            return "sal{l}\t%0";
          (const_int 0)))
    (set (match_operand:QI 0 "nonimmediate_operand" "=qm")
        (ashift:QI (match_dup 1) (match_dup 2)))]
-  "(optimize_size
+  "(optimize_function_for_size_p (cfun)
     || !TARGET_PARTIAL_FLAG_REG_STALL
     || (operands[2] == const1_rtx
        && (TARGET_SHIFT1
       if (REG_P (operands[2]))
        return "sal{b}\t{%b2, %0|%0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{b}\t%0";
       else
        return "sal{b}\t{%2, %0|%0, %2}";
                     (match_operand:QI 2 "const_1_to_31_operand" "I"))
          (const_int 0)))
    (clobber (match_scratch:QI 0 "=q"))]
-  "(optimize_size
+  "(optimize_function_for_size_p (cfun)
     || !TARGET_PARTIAL_FLAG_REG_STALL
     || (operands[2] == const1_rtx
        && (TARGET_SHIFT1
       if (REG_P (operands[2]))
        return "sal{b}\t{%b2, %0|%0, %b2}";
       else if (operands[2] == const1_rtx
-              && (TARGET_SHIFT1 || optimize_size))
+              && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun)))
        return "sal{b}\t%0";
       else
        return "sal{b}\t{%2, %0|%0, %2}";
                     (match_operand:DI 2 "const_int_operand" "i,i")))
    (clobber (reg:CC FLAGS_REG))]
   "TARGET_64BIT && INTVAL (operands[2]) == 63
-   && (TARGET_USE_CLTD || optimize_size)
+   && (TARGET_USE_CLTD || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ASHIFTRT, DImode, operands)"
   "@
    {cqto|cqo}
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ASHIFTRT, DImode, operands)"
   "sar{q}\t%0"
   [(set_attr "type" "ishift")
    (set (match_operand:DI 0 "nonimmediate_operand" "=rm")
        (ashiftrt:DI (match_dup 1) (match_dup 2)))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, DImode, operands)"
   "sar{q}\t%0"
          (const_int 0)))
    (clobber (match_scratch:DI 0 "=r"))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, DImode, operands)"
   "sar{q}\t%0"
    (set (match_operand:DI 0 "nonimmediate_operand" "=rm")
        (ashiftrt:DI (match_dup 1) (match_dup 2)))]
   "TARGET_64BIT
-   && (optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+   && (optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, DImode, operands)"
   "sar{q}\t{%2, %0|%0, %2}"
          (const_int 0)))
    (clobber (match_scratch:DI 0 "=r"))]
   "TARGET_64BIT
-   && (optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+   && (optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, DImode, operands)"
   "sar{q}\t{%2, %0|%0, %2}"
   DONE;
 })
 
-(define_insn "ashrsi3_31"
+(define_expand "ashrsi3_31"
+  [(parallel [(set (match_operand:SI 0 "nonimmediate_operand" "=*d,rm")
+                  (ashiftrt:SI (match_operand:SI 1 "nonimmediate_operand" "*a,0")
+                               (match_operand:SI 2 "const_int_operand" "i,i")))
+              (clobber (reg:CC FLAGS_REG))])]
+  "")
+
+(define_insn "*ashrsi3_31"
   [(set (match_operand:SI 0 "nonimmediate_operand" "=*d,rm")
        (ashiftrt:SI (match_operand:SI 1 "nonimmediate_operand" "*a,0")
                     (match_operand:SI 2 "const_int_operand" "i,i")))
    (clobber (reg:CC FLAGS_REG))]
-  "INTVAL (operands[2]) == 31 && (TARGET_USE_CLTD || optimize_size)
+  "INTVAL (operands[2]) == 31
+   && (TARGET_USE_CLTD || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ASHIFTRT, SImode, operands)"
   "@
    {cltd|cdq}
        (zero_extend:DI (ashiftrt:SI (match_operand:SI 1 "register_operand" "*a,0")
                                     (match_operand:SI 2 "const_int_operand" "i,i"))))
    (clobber (reg:CC FLAGS_REG))]
-  "TARGET_64BIT && (TARGET_USE_CLTD || optimize_size)
+  "TARGET_64BIT && (TARGET_USE_CLTD || optimize_function_for_size_p (cfun))
    && INTVAL (operands[2]) == 31
    && ix86_binary_operator_ok (ASHIFTRT, SImode, operands)"
   "@
        (ashiftrt:SI (match_operand:SI 1 "nonimmediate_operand" "0")
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ASHIFTRT, SImode, operands)"
   "sar{l}\t%0"
   [(set_attr "type" "ishift")
                                     (match_operand:QI 2 "const1_operand" ""))))
    (clobber (reg:CC FLAGS_REG))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ASHIFTRT, SImode, operands)"
   "sar{l}\t%k0"
   [(set_attr "type" "ishift")
          (const_int 0)))
    (set (match_operand:SI 0 "nonimmediate_operand" "=rm")
        (ashiftrt:SI (match_dup 1) (match_dup 2)))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, SImode, operands)"
   "sar{l}\t%0"
                       (match_operand:QI 2 "const1_operand" ""))
          (const_int 0)))
    (clobber (match_scratch:SI 0 "=r"))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, SImode, operands)"
   "sar{l}\t%0"
    (set (match_operand:DI 0 "register_operand" "=r")
        (zero_extend:DI (ashiftrt:SI (match_dup 1) (match_dup 2))))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCmode)
    && ix86_binary_operator_ok (ASHIFTRT, SImode, operands)"
   "sar{l}\t%k0"
          (const_int 0)))
    (set (match_operand:SI 0 "nonimmediate_operand" "=rm")
        (ashiftrt:SI (match_dup 1) (match_dup 2)))]
-  "(optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, SImode, operands)"
   "sar{l}\t{%2, %0|%0, %2}"
                       (match_operand:QI 2 "const_1_to_31_operand" "I"))
          (const_int 0)))
    (clobber (match_scratch:SI 0 "=r"))]
-  "(optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, SImode, operands)"
   "sar{l}\t{%2, %0|%0, %2}"
    (set (match_operand:DI 0 "register_operand" "=r")
        (zero_extend:DI (ashiftrt:SI (match_dup 1) (match_dup 2))))]
   "TARGET_64BIT
-   && (optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+   && (optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, SImode, operands)"
   "sar{l}\t{%2, %k0|%k0, %2}"
        (ashiftrt:HI (match_operand:HI 1 "nonimmediate_operand" "0")
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ASHIFTRT, HImode, operands)"
   "sar{w}\t%0"
   [(set_attr "type" "ishift")
          (const_int 0)))
    (set (match_operand:HI 0 "nonimmediate_operand" "=rm")
        (ashiftrt:HI (match_dup 1) (match_dup 2)))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, HImode, operands)"
   "sar{w}\t%0"
                       (match_operand:QI 2 "const1_operand" ""))
          (const_int 0)))
    (clobber (match_scratch:HI 0 "=r"))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, HImode, operands)"
   "sar{w}\t%0"
          (const_int 0)))
    (set (match_operand:HI 0 "nonimmediate_operand" "=rm")
        (ashiftrt:HI (match_dup 1) (match_dup 2)))]
-  "(optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, HImode, operands)"
   "sar{w}\t{%2, %0|%0, %2}"
                       (match_operand:QI 2 "const_1_to_31_operand" "I"))
          (const_int 0)))
    (clobber (match_scratch:HI 0 "=r"))]
-  "(optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, HImode, operands)"
   "sar{w}\t{%2, %0|%0, %2}"
        (ashiftrt:QI (match_operand:QI 1 "nonimmediate_operand" "0")
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ASHIFTRT, QImode, operands)"
   "sar{b}\t%0"
   [(set_attr "type" "ishift")
        (ashiftrt:QI (match_dup 0)
                     (match_operand:QI 1 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
-   && (TARGET_SHIFT1 || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ASHIFTRT, QImode, operands)"
   "sar{b}\t%0"
   [(set_attr "type" "ishift1")
        (ashiftrt:QI (match_dup 0)
                     (match_operand:QI 1 "nonmemory_operand" "I,c")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "@
    sar{b}\t{%1, %0|%0, %1}
          (const_int 0)))
    (set (match_operand:QI 0 "nonimmediate_operand" "=qm")
        (ashiftrt:QI (match_dup 1) (match_dup 2)))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, QImode, operands)"
   "sar{b}\t%0"
                       (match_operand:QI 2 "const1_operand" ""))
          (const_int 0)))
    (clobber (match_scratch:QI 0 "=q"))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, QImode, operands)"
   "sar{b}\t%0"
          (const_int 0)))
    (set (match_operand:QI 0 "nonimmediate_operand" "=qm")
        (ashiftrt:QI (match_dup 1) (match_dup 2)))]
-  "(optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, QImode, operands)"
   "sar{b}\t{%2, %0|%0, %2}"
                       (match_operand:QI 2 "const_1_to_31_operand" "I"))
          (const_int 0)))
    (clobber (match_scratch:QI 0 "=q"))]
-  "(optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (ASHIFTRT, QImode, operands)"
   "sar{b}\t{%2, %0|%0, %2}"
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{q}\t%0"
   [(set_attr "type" "ishift")
    (set (match_operand:DI 0 "nonimmediate_operand" "=rm")
        (lshiftrt:DI (match_dup 1) (match_dup 2)))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{q}\t%0"
          (const_int 0)))
    (clobber (match_scratch:DI 0 "=r"))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{q}\t%0"
    (set (match_operand:DI 0 "nonimmediate_operand" "=rm")
        (lshiftrt:DI (match_dup 1) (match_dup 2)))]
   "TARGET_64BIT
-   && (optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+   && (optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{q}\t{%2, %0|%0, %2}"
          (const_int 0)))
    (clobber (match_scratch:DI 0 "=r"))]
   "TARGET_64BIT
-   && (optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+   && (optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{q}\t{%2, %0|%0, %2}"
        (lshiftrt:SI (match_operand:SI 1 "nonimmediate_operand" "0")
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{l}\t%0"
   [(set_attr "type" "ishift")
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{l}\t%k0"
   [(set_attr "type" "ishift")
          (const_int 0)))
    (set (match_operand:SI 0 "nonimmediate_operand" "=rm")
        (lshiftrt:SI (match_dup 1) (match_dup 2)))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{l}\t%0"
                       (match_operand:QI 2 "const1_operand" ""))
          (const_int 0)))
    (clobber (match_scratch:SI 0 "=r"))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{l}\t%0"
    (set (match_operand:DI 0 "register_operand" "=r")
        (lshiftrt:DI (zero_extend:DI (match_dup 1)) (match_dup 2)))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{l}\t%k0"
          (const_int 0)))
    (set (match_operand:SI 0 "nonimmediate_operand" "=rm")
        (lshiftrt:SI (match_dup 1) (match_dup 2)))]
-  "(optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{l}\t{%2, %0|%0, %2}"
                     (match_operand:QI 2 "const_1_to_31_operand" "I"))
         (const_int 0)))
    (clobber (match_scratch:SI 0 "=r"))]
-  "(optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{l}\t{%2, %0|%0, %2}"
    (set (match_operand:DI 0 "register_operand" "=r")
        (lshiftrt:DI (zero_extend:DI (match_dup 1)) (match_dup 2)))]
   "TARGET_64BIT
-   && (optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+   && (optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{l}\t{%2, %k0|%k0, %2}"
        (lshiftrt:HI (match_operand:HI 1 "nonimmediate_operand" "0")
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{w}\t%0"
   [(set_attr "type" "ishift")
          (const_int 0)))
    (set (match_operand:HI 0 "nonimmediate_operand" "=rm")
        (lshiftrt:HI (match_dup 1) (match_dup 2)))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{w}\t%0"
                       (match_operand:QI 2 "const1_operand" ""))
          (const_int 0)))
    (clobber (match_scratch:HI 0 "=r"))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{w}\t%0"
          (const_int 0)))
    (set (match_operand:HI 0 "nonimmediate_operand" "=rm")
        (lshiftrt:HI (match_dup 1) (match_dup 2)))]
-  "(optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{w}\t{%2, %0|%0, %2}"
                       (match_operand:QI 2 "const_1_to_31_operand" "I"))
          (const_int 0)))
    (clobber (match_scratch:HI 0 "=r"))]
-  "(optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, HImode, operands)"
   "shr{w}\t{%2, %0|%0, %2}"
        (lshiftrt:QI (match_operand:QI 1 "nonimmediate_operand" "0")
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (LSHIFTRT, QImode, operands)"
   "shr{b}\t%0"
   [(set_attr "type" "ishift")
        (lshiftrt:QI (match_dup 0)
                     (match_operand:QI 1 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
-   && (TARGET_SHIFT1 || optimize_size)"
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))"
   "shr{b}\t%0"
   [(set_attr "type" "ishift1")
    (set (attr "length")
        (lshiftrt:QI (match_dup 0)
                     (match_operand:QI 1 "nonmemory_operand" "I,c")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "@
    shr{b}\t{%1, %0|%0, %1}
          (const_int 0)))
    (set (match_operand:QI 0 "nonimmediate_operand" "=qm")
        (lshiftrt:QI (match_dup 1) (match_dup 2)))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, QImode, operands)"
   "shr{b}\t%0"
                       (match_operand:QI 2 "const1_operand" ""))
          (const_int 0)))
    (clobber (match_scratch:QI 0 "=q"))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, QImode, operands)"
   "shr{b}\t%0"
          (const_int 0)))
    (set (match_operand:QI 0 "nonimmediate_operand" "=qm")
        (lshiftrt:QI (match_dup 1) (match_dup 2)))]
-  "(optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, QImode, operands)"
   "shr{b}\t{%2, %0|%0, %2}"
                       (match_operand:QI 2 "const_1_to_31_operand" "I"))
          (const_int 0)))
    (clobber (match_scratch:QI 0 "=q"))]
-  "(optimize_size || !TARGET_PARTIAL_FLAG_REG_STALL)
+  "(optimize_function_for_size_p (cfun) || !TARGET_PARTIAL_FLAG_REG_STALL)
    && ix86_match_ccmode (insn, CCGOCmode)
    && ix86_binary_operator_ok (LSHIFTRT, QImode, operands)"
   "shr{b}\t{%2, %0|%0, %2}"
                   (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ROTATE, DImode, operands)"
   "rol{q}\t%0"
   [(set_attr "type" "rotate")
        (rotate:SI (match_operand:SI 1 "nonimmediate_operand" "0")
                   (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ROTATE, SImode, operands)"
   "rol{l}\t%0"
   [(set_attr "type" "rotate")
                     (match_operand:QI 2 "const1_operand" ""))))
    (clobber (reg:CC FLAGS_REG))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ROTATE, SImode, operands)"
   "rol{l}\t%k0"
   [(set_attr "type" "rotate")
        (rotate:HI (match_operand:HI 1 "nonimmediate_operand" "0")
                   (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ROTATE, HImode, operands)"
   "rol{w}\t%0"
   [(set_attr "type" "rotate")
        (rotate:QI (match_dup 0)
                   (match_operand:QI 1 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
-   && (TARGET_SHIFT1 || optimize_size)"
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))"
   "rol{b}\t%0"
   [(set_attr "type" "rotate1")
    (set (attr "length")
        (rotate:QI (match_operand:QI 1 "nonimmediate_operand" "0")
                   (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ROTATE, QImode, operands)"
   "rol{b}\t%0"
   [(set_attr "type" "rotate")
        (rotate:QI (match_dup 0)
                   (match_operand:QI 1 "nonmemory_operand" "I,c")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "@
    rol{b}\t{%1, %0|%0, %1}
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ROTATERT, DImode, operands)"
   "ror{q}\t%0"
   [(set_attr "type" "rotate")
        (rotatert:SI (match_operand:SI 1 "nonimmediate_operand" "0")
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ROTATERT, SImode, operands)"
   "ror{l}\t%0"
   [(set_attr "type" "rotate")
                       (match_operand:QI 2 "const1_operand" ""))))
    (clobber (reg:CC FLAGS_REG))]
   "TARGET_64BIT
-   && (TARGET_SHIFT1 || optimize_size)
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ROTATERT, SImode, operands)"
   "ror{l}\t%k0"
   [(set_attr "type" "rotate")
        (rotatert:HI (match_operand:HI 1 "nonimmediate_operand" "0")
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ROTATERT, HImode, operands)"
   "ror{w}\t%0"
   [(set_attr "type" "rotate")
        (rotatert:QI (match_operand:QI 1 "nonimmediate_operand" "0")
                     (match_operand:QI 2 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(TARGET_SHIFT1 || optimize_size)
+  "(TARGET_SHIFT1 || optimize_function_for_size_p (cfun))
    && ix86_binary_operator_ok (ROTATERT, QImode, operands)"
   "ror{b}\t%0"
   [(set_attr "type" "rotate")
        (rotatert:QI (match_dup 0)
                     (match_operand:QI 1 "const1_operand" "")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
-   && (TARGET_SHIFT1 || optimize_size)"
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
+   && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun))"
   "ror{b}\t%0"
   [(set_attr "type" "rotate1")
    (set (attr "length")
        (rotatert:QI (match_dup 0)
                     (match_operand:QI 1 "nonmemory_operand" "I,c")))
    (clobber (reg:CC FLAGS_REG))]
-  "(! TARGET_PARTIAL_REG_STALL || optimize_size)
+  "(! TARGET_PARTIAL_REG_STALL || optimize_function_for_size_p (cfun))
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
   "@
    ror{b}\t{%1, %0|%0, %1}
            (const_int 1)
            (match_operand:DI 1 "nonmemory_operand" "rN"))
          (const_int 0)))]
-  "TARGET_64BIT && (TARGET_USE_BT || optimize_size)"
+  "TARGET_64BIT && (TARGET_USE_BT || optimize_function_for_size_p (cfun))"
   "bt{q}\t{%1, %0|%0, %1}"
   [(set_attr "type" "alu1")])
 
            (const_int 1)
            (match_operand:SI 1 "nonmemory_operand" "rN"))
          (const_int 0)))]
-  "TARGET_USE_BT || optimize_size"
+  "TARGET_USE_BT || optimize_function_for_size_p (cfun)"
   "bt{l}\t{%1, %0|%0, %1}"
   [(set_attr "type" "alu1")])
 \f
                         (const_int 0)])
                      (label_ref (match_operand 3 "" ""))
                      (pc)))]
-  "TARGET_64BIT && (TARGET_USE_BT || optimize_size)"
+  "TARGET_64BIT && (TARGET_USE_BT || optimize_function_for_size_p (cfun))"
   "#"
   "&& 1"
   [(set (reg:CCC FLAGS_REG)
                             (match_operand:SI 3 "const_int_operand" "n")))])
                      (label_ref (match_operand 4 "" ""))
                      (pc)))]
-  "TARGET_64BIT && (TARGET_USE_BT || optimize_size)
+  "TARGET_64BIT && (TARGET_USE_BT || optimize_function_for_size_p (cfun))
    && (INTVAL (operands[3]) & 0x3f) == 0x3f"
   "#"
   "&& 1"
                         (const_int 0)])
                      (label_ref (match_operand 3 "" ""))
                      (pc)))]
-  "TARGET_USE_BT || optimize_size"
+  "TARGET_USE_BT || optimize_function_for_size_p (cfun)"
   "#"
   "&& 1"
   [(set (reg:CCC FLAGS_REG)
                             (match_operand:SI 3 "const_int_operand" "n")))])
                      (label_ref (match_operand 4 "" ""))
                      (pc)))]
-  "(TARGET_USE_BT || optimize_size)
+  "(TARGET_USE_BT || optimize_function_for_size_p (cfun))
    && (INTVAL (operands[3]) & 0x1f) == 0x1f"
   "#"
   "&& 1"
                         (const_int 0)])
                      (label_ref (match_operand 3 "" ""))
                      (pc)))]
-  "TARGET_USE_BT || optimize_size"
+  "TARGET_USE_BT || optimize_function_for_size_p (cfun)"
   "#"
   "&& 1"
   [(set (reg:CCC FLAGS_REG)
             (const_int 0)])
          (label_ref (match_operand 4 "" ""))
          (pc)))]
-  "(TARGET_USE_BT || optimize_size)
+  "(TARGET_USE_BT || optimize_function_for_size_p (cfun))
    && (INTVAL (operands[3]) & 0x1f) == 0x1f"
   "#"
   "&& 1"
    (clobber (reg:CCFP FLAGS_REG))
    (clobber (match_scratch:HI 5 "=a,a"))]
   "X87_FLOAT_MODE_P (GET_MODE (operands[3]))
-   && (TARGET_USE_<MODE>MODE_FIOP || optimize_size)
+   && (TARGET_USE_<MODE>MODE_FIOP || optimize_function_for_size_p (cfun))
    && GET_MODE (operands[1]) == GET_MODE (operands[3])
    && !ix86_use_fcomi_compare (swap_condition (GET_CODE (operands[0])))
    && ix86_fp_compare_mode (swap_condition (GET_CODE (operands[0]))) == CCFPmode
   [(set (strict_low_part (match_operand:HI 0 "register_operand" "+Q,r"))
        (bswap:HI (match_dup 0)))
    (clobber (reg:CC FLAGS_REG))]
-  "TARGET_USE_XCHGB || optimize_size"
+  "TARGET_USE_XCHGB || optimize_function_for_size_p (cfun)"
   "@
     xchg{b}\t{%h0, %b0|%b0, %h0}
     rol{w}\t{$8, %0|%0, 8}"
             (match_operand:X87MODEI12 1 "nonimmediate_operand" "m,?r"))
           (match_operand:MODEF 2 "register_operand" "0,0")]))]
   "TARGET_80387 && !(SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH)
-   && (TARGET_USE_<X87MODEI12:MODE>MODE_FIOP || optimize_size)"
+   && (TARGET_USE_<X87MODEI12:MODE>MODE_FIOP || optimize_function_for_size_p (cfun))"
   "* return which_alternative ? \"#\" : output_387_binary_op (insn, operands);"
   [(set (attr "type")
         (cond [(match_operand:MODEF 3 "mult_operator" "")
           (float:MODEF
             (match_operand:X87MODEI12 2 "nonimmediate_operand" "m,?r"))]))]
   "TARGET_80387 && !(SSE_FLOAT_MODE_P (<MODEF:MODE>mode) && TARGET_SSE_MATH)
-   && (TARGET_USE_<X87MODEI12:MODE>MODE_FIOP || optimize_size)"
+   && (TARGET_USE_<X87MODEI12:MODE>MODE_FIOP || optimize_function_for_size_p (cfun))"
   "* return which_alternative ? \"#\" : output_387_binary_op (insn, operands);"
   [(set (attr "type")
         (cond [(match_operand:MODEF 3 "mult_operator" "")
          [(float:XF
             (match_operand:X87MODEI12 1 "nonimmediate_operand" "m,?r"))
           (match_operand:XF 2 "register_operand" "0,0")]))]
-  "TARGET_80387 && (TARGET_USE_<MODE>MODE_FIOP || optimize_size)"
+  "TARGET_80387 && (TARGET_USE_<MODE>MODE_FIOP || optimize_function_for_size_p (cfun))"
   "* return which_alternative ? \"#\" : output_387_binary_op (insn, operands);"
   [(set (attr "type")
         (cond [(match_operand:XF 3 "mult_operator" "")
          [(match_operand:XF 1 "register_operand" "0,0")
           (float:XF
             (match_operand:X87MODEI12 2 "nonimmediate_operand" "m,?r"))]))]
-  "TARGET_80387 && (TARGET_USE_<MODE>MODE_FIOP || optimize_size)"
+  "TARGET_80387 && (TARGET_USE_<MODE>MODE_FIOP || optimize_function_for_size_p (cfun))"
   "* return which_alternative ? \"#\" : output_387_binary_op (insn, operands);"
   [(set (attr "type")
         (cond [(match_operand:XF 3 "mult_operator" "")
    || (SSE_FLOAT_MODE_P (<MODE>mode) && TARGET_SSE_MATH)"
 {
   if (<MODE>mode == SFmode
-      && TARGET_SSE_MATH && TARGET_RECIP && !optimize_size
+      && TARGET_SSE_MATH && TARGET_RECIP && !optimize_function_for_size_p (cfun)
       && flag_finite_math_only && !flag_trapping_math
       && flag_unsafe_math_optimizations)
     {
    (clobber (reg:CC FLAGS_REG))]
   "! TARGET_PARTIAL_REG_STALL && reload_completed
    && ((GET_MODE (operands[0]) == HImode
-       && ((!optimize_size && !TARGET_FAST_PREFIX)
+       && ((optimize_function_for_speed_p (cfun) && !TARGET_FAST_PREFIX)
             /* ??? next two lines just !satisfies_constraint_K (...) */
            || !CONST_INT_P (operands[2])
            || satisfies_constraint_K (operands[2])))
        || (GET_MODE (operands[0]) == QImode
-          && (TARGET_PROMOTE_QImode || optimize_size)))"
+          && (TARGET_PROMOTE_QImode || optimize_function_for_size_p (cfun))))"
   [(parallel [(set (match_dup 0)
                   (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
              (clobber (reg:CC FLAGS_REG))])]
    (set (match_operand 1 "register_operand" "")
        (and (match_dup 3) (match_dup 4)))]
   "! TARGET_PARTIAL_REG_STALL && reload_completed
-   && ! optimize_size
+   && optimize_insn_for_speed_p ()
    && ((GET_MODE (operands[1]) == HImode && ! TARGET_FAST_PREFIX)
        || (GET_MODE (operands[1]) == QImode && TARGET_PROMOTE_QImode))
    /* Ensure that the operand will remain sign-extended immediate.  */
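
The i386.md hunks above are a mechanical substitution: optimize_size tests in insn conditions become optimize_function_for_size_p (cfun), negated tests on the fast path become optimize_function_for_speed_p (cfun), and one splitter condition switches to optimize_insn_for_speed_p (). The only structural change is ashrsi3_31, which is split into an unconditional define_expand plus a *ashrsi3_31 insn that carries the cfun-based condition. A minimal sketch of the new style of check; use_short_shift1_p is a hypothetical helper, not part of the patch:

    /* Sketch only, assuming the usual GCC internal headers (coretypes.h,
       tm.h, rtl.h, basic-block.h).  TARGET_SHIFT1, const1_rtx, cfun and
       optimize_function_for_size_p are the real symbols used in the
       patterns above; the helper itself is made up.  */
    static bool
    use_short_shift1_p (rtx count)
    {
      /* Prefer the one-byte "shift by 1" encoding when the CPU likes it
         or when this function is being optimized for size.  */
      return count == const1_rtx
             && (TARGET_SHIFT1 || optimize_function_for_size_p (cfun));
    }
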
gcc/config/i386/sse.md
index 208a530..e35d857 100644 (file)
 }
   [(set_attr "type" "sselog1,ssemov,ssemov")
    (set (attr "mode")
-       (cond [(ior (ior (ne (symbol_ref "optimize_size") (const_int 0))
+       (cond [(ior (ior (ne (symbol_ref "optimize_function_for_size_p (cfun)") (const_int 0))
                         (eq (symbol_ref "TARGET_SSE2") (const_int 0)))
                    (and (eq_attr "alternative" "2")
                         (ne (symbol_ref "TARGET_SSE_TYPELESS_STORES")
                  (match_operand:V4SF 2 "nonimmediate_operand" "")))]
   "TARGET_SSE"
 {
-  if (TARGET_SSE_MATH && TARGET_RECIP && !optimize_size
+  if (TARGET_SSE_MATH && TARGET_RECIP && optimize_insn_for_speed_p ()
       && flag_finite_math_only && !flag_trapping_math
       && flag_unsafe_math_optimizations)
     {
        (sqrt:V4SF (match_operand:V4SF 1 "nonimmediate_operand" "")))]
   "TARGET_SSE"
 {
-  if (TARGET_SSE_MATH && TARGET_RECIP && !optimize_size
+  if (TARGET_SSE_MATH && TARGET_RECIP && optimize_insn_for_speed_p ()
       && flag_finite_math_only && !flag_trapping_math
       && flag_unsafe_math_optimizations)
     {
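
The sse.md changes above follow the same pattern: the mode attribute in the first hunk reaches optimize_function_for_size_p (cfun) through a symbol_ref, while the V4SF divide and sqrt expanders replace !optimize_size with the insn-level predicate optimize_insn_for_speed_p (), so the rcp/rsqrt approximation is only considered for hot code under the unsafe-math flags. Reduced to a sketch, with emit_fast_approximation standing in for the real emission code:

    /* Sketch, not the patch itself: emit the approximation only when the
       insn being expanded is worth optimizing for speed.  */
    if (TARGET_SSE_MATH && TARGET_RECIP && optimize_insn_for_speed_p ()
        && flag_finite_math_only && !flag_trapping_math
        && flag_unsafe_math_optimizations)
      {
        emit_fast_approximation (operands);  /* hypothetical stand-in */
        DONE;
      }
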
gcc/expmed.c
index b102241..8212992 100644 (file)
@@ -521,6 +521,8 @@ store_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
          != CODE_FOR_nothing))
     {
       int icode = optab_handler (movstrict_optab, fieldmode)->insn_code;
+      rtx insn;
+      rtx start = get_last_insn ();
 
       /* Get appropriate low part of the value being stored.  */
       if (GET_CODE (value) == CONST_INT || REG_P (value))
@@ -544,13 +546,17 @@ store_bit_field_1 (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
          op0 = SUBREG_REG (op0);
        }
 
-      emit_insn (GEN_FCN (icode)
+      insn = (GEN_FCN (icode)
                 (gen_rtx_SUBREG (fieldmode, op0,
                                  (bitnum % BITS_PER_WORD) / BITS_PER_UNIT
                                  + (offset * UNITS_PER_WORD)),
                                  value));
-
-      return true;
+      if (insn)
+       {
+         emit_insn (insn);
+         return true;
+       }
+      delete_insns_since (start);
     }
 
   /* Handle fields bigger than a word.  */
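
The expmed.c hunk above makes store_bit_field_1 tolerate a movstrict expander that refuses its operands: it records the insn-stream position, asks GEN_FCN for the pattern, and if the generator returns NULL it deletes anything emitted in the meantime and falls through to the generic bit-field path. The same try-and-roll-back idiom, reduced to a skeleton (dest stands in for the SUBREG built in the real code):

    /* Skeleton of the fallback idiom used above; sketch, not the full
       function body.  */
    rtx start = get_last_insn ();
    rtx pat = GEN_FCN (icode) (dest, value);   /* may return NULL_RTX */

    if (pat)
      {
        emit_insn (pat);
        return true;
      }
    delete_insns_since (start);   /* drop scratch insns, use fallback path */
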
gcc/predict.c
index e90b95c..69ebe6b 100644 (file)
@@ -182,11 +182,19 @@ probably_never_executed_bb_p (const_basic_block bb)
 
 /* Return true when current function should always be optimized for size.  */
 
-static bool
-always_optimize_for_size_p (void)
+bool
+optimize_function_for_size_p (struct function *fun)
 {
   return (optimize_size
-         || cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED);
+         || fun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED);
+}
+
+/* Return true when current function should always be optimized for speed.  */
+
+bool
+optimize_function_for_speed_p (struct function *fun)
+{
+  return !optimize_function_for_size_p (fun);
 }
 
 /* Return TRUE when BB should be optimized for size.  */
@@ -194,7 +202,7 @@ always_optimize_for_size_p (void)
 bool
 optimize_bb_for_size_p (basic_block bb)
 {
-  return always_optimize_for_size_p () || !maybe_hot_bb_p (bb);
+  return optimize_function_for_size_p (cfun) || !maybe_hot_bb_p (bb);
 }
 
 /* Return TRUE when BB should be optimized for speed.  */
@@ -210,7 +218,7 @@ optimize_bb_for_speed_p (basic_block bb)
 bool
 optimize_edge_for_size_p (edge e)
 {
-  return always_optimize_for_size_p () || !maybe_hot_edge_p (e);
+  return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
 }
 
 /* Return TRUE when BB should be optimized for speed.  */
@@ -226,7 +234,7 @@ optimize_edge_for_speed_p (edge e)
 bool
 optimize_insn_for_size_p (void)
 {
-  return always_optimize_for_size_p () || !crtl->maybe_hot_insn_p;
+  return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
 }
 
 /* Return TRUE when BB should be optimized for speed.  */
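
The predict.c hunks above turn the former static always_optimize_for_size_p into the exported pair optimize_function_for_size_p / optimize_function_for_speed_p, each taking an explicit struct function *, and rewrite the bb/edge/insn predicates in terms of the new function-level one. A short usage sketch; emit_compact_form and emit_fast_form are hypothetical helpers:

    /* Sketch: a backend or RTL pass picks its strategy per function.  */
    if (optimize_function_for_speed_p (cfun))
      emit_fast_form ();      /* hypothetical: larger but faster sequence */
    else
      emit_compact_form ();   /* hypothetical: smaller sequence */
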