/* Perform simple optimizations to clean up the result of reload.
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"

#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "reload.h"
#include "cselib.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif
static int reload_cse_noop_set_p (rtx);
static bool reload_cse_simplify (rtx_insn *, rtx);
static void reload_cse_regs_1 (void);
static int reload_cse_simplify_set (rtx, rtx_insn *);
static int reload_cse_simplify_operands (rtx_insn *, rtx);

static void reload_combine (void);
static void reload_combine_note_use (rtx *, rtx_insn *, int, rtx);
static void reload_combine_note_store (rtx, const_rtx, void *);

static bool reload_cse_move2add (rtx_insn *);
static void move2add_note_store (rtx, const_rtx, void *);
/* Call cse / combine like post-reload optimization phases.
   FIRST is the first instruction.  */

static void
reload_cse_regs (rtx_insn *first ATTRIBUTE_UNUSED)
{
  bool moves_converted;
  reload_cse_regs_1 ();
  reload_combine ();
  moves_converted = reload_cse_move2add (first);
  if (flag_expensive_optimizations)
    {
      if (moves_converted)
	reload_combine ();
      reload_cse_regs_1 ();
    }
}
/* See whether a single set SET is a noop.  */
static int
reload_cse_noop_set_p (rtx set)
{
  if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
    return 0;

  return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
}
/* Try to simplify INSN.  Return true if the CFG may have changed.  */
static bool
reload_cse_simplify (rtx_insn *insn, rtx testreg)
{
  rtx body = PATTERN (insn);
  basic_block insn_bb = BLOCK_FOR_INSN (insn);
  unsigned insn_bb_succs = EDGE_COUNT (insn_bb->succs);

  /* If NO_FUNCTION_CSE has been set by the target, then we should not try
     to cse function calls.  */
  if (NO_FUNCTION_CSE && CALL_P (insn))
    return false;

  if (GET_CODE (body) == SET)
    {
      int count = 0;

      /* Simplify even if we may think it is a no-op.
	 We may think a memory load of a value smaller than WORD_SIZE
	 is redundant because we haven't taken into account possible
	 implicit extension.  reload_cse_simplify_set() will bring
	 this out, so it's safer to simplify before we delete.  */
      count += reload_cse_simplify_set (body, insn);

      if (!count && reload_cse_noop_set_p (body))
	{
	  if (check_for_inc_dec (insn))
	    delete_insn_and_edges (insn);
	  /* We're done with this insn.  */
	  goto done;
	}

      if (count > 0)
	apply_change_group ();
      else
	reload_cse_simplify_operands (insn, testreg);
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      int count = 0;
      rtx value = NULL_RTX;

      /* Registers mentioned in the clobber list for an asm cannot be reused
	 within the body of the asm.  Invalidate those registers now so that
	 we don't try to substitute values for them.  */
      if (asm_noperands (body) >= 0)
	{
	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	    {
	      rtx part = XVECEXP (body, 0, i);
	      if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
		cselib_invalidate_rtx (XEXP (part, 0));
	    }
	}

      /* If every action in a PARALLEL is a noop, we can delete
	 the entire PARALLEL.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx part = XVECEXP (body, 0, i);
	  if (GET_CODE (part) == SET)
	    {
	      if (! reload_cse_noop_set_p (part))
		break;
	      if (REG_P (SET_DEST (part))
		  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
		{
		  if (value)
		    break;
		  value = SET_DEST (part);
		}
	    }
	  else if (GET_CODE (part) != CLOBBER)
	    break;
	}

      if (i < 0)
	{
	  if (check_for_inc_dec (insn))
	    delete_insn_and_edges (insn);
	  /* We're done with this insn.  */
	  goto done;
	}

      /* It's not a no-op, but we can try to simplify it.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	if (GET_CODE (XVECEXP (body, 0, i)) == SET)
	  count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

      if (count > 0)
	apply_change_group ();
      else
	reload_cse_simplify_operands (insn, testreg);
    }

done:
  return (EDGE_COUNT (insn_bb->succs) != insn_bb_succs);
}
/* Do a very simple CSE pass over the hard registers.

   This function detects no-op moves where we happened to assign two
   different pseudo-registers to the same hard register, and then
   copied one to the other.  Reload will generate a useless
   instruction copying a register to itself.

   This function also detects cases where we load a value from memory
   into two different registers, and (if memory is more expensive than
   registers) changes it to simply copy the first register into the
   second register.

   Another optimization is performed that scans the operands of each
   instruction to see whether the value is already available in a
   hard register.  It then replaces the operand with the hard register
   if possible, much like an optional reload would.  */
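/* As an illustrative sketch of the second optimization (the register
   numbers are hypothetical):

     (set (reg:SI 0) (mem:SI (reg:SI 1)))
     ...
     (set (reg:SI 2) (mem:SI (reg:SI 1)))

   can become

     (set (reg:SI 0) (mem:SI (reg:SI 1)))
     ...
     (set (reg:SI 2) (reg:SI 0))

   when the register move is cheaper than the repeated load.  */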
static void
reload_cse_regs_1 (void)
{
  bool cfg_changed = false;
  basic_block bb;
  rtx_insn *insn;
  rtx testreg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  cselib_init (CSELIB_RECORD_MEMORY);
  init_alias_analysis ();

  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      {
	if (INSN_P (insn))
	  cfg_changed |= reload_cse_simplify (insn, testreg);

	cselib_process_insn (insn);
      }

  /* Clean up.  */
  end_alias_analysis ();
  cselib_finish ();
  if (cfg_changed)
    cleanup_cfg (0);
}
/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.  */
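/* For example (hypothetical registers), if cselib knows that
   (reg:SI 3) currently holds the value 42, then

     (set (reg:SI 0) (const_int 42))

   can be rewritten as

     (set (reg:SI 0) (reg:SI 3))

   provided the register move costs no more than loading the
   constant.  */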
static int
reload_cse_simplify_set (rtx set, rtx_insn *insn)
{
  int did_change = 0;
  int dreg;
  rtx src;
  reg_class_t dclass;
  int old_cost;
  cselib_val *val;
  struct elt_loc_list *l;
  enum rtx_code extend_op = UNKNOWN;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

  dreg = true_regnum (SET_DEST (set));
  if (dreg < 0)
    return 0;

  src = SET_SRC (set);
  if (side_effects_p (src) || true_regnum (src) >= 0)
    return 0;

  dclass = REGNO_REG_CLASS (dreg);

  /* When replacing a memory with a register, we need to honor assumptions
     that combine made wrt the contents of sign bits.  We'll do this by
     generating an extend instruction instead of a reg->reg copy.  Thus
     the destination must be a register that we can widen.  */
  if (MEM_P (src)
      && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
      && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
      && !REG_P (SET_DEST (set)))
    return 0;

  val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
  if (! val)
    return 0;

  /* If memory loads are cheaper than register copies, don't change them.  */
  if (MEM_P (src))
    old_cost = memory_move_cost (GET_MODE (src), dclass, true);
  else if (REG_P (src))
    old_cost = register_move_cost (GET_MODE (src),
				   REGNO_REG_CLASS (REGNO (src)), dclass);
  else
    old_cost = set_src_cost (src, GET_MODE (SET_DEST (set)), speed);

  for (l = val->locs; l; l = l->next)
    {
      rtx this_rtx = l->loc;
      int this_cost;

      if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
	{
	  if (extend_op != UNKNOWN)
	    {
	      wide_int result;

	      if (!CONST_SCALAR_INT_P (this_rtx))
		continue;

	      switch (extend_op)
		{
		case ZERO_EXTEND:
		  result = wide_int::from (std::make_pair (this_rtx,
							   GET_MODE (src)),
					   BITS_PER_WORD, UNSIGNED);
		  break;
		case SIGN_EXTEND:
		  result = wide_int::from (std::make_pair (this_rtx,
							   GET_MODE (src)),
					   BITS_PER_WORD, SIGNED);
		  break;
		default:
		  gcc_unreachable ();
		}
	      this_rtx = immed_wide_int_const (result, word_mode);
	    }

	  this_cost = set_src_cost (this_rtx, GET_MODE (SET_DEST (set)), speed);
	}
      else if (REG_P (this_rtx))
	{
	  if (extend_op != UNKNOWN)
	    {
	      this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
	      this_cost = set_src_cost (this_rtx, word_mode, speed);
	    }
	  else
	    this_cost = register_move_cost (GET_MODE (this_rtx),
					    REGNO_REG_CLASS (REGNO (this_rtx)),
					    dclass);
	}
      else
	continue;

      /* If equal costs, prefer registers over anything else.  That
	 tends to lead to smaller instructions on some machines.  */
      if (this_cost < old_cost
	  || (this_cost == old_cost
	      && REG_P (this_rtx)
	      && !REG_P (SET_SRC (set))))
	{
	  if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
	      && extend_op != UNKNOWN
#ifdef CANNOT_CHANGE_MODE_CLASS
	      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
					    word_mode,
					    REGNO_REG_CLASS (REGNO (SET_DEST (set))))
#endif
	      )
	    {
	      rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
	      ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
	      validate_change (insn, &SET_DEST (set), wide_dest, 1);
	    }

	  validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
	  old_cost = this_cost, did_change = 1;
	}
    }

  return did_change;
}
/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.  */
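/* A hypothetical illustration: if an input operand has the constraints
   "ri,m" and currently is the constant 100, while (reg:SI 4) is known
   to hold the value 100, the register alternative can be made to match
   by substituting (reg:SI 4) for the constant, much as an optional
   reload would have done.  */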
static int
reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
{
  int i, j;

  /* For each operand, all registers that are equivalent to it.  */
  HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];

  const char *constraints[MAX_RECOG_OPERANDS];

  /* Vector recording how bad an alternative is.  */
  int *alternative_reject;
  /* Vector recording how many registers can be introduced by choosing
     this alternative.  */
  int *alternative_nregs;
  /* Array of vectors recording, for each operand and each alternative,
     which hard register to substitute, or -1 if the operand should be
     left as it is.  */
  int *op_alt_regno[MAX_RECOG_OPERANDS];
  /* Array of alternatives, sorted in order of decreasing desirability.  */
  int *alternative_order;

  extract_constrain_insn (insn);

  if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
    return 0;

  alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
  memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
  memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));

  /* For each operand, find out which regs are equivalent.  */
  for (i = 0; i < recog_data.n_operands; i++)
    {
      cselib_val *v;
      struct elt_loc_list *l;
      rtx op;

      CLEAR_HARD_REG_SET (equiv_regs[i]);

      /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
	 right, so avoid the problem here.  Likewise if we have a constant
	 and the insn pattern doesn't tell us the mode we need.  */
      if (LABEL_P (recog_data.operand[i])
	  || (CONSTANT_P (recog_data.operand[i])
	      && recog_data.operand_mode[i] == VOIDmode))
	continue;

      op = recog_data.operand[i];
      if (MEM_P (op)
	  && GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
	  && LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
	{
	  rtx set = single_set (insn);

	  /* We might have multiple sets, some of which do implicit
	     extension.  Punt on this for now.  */
	  if (! set)
	    continue;
	  /* If the destination is also a MEM or a STRICT_LOW_PART, no
	     extension applies.
	     Also, if there is an explicit extension, we don't have to
	     worry about an implicit one.  */
	  else if (MEM_P (SET_DEST (set))
		   || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
		   || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
		   || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
	    ; /* Continue ordinary processing.  */
#ifdef CANNOT_CHANGE_MODE_CLASS
	  /* If the register cannot change mode to word_mode, it follows that
	     it cannot have been used in word_mode.  */
	  else if (REG_P (SET_DEST (set))
		   && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
						word_mode,
						REGNO_REG_CLASS (REGNO (SET_DEST (set)))))
	    ; /* Continue ordinary processing.  */
#endif
	  /* If this is a straight load, make the extension explicit.  */
	  else if (REG_P (SET_DEST (set))
		   && recog_data.n_operands == 2
		   && SET_SRC (set) == op
		   && SET_DEST (set) == recog_data.operand[1-i])
	    {
	      validate_change (insn, recog_data.operand_loc[i],
			       gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (op)),
					      word_mode, op),
			       1);
	      validate_change (insn, recog_data.operand_loc[1-i],
			       gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
			       1);
	      if (! apply_change_group ())
		return 0;
	      return reload_cse_simplify_operands (insn, testreg);
	    }
	  else
	    /* ??? There might be arithmetic operations with memory that are
	       safe to optimize, but is it worth the trouble?  */
	    continue;
	}

      if (side_effects_p (op))
	continue;
      v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
      if (! v)
	continue;

      for (l = v->locs; l; l = l->next)
	if (REG_P (l->loc))
	  SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
    }
  alternative_mask preferred = get_preferred_alternatives (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode;
      int regno;
      const char *p;

      op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
      for (j = 0; j < recog_data.n_alternatives; j++)
	op_alt_regno[i][j] = -1;

      p = constraints[i] = recog_data.constraints[i];
      mode = recog_data.operand_mode[i];

      /* Add the reject values for each alternative given by the constraints
	 for this operand.  */
      j = 0;
      while (*p != '\0')
	{
	  char c = *p++;
	  if (c == ',')
	    j++;
	  else if (c == '?')
	    alternative_reject[j] += 3;
	  else if (c == '!')
	    alternative_reject[j] += 300;
	}

      /* We won't change operands which are already registers.  We
	 also don't want to modify output operands.  */
      regno = true_regnum (recog_data.operand[i]);
      if (regno >= 0
	  || constraints[i][0] == '='
	  || constraints[i][0] == '+')
	continue;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  enum reg_class rclass = NO_REGS;

	  if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
	    continue;

	  set_mode_and_regno (testreg, mode, regno);

	  /* We found a register equal to this operand.  Now look for all
	     alternatives that can accept this register and have not been
	     assigned a register they can use yet.  */
	  j = 0;
	  p = constraints[i];
	  for (;;)
	    {
	      char c = *p;

	      switch (c)
		{
		case 'g':
		  rclass = reg_class_subunion[rclass][GENERAL_REGS];
		  break;

		default:
		  rclass
		    = (reg_class_subunion
		       [rclass]
		       [reg_class_for_constraint (lookup_constraint (p))]);
		  break;

		case ',': case '\0':
		  /* See if REGNO fits this alternative, and set it up as the
		     replacement register if we don't have one for this
		     alternative yet and the operand being replaced is not
		     a cheap CONST_INT.  */
		  if (op_alt_regno[i][j] == -1
		      && TEST_BIT (preferred, j)
		      && reg_fits_class_p (testreg, rclass, 0, mode)
		      && (!CONST_INT_P (recog_data.operand[i])
			  || (set_src_cost (recog_data.operand[i], mode,
					    optimize_bb_for_speed_p
					     (BLOCK_FOR_INSN (insn)))
			      > set_src_cost (testreg, mode,
					      optimize_bb_for_speed_p
					       (BLOCK_FOR_INSN (insn))))))
		    {
		      alternative_nregs[j]++;
		      op_alt_regno[i][j] = regno;
		    }
		  j++;
		  rclass = NO_REGS;
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);

	      if (c == '\0')
		break;
	    }
	}
    }
  /* Record all alternatives which are better or equal to the currently
     matching one in the alternative_order array.  */
  for (i = j = 0; i < recog_data.n_alternatives; i++)
    if (alternative_reject[i] <= alternative_reject[which_alternative])
      alternative_order[j++] = i;
  recog_data.n_alternatives = j;

  /* Sort it.  Given a small number of alternatives, a dumb algorithm
     won't hurt too much.  */
  for (i = 0; i < recog_data.n_alternatives - 1; i++)
    {
      int best = i;
      int best_reject = alternative_reject[alternative_order[i]];
      int best_nregs = alternative_nregs[alternative_order[i]];

      for (j = i + 1; j < recog_data.n_alternatives; j++)
	{
	  int this_reject = alternative_reject[alternative_order[j]];
	  int this_nregs = alternative_nregs[alternative_order[j]];

	  if (this_reject < best_reject
	      || (this_reject == best_reject && this_nregs > best_nregs))
	    {
	      best = j;
	      best_reject = this_reject;
	      best_nregs = this_nregs;
	    }
	}

      std::swap (alternative_order[best], alternative_order[i]);
    }

  /* Substitute the operands as determined by op_alt_regno for the best
     alternative.  */
  j = alternative_order[0];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      machine_mode mode = recog_data.operand_mode[i];
      if (op_alt_regno[i][j] == -1)
	continue;

      validate_change (insn, recog_data.operand_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
    }

  for (i = recog_data.n_dups - 1; i >= 0; i--)
    {
      int op = recog_data.dup_num[i];
      machine_mode mode = recog_data.operand_mode[op];

      if (op_alt_regno[op][j] == -1)
	continue;

      validate_change (insn, recog_data.dup_loc[i],
		       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
    }

  return apply_change_group ();
}
/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
   addressing now.
   This code might also be useful when reload gave up on reg+reg addressing
   because of clashes between the return register and INDEX_REG_CLASS.  */
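/* Illustrative example (hypothetical registers): where reload emitted

     (set (reg:SI 0) (const_int 16))
     (set (reg:SI 0) (plus:SI (reg:SI 0) (reg:SI 1)))
     ... (mem:SI (reg:SI 0)) ...

   this pass can produce

     (set (reg:SI 2) (const_int 16))
     ... (mem:SI (plus:SI (reg:SI 2) (reg:SI 1))) ...

   using a free index register (reg:SI 2).  */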
/* The maximum number of uses of a register we can keep track of to
   replace them with reg+reg addressing.  */
#define RELOAD_COMBINE_MAX_USES 16

/* Describes a recorded use of a register.  */
struct reg_use
{
  /* The insn where a register has been used.  */
  rtx_insn *insn;

  /* Points to the memory reference enclosing the use, if any, NULL_RTX
     otherwise.  */
  rtx containing_mem;

  /* Location of the register within INSN.  */
  rtx *usep;

  /* The reverse uid of the insn.  */
  int ruid;
};

/* If the register is used in some unknown fashion, USE_INDEX is negative.
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
   indicates where it is first set or clobbered.
   Otherwise, USE_INDEX is the index of the last encountered use of the
   register (which is first among these we have seen since we scan backwards).
   USE_RUID indicates the first encountered, i.e. last, of these uses.
   If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
   with a constant offset; OFFSET contains this constant in that case.
   STORE_RUID is always meaningful if we only want to use a value in a
   register in a different place: it denotes the next insn in the insn
   stream (i.e. the last encountered) that sets or clobbers the register.
   REAL_STORE_RUID is similar, but clobbers are ignored when updating it.  */
static struct
  {
    struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
    rtx offset;
    int use_index;
    int store_ruid;
    int real_store_ruid;
    int use_ruid;
    bool all_offsets_match;
  } reg_state[FIRST_PSEUDO_REGISTER];
/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;

/* The RUID of the last label we encountered in reload_combine.  */
static int last_label_ruid;

/* The RUID of the last jump we encountered in reload_combine.  */
static int last_jump_ruid;

/* The register numbers of the first and last index register.  A value of
   -1 in LAST_INDEX_REG indicates that we've previously computed these
   values and found no suitable index registers.  */
static int first_index_reg = -1;
static int last_index_reg;

#define LABEL_LIVE(LABEL) \
  (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
/* Subroutine of reload_combine_split_ruids, called to fix up a single
   ruid pointed to by *PRUID if it is higher than SPLIT_RUID.  */

static inline void
reload_combine_split_one_ruid (int *pruid, int split_ruid)
{
  if (*pruid > split_ruid)
    (*pruid)++;
}
/* Called when we insert a new insn in a position we've already passed in
   the scan.  Examine all our state, increasing all ruids that are higher
   than SPLIT_RUID by one in order to make room for a new insn.  */

static void
reload_combine_split_ruids (int split_ruid)
{
  unsigned i;

  reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, idx = reg_state[i].use_index;
      reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
				     split_ruid);
      if (idx < 0)
	continue;
      for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
	{
	  reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
					 split_ruid);
	}
    }
}
/* Called when we are about to rescan a previously encountered insn with
   reload_combine_note_use after modifying some part of it.  This clears all
   information about uses in that particular insn.  */

static void
reload_combine_purge_insn_uses (rtx_insn *insn)
{
  unsigned i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, k, idx = reg_state[i].use_index;
      if (idx < 0)
	continue;
      j = k = RELOAD_COMBINE_MAX_USES;
      while (j-- > idx)
	{
	  if (reg_state[i].reg_use[j].insn != insn)
	    {
	      k--;
	      if (k != j)
		reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
	    }
	}
      reg_state[i].use_index = k;
    }
}
/* Called when we need to forget about all uses of REGNO after an insn
   which is identified by RUID.  */

static void
reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
{
  int j, k, idx = reg_state[regno].use_index;
  if (idx < 0)
    return;
  j = k = RELOAD_COMBINE_MAX_USES;
  while (j-- > idx)
    {
      if (reg_state[regno].reg_use[j].ruid >= ruid)
	{
	  k--;
	  if (k != j)
	    reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
	}
    }
  reg_state[regno].use_index = k;
}
/* Find the use of REGNO with the ruid that is highest among those
   lower than RUID_LIMIT, and return it if it is the only use of this
   reg in the insn.  Return NULL otherwise.  */

static struct reg_use *
reload_combine_closest_single_use (unsigned regno, int ruid_limit)
{
  int i, best_ruid = 0;
  int use_idx = reg_state[regno].use_index;
  struct reg_use *retval;

  if (use_idx < 0)
    return NULL;
  retval = NULL;
  for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      int this_ruid = use->ruid;
      if (this_ruid >= ruid_limit)
	continue;
      if (this_ruid > best_ruid)
	{
	  best_ruid = this_ruid;
	  retval = use;
	}
      else if (this_ruid == best_ruid)
	retval = NULL;
    }
  if (last_label_ruid >= best_ruid)
    return NULL;
  return retval;
}
/* After we've moved an add insn, fix up any debug insns that occur
   between the old location of the add and the new location.  REG is
   the destination register of the add insn; REPLACEMENT is the
   SET_SRC of the add.  FROM and TO specify the range in which we
   should make this change on debug insns.  */

static void
fixup_debug_insns (rtx reg, rtx replacement, rtx_insn *from, rtx_insn *to)
{
  rtx_insn *insn;
  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      rtx t;

      if (!DEBUG_INSN_P (insn))
	continue;

      t = INSN_VAR_LOCATION_LOC (insn);
      t = simplify_replace_rtx (t, reg, replacement);
      validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
    }
}
/* Subroutine of reload_combine_recognize_const_pattern.  Try to replace REG
   with SRC in the insn described by USE, taking costs into account.  Return
   true if we made the replacement.  */

static bool
try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
{
  rtx_insn *use_insn = use->insn;
  rtx mem = use->containing_mem;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));

  if (mem != NULL_RTX)
    {
      addr_space_t as = MEM_ADDR_SPACE (mem);
      rtx oldaddr = XEXP (mem, 0);
      rtx newaddr = NULL_RTX;
      int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
      int new_cost;

      newaddr = simplify_replace_rtx (oldaddr, reg, src);
      if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
	{
	  XEXP (mem, 0) = newaddr;
	  new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
	  XEXP (mem, 0) = oldaddr;
	  if (new_cost <= old_cost
	      && validate_change (use_insn,
				  &XEXP (mem, 0), newaddr, 0))
	    return true;
	}
    }
  else
    {
      rtx new_set = single_set (use_insn);
      if (new_set
	  && REG_P (SET_DEST (new_set))
	  && GET_CODE (SET_SRC (new_set)) == PLUS
	  && REG_P (XEXP (SET_SRC (new_set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
	{
	  rtx new_src;
	  machine_mode mode = GET_MODE (SET_DEST (new_set));
	  int old_cost = set_src_cost (SET_SRC (new_set), mode, speed);

	  gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
	  new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);

	  if (set_src_cost (new_src, mode, speed) <= old_cost
	      && validate_change (use_insn, &SET_SRC (new_set),
				  new_src, 0))
	    return true;
	}
    }
  return false;
}
/* Called by reload_combine when scanning INSN.  This function tries to detect
   patterns where a constant is added to a register, and the result is used
   in an address.
   Return true if no further processing is needed on INSN; false if it wasn't
   recognized and should be handled normally.  */
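/* For instance (hypothetical registers), given

     (set (reg:SI 0) (plus:SI (reg:SI 1) (const_int 4)))
     ... (mem:SI (reg:SI 0)) ...

   the constant addition can be folded into the address, yielding

     ... (mem:SI (plus:SI (reg:SI 1) (const_int 4))) ...

   after which the add insn can be deleted or moved.  */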
static bool
reload_combine_recognize_const_pattern (rtx_insn *insn)
{
  int from_ruid = reload_combine_ruid;
  rtx set, pat, reg, src, addreg;
  unsigned int regno;
  struct reg_use *use;
  bool must_move_add;
  rtx_insn *add_moved_after_insn = NULL;
  int add_moved_after_ruid = 0;
  int clobbered_regno = -1;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || REG_NREGS (reg) != 1
      || GET_MODE (reg) != Pmode
      || reg == stack_pointer_rtx)
    return false;

  regno = REGNO (reg);

  /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
     uses of REG1 inside an address, or inside another add insn.  If
     possible and profitable, merge the addition into subsequent
     uses.  */
  if (GET_CODE (src) != PLUS
      || !REG_P (XEXP (src, 0))
      || !CONSTANT_P (XEXP (src, 1)))
    return false;

  addreg = XEXP (src, 0);
  must_move_add = rtx_equal_p (reg, addreg);

  pat = PATTERN (insn);
  if (must_move_add && set != pat)
    {
      /* We have to be careful when moving the add; apart from the
	 single_set there may also be clobbers.  Recognize one special
	 case, that of one clobber alongside the set (likely a clobber
	 of the CC register).  */
      gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
      if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
	  || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
	  || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
	return false;
      clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
    }

  do
    {
      use = reload_combine_closest_single_use (regno, from_ruid);

      if (use)
	/* Start the search for the next use from here.  */
	from_ruid = use->ruid;

      if (use && GET_MODE (*use->usep) == Pmode)
	{
	  bool delete_add = false;
	  rtx_insn *use_insn = use->insn;
	  int use_ruid = use->ruid;

	  /* Avoid moving the add insn past a jump.  */
	  if (must_move_add && use_ruid <= last_jump_ruid)
	    break;

	  /* If the add clobbers another hard reg in parallel, don't move
	     it past a real set of this hard reg.  */
	  if (must_move_add && clobbered_regno >= 0
	      && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
	    break;

	  /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets.  */
	  if (HAVE_cc0 && must_move_add && sets_cc0_p (PATTERN (use_insn)))
	    break;

	  gcc_assert (reg_state[regno].store_ruid <= use_ruid);
	  /* Avoid moving a use of ADDREG past a point where it is stored.  */
	  if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
	    break;

	  /* We also must not move the addition past an insn that sets
	     the same register, unless we can combine two add insns.  */
	  if (must_move_add && reg_state[regno].store_ruid == use_ruid)
	    {
	      if (use->containing_mem == NULL_RTX)
		delete_add = true;
	      else
		break;
	    }

	  if (try_replace_in_use (use, reg, src))
	    {
	      reload_combine_purge_insn_uses (use_insn);
	      reload_combine_note_use (&PATTERN (use_insn), use_insn,
				       use_ruid, NULL_RTX);

	      if (delete_add)
		{
		  fixup_debug_insns (reg, src, insn, use_insn);
		  delete_insn (insn);
		  return true;
		}
	      if (must_move_add)
		{
		  add_moved_after_insn = use_insn;
		  add_moved_after_ruid = use_ruid;
		}
	      continue;
	    }
	}
      /* If we get here, we couldn't handle this use.  */
      if (must_move_add)
	break;
    }
  while (use);

  if (!must_move_add || add_moved_after_insn == NULL_RTX)
    /* Process the add normally.  */
    return false;

  fixup_debug_insns (reg, src, insn, add_moved_after_insn);

  reorder_insns (insn, insn, add_moved_after_insn);
  reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
  reload_combine_split_ruids (add_moved_after_ruid - 1);
  reload_combine_note_use (&PATTERN (insn), insn,
			   add_moved_after_ruid, NULL_RTX);
  reg_state[regno].store_ruid = add_moved_after_ruid;

  return true;
}
/* Called by reload_combine when scanning INSN.  Try to detect a pattern we
   can handle and improve.  Return true if no further processing is needed on
   INSN; false if it wasn't recognized and should be handled normally.  */
static bool
reload_combine_recognize_pattern (rtx_insn *insn)
{
  rtx set, reg, src;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg) || REG_NREGS (reg) != 1)
    return false;

  unsigned int regno = REGNO (reg);
  machine_mode mode = GET_MODE (reg);

  if (reg_state[regno].use_index < 0
      || reg_state[regno].use_index >= RELOAD_COMBINE_MAX_USES)
    return false;

  for (int i = reg_state[regno].use_index;
       i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      if (GET_MODE (*use->usep) != mode)
	return false;
    }
  /* Look for (set (REGX) (CONST_INT))
     (set (REGX) (PLUS (REGX) (REGY)))
     ...
     ... (MEM (REGX)) ...
     and convert it to
     (set (REGZ) (CONST_INT))
     ...
     ... (MEM (PLUS (REGZ) (REGY)))... .

     First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
     and that we know all uses of REGX before it dies.
     Also, explicitly check that REGX != REGY; our life information
     does not yet show whether REGY changes in this insn.  */

  if (GET_CODE (src) == PLUS
      && reg_state[regno].all_offsets_match
      && last_index_reg != -1
      && REG_P (XEXP (src, 1))
      && rtx_equal_p (XEXP (src, 0), reg)
      && !rtx_equal_p (XEXP (src, 1), reg)
      && last_label_ruid < reg_state[regno].use_ruid)
    {
      rtx base = XEXP (src, 1);
      rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
      rtx prev_set = prev ? single_set (prev) : NULL_RTX;
      rtx index_reg = NULL_RTX;
      rtx reg_sum = NULL_RTX;
      int i;

      /* Now we need to set INDEX_REG to an index register (denoted as
	 REGZ in the illustration above) and REG_SUM to the expression
	 register+register that we want to use to substitute uses of REG
	 (typically in MEMs) with.  First check REG and BASE for being
	 index registers; we can use them even if they are not dead.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
	  || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
				REGNO (base)))
	{
	  index_reg = reg;
	  reg_sum = src;
	}
      else
	{
	  /* Otherwise, look for a free index register.  Since we have
	     checked above that neither REG nor BASE are index registers,
	     if we find anything at all, it will be different from these
	     two registers.  */
	  for (i = first_index_reg; i <= last_index_reg; i++)
	    {
	      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
		  && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
		  && reg_state[i].store_ruid <= reg_state[regno].use_ruid
		  && (call_used_regs[i] || df_regs_ever_live_p (i))
		  && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
		  && !fixed_regs[i] && !global_regs[i]
		  && hard_regno_nregs[i][GET_MODE (reg)] == 1
		  && targetm.hard_regno_scratch_ok (i))
		{
		  index_reg = gen_rtx_REG (GET_MODE (reg), i);
		  reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
		  break;
		}
	    }
	}
      /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
	 (REGY), i.e. BASE, is not clobbered before the last use we'll
	 need.  */
      if (reg_sum
	  && prev_set
	  && CONST_INT_P (SET_SRC (prev_set))
	  && rtx_equal_p (SET_DEST (prev_set), reg)
	  && (reg_state[REGNO (base)].store_ruid
	      <= reg_state[regno].use_ruid))
	{
	  /* Change destination register and, if necessary, the constant
	     value in PREV, the constant loading instruction.  */
	  validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
	  if (reg_state[regno].offset != const0_rtx)
	    validate_change (prev,
			     &SET_SRC (prev_set),
			     GEN_INT (INTVAL (SET_SRC (prev_set))
				      + INTVAL (reg_state[regno].offset)),
			     1);

	  /* Now for every use of REG that we have recorded, replace REG
	     with REG_SUM.  */
	  for (i = reg_state[regno].use_index;
	       i < RELOAD_COMBINE_MAX_USES; i++)
	    validate_unshare_change (reg_state[regno].reg_use[i].insn,
				     reg_state[regno].reg_use[i].usep,
				     /* Each change must have its own
					replacement.  */
				     reg_sum, 1);

	  if (apply_change_group ())
	    {
	      struct reg_use *lowest_ruid = NULL;

	      /* For every new use of REG_SUM, we have to record the use
		 of BASE therein, i.e. operand 1.  */
	      for (i = reg_state[regno].use_index;
		   i < RELOAD_COMBINE_MAX_USES; i++)
		{
		  struct reg_use *use = reg_state[regno].reg_use + i;
		  reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
					   use->ruid, use->containing_mem);
		  if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
		    lowest_ruid = use;
		}

	      fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);

	      /* Delete the reg-reg addition.  */
	      delete_insn (insn);

	      if (reg_state[regno].offset != const0_rtx
		  /* Previous REG_EQUIV / REG_EQUAL notes for PREV
		     are now invalid.  */
		  && remove_reg_equal_equiv_notes (prev))
		df_notes_rescan (prev);

	      reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
	      return true;
	    }
	}
    }
  return false;
}
static void
reload_combine (void)
{
  rtx_insn *insn, *prev;
  basic_block bb;
  unsigned int r;
  int min_labelno, n_labels;
  HARD_REG_SET ever_live_at_start, *label_live;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.  */
  if (INDEX_REG_CLASS == NO_REGS)
    last_index_reg = -1;
  else if (first_index_reg == -1 && last_index_reg == 0)
    {
      for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
	  {
	    if (first_index_reg == -1)
	      first_index_reg = r;

	    last_index_reg = r;
	  }

      /* If no index register is available, we can quit now.  Set LAST_INDEX_REG
	 to -1 so we'll know to quit early the next time we get here.  */
      if (first_index_reg == -1)
	{
	  last_index_reg = -1;
	  return;
	}
    }

  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
     information is a bit fuzzy immediately after reload, but it's
     still good enough to determine which registers are live at a jump
     destination.  */
  min_labelno = get_first_label_num ();
  n_labels = max_label_num () - min_labelno;
  label_live = XNEWVEC (HARD_REG_SET, n_labels);
  CLEAR_HARD_REG_SET (ever_live_at_start);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      insn = BB_HEAD (bb);
      if (LABEL_P (insn))
	{
	  HARD_REG_SET live;
	  bitmap live_in = df_get_live_in (bb);

	  REG_SET_TO_HARD_REG_SET (live, live_in);
	  compute_use_by_pseudos (&live, live_in);
	  COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
	  IOR_HARD_REG_SET (ever_live_at_start, live);
	}
    }

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
  last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
    {
      reg_state[r].store_ruid = 0;
      reg_state[r].real_store_ruid = 0;
      if (fixed_regs[r])
	reg_state[r].use_index = -1;
      else
	reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
    }
  for (insn = get_last_insn (); insn; insn = prev)
    {
      bool control_flow_insn;
      rtx note;

      prev = PREV_INSN (insn);

      /* We cannot do our optimization across labels.  Invalidating all the use
	 information we have would be costly, so we just note where the label
	 is and then later disable any optimization that would cross it.  */
      if (LABEL_P (insn))
	last_label_ruid = reload_combine_ruid;
      else if (BARRIER_P (insn))
	{
	  /* Crossing a barrier resets all the use information.  */
	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (! fixed_regs[r])
	      reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
	}
      else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
	/* Optimizations across insns being marked as volatile must be
	   prevented.  All the usage information is invalidated
	   here.  */
	for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	  if (! fixed_regs[r]
	      && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
	    reg_state[r].use_index = -1;

      if (! NONDEBUG_INSN_P (insn))
	continue;

      reload_combine_ruid++;

      control_flow_insn = control_flow_insn_p (insn);
      if (control_flow_insn)
	last_jump_ruid = reload_combine_ruid;

      if (reload_combine_recognize_const_pattern (insn)
	  || reload_combine_recognize_pattern (insn))
	continue;

      note_stores (PATTERN (insn), reload_combine_note_store, NULL);

      if (CALL_P (insn))
	{
	  rtx link;
	  HARD_REG_SET used_regs;

	  get_call_reg_set_usage (insn, &used_regs, call_used_reg_set);

	  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	    if (TEST_HARD_REG_BIT (used_regs, r))
	      {
		reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
		reg_state[r].store_ruid = reload_combine_ruid;
	      }

	  for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
	       link = XEXP (link, 1))
	    {
	      rtx setuse = XEXP (link, 0);
	      rtx usage_rtx = XEXP (setuse, 0);
	      if ((GET_CODE (setuse) == USE || GET_CODE (setuse) == CLOBBER)
		  && REG_P (usage_rtx))
		{
		  unsigned int end_regno = END_REGNO (usage_rtx);
		  for (unsigned int i = REGNO (usage_rtx); i < end_regno; ++i)
		    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
		      {
			reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
			reg_state[i].store_ruid = reload_combine_ruid;
		      }
		    else
		      reg_state[i].use_index = -1;
		}
	    }
	}

      if (control_flow_insn && !ANY_RETURN_P (PATTERN (insn)))
	{
	  /* Non-spill registers might be used at the call destination in
	     some unknown fashion, so we have to mark the unknown use.  */
	  HARD_REG_SET *live;

	  if ((condjump_p (insn) || condjump_in_parallel_p (insn))
	      && JUMP_LABEL (insn))
	    {
	      if (ANY_RETURN_P (JUMP_LABEL (insn)))
		live = NULL;
	      else
		live = &LABEL_LIVE (JUMP_LABEL (insn));
	    }
	  else
	    live = &ever_live_at_start;

	  if (live)
	    for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
	      if (TEST_HARD_REG_BIT (*live, r))
		reg_state[r].use_index = -1;
	}

      reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
			       NULL_RTX);

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
	    {
	      int regno = REGNO (XEXP (note, 0));
	      reg_state[regno].store_ruid = reload_combine_ruid;
	      reg_state[regno].real_store_ruid = reload_combine_ruid;
	      reg_state[regno].use_index = -1;
	    }
	}
    }

  free (label_live);
}
/* Check if DST is a register or a subreg of a register; if it is,
   update store_ruid, real_store_ruid and use_index in the reg_state
   structure accordingly.  Called via note_stores from reload_combine.  */

static void
reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  int regno = 0;
  int i;
  machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
				   GET_MODE (SUBREG_REG (dst)),
				   SUBREG_BYTE (dst),
				   GET_MODE (dst));
      dst = SUBREG_REG (dst);
    }

  /* Some targets do argument pushes without adding REG_INC notes.  */
  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
	  || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
	  || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
	{
	  unsigned int end_regno = END_REGNO (XEXP (dst, 0));
	  for (unsigned int i = REGNO (XEXP (dst, 0)); i < end_regno; ++i)
	    {
	      /* We could probably do better, but for now mark the register
		 as used in an unknown fashion and set/clobbered at this
		 insn.  */
	      reg_state[i].use_index = -1;
	      reg_state[i].store_ruid = reload_combine_ruid;
	      reg_state[i].real_store_ruid = reload_combine_ruid;
	    }
	}
      else
	return;
    }

  if (!REG_P (dst))
    return;
  regno += REGNO (dst);

  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
     careful with registers / register parts that are not full words.
     Similarly for ZERO_EXTRACT.  */
  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
	{
	  reg_state[i].use_index = -1;
	  reg_state[i].store_ruid = reload_combine_ruid;
	  reg_state[i].real_store_ruid = reload_combine_ruid;
	}
    }
  else
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
	{
	  reg_state[i].store_ruid = reload_combine_ruid;
	  if (GET_CODE (set) == SET)
	    reg_state[i].real_store_ruid = reload_combine_ruid;
	  reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
	}
    }
}
/* XP points to a piece of rtl that has to be checked for any uses of
   registers.
   *XP is the pattern of INSN, or a part of it.
   Called from reload_combine, and recursively by itself.  */
static void
reload_combine_note_use (rtx *xp, rtx_insn *insn, int ruid, rtx containing_mem)
{
  rtx x = *xp;
  enum rtx_code code = x->code;
  const char *fmt;
  int i, j;
  rtx offset = const0_rtx; /* For the REG case below.  */

  switch (code)
    {
    case SET:
      if (REG_P (SET_DEST (x)))
	{
	  reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
	  return;
	}
      break;

    case USE:
      /* If this is the USE of a return value, we can't change it.  */
      if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
	{
	  /* Mark the return register as used in an unknown fashion.  */
	  rtx reg = XEXP (x, 0);
	  unsigned int end_regno = END_REGNO (reg);
	  for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
	    reg_state[regno].use_index = -1;
	  return;
	}
      break;

    case CLOBBER:
      if (REG_P (SET_DEST (x)))
	{
	  /* No spurious CLOBBERs of pseudo registers may remain.  */
	  gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
	  return;
	}
      break;

    case PLUS:
      /* We are interested in (plus (reg) (const_int)) .  */
      if (!REG_P (XEXP (x, 0))
	  || !CONST_INT_P (XEXP (x, 1)))
	break;
      offset = XEXP (x, 1);
      x = XEXP (x, 0);
      /* Fall through.  */
    case REG:
      {
	int regno = REGNO (x);
	int use_index;
	int nregs;

	/* No spurious USEs of pseudo registers may remain.  */
	gcc_assert (regno < FIRST_PSEUDO_REGISTER);

	nregs = REG_NREGS (x);

	/* We can't substitute into multi-hard-reg uses.  */
	if (nregs > 1)
	  {
	    while (--nregs >= 0)
	      reg_state[regno + nregs].use_index = -1;
	    return;
	  }

	/* We may be called to update uses in previously seen insns.
	   Don't add uses beyond the last store we saw.  */
	if (ruid < reg_state[regno].store_ruid)
	  return;

	/* If this register is already used in some unknown fashion, we
	   can't do anything.
	   If we decrement the index from zero to -1, we can't store more
	   uses, so this register becomes used in an unknown fashion.  */
	use_index = --reg_state[regno].use_index;
	if (use_index < 0)
	  return;

	if (use_index == RELOAD_COMBINE_MAX_USES - 1)
	  {
	    /* This is the first use of this register we have seen since we
	       marked it as dead.  */
	    reg_state[regno].offset = offset;
	    reg_state[regno].all_offsets_match = true;
	    reg_state[regno].use_ruid = ruid;
	  }
	else
	  {
	    if (reg_state[regno].use_ruid > ruid)
	      reg_state[regno].use_ruid = ruid;

	    if (! rtx_equal_p (offset, reg_state[regno].offset))
	      reg_state[regno].all_offsets_match = false;
	  }

	reg_state[regno].reg_use[use_index].insn = insn;
	reg_state[regno].reg_use[use_index].ruid = ruid;
	reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
	reg_state[regno].reg_use[use_index].usep = xp;
	return;
      }

    case MEM:
      containing_mem = x;
      break;

    default:
      break;
    }

  /* Recursively process the components of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
      else if (fmt[i] == 'E')
	{
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
				     containing_mem);
	}
    }
}
/* See if we can reduce the cost of a constant by replacing a move
   with an add.  We track situations in which a register is set to a
   constant or to a register plus a constant.  */
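/* An illustrative payoff (hypothetical constants):

     (set (reg:SI 0) (const_int 0x12340000))
     ...
     (set (reg:SI 0) (const_int 0x12340004))

   can become

     (set (reg:SI 0) (const_int 0x12340000))
     ...
     (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 4)))

   which wins on targets where a small add is cheaper than a wide
   constant load.  */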
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use move2add_last_label_luid to note where the label is and then
   later disable any optimization that would cross it.
   reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
   are only valid if reg_set_luid[n] is greater than
   move2add_last_label_luid.
   For a set that established a new (potential) base register with
   non-constant value, we use move2add_luid from the place where the
   setting insn is encountered; registers based off that base then
   get the same reg_set_luid.  Constants all get
   move2add_last_label_luid + 1 as their reg_set_luid.  */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];

/* If reg_base_reg[n] is negative, register n has been set to
   reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
   If reg_base_reg[n] is non-negative, register n has been set to the
   sum of reg_offset[n] and the value of register reg_base_reg[n]
   before reg_set_luid[n], calculated in mode reg_mode[n] .
   For multi-hard-register registers, all but the first one are
   recorded as BLKmode in reg_mode.  Setting reg_mode to VOIDmode
   marks it as invalid.  */
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
static machine_mode reg_mode[FIRST_PSEUDO_REGISTER];

/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* move2add_last_label_luid is set whenever a label is found.  Labels
   invalidate all previously collected reg_offset data.  */
static int move2add_last_label_luid;

/* ??? We don't know how zero / sign extension is handled, hence we
   can't go from a narrower to a wider mode.  */
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
       && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))
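/* For instance, on a target where truncation is a no-op, a value
   recorded in DImode may still be reused when inspected in SImode;
   going from a narrower recorded mode to a wider use is never
   allowed here.  */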
/* Record that REG is being set to a value with the mode of REG.  */

static void
move2add_record_mode (rtx reg)
{
  int regno, nregs;
  machine_mode mode = GET_MODE (reg);

  if (GET_CODE (reg) == SUBREG)
    {
      regno = subreg_regno (reg);
      nregs = subreg_nregs (reg);
    }
  else if (REG_P (reg))
    {
      regno = REGNO (reg);
      nregs = REG_NREGS (reg);
    }
  else
    gcc_unreachable ();
  for (int i = nregs - 1; i > 0; i--)
    reg_mode[regno + i] = BLKmode;
  reg_mode[regno] = mode;
}
/* Record that REG is being set to the sum of SYM and OFF.  */

static void
move2add_record_sym_value (rtx reg, rtx sym, rtx off)
{
  int regno = REGNO (reg);

  move2add_record_mode (reg);
  reg_set_luid[regno] = move2add_luid;
  reg_base_reg[regno] = -1;
  reg_symbol_ref[regno] = sym;
  reg_offset[regno] = INTVAL (off);
}
/* Check if REGNO contains a valid value in MODE.  */

static bool
move2add_valid_value_p (int regno, machine_mode mode)
{
  if (reg_set_luid[regno] <= move2add_last_label_luid)
    return false;

  if (mode != reg_mode[regno])
    {
      if (!MODES_OK_FOR_MOVE2ADD (mode, reg_mode[regno]))
	return false;
      /* The value loaded into regno in reg_mode[regno] is also valid in
	 mode after truncation only if (REG:mode regno) is the lowpart of
	 (REG:reg_mode[regno] regno).  Now, for big endian, the starting
	 regno of the lowpart might be different.  */
      int s_off = subreg_lowpart_offset (mode, reg_mode[regno]);
      s_off = subreg_regno_offset (regno, reg_mode[regno], s_off, mode);
      if (s_off != 0)
	/* We could in principle adjust regno, check reg_mode[regno] to be
	   BLKmode, and return s_off to the caller (vs. -1 for failure),
	   but we currently have no callers that could make use of this
	   information.  */
	return false;
    }

  for (int i = hard_regno_nregs[regno][mode] - 1; i > 0; i--)
    if (reg_mode[regno + i] != BLKmode)
      return false;
  return true;
}
/* This function is called with INSN that sets REG to (SYM + OFF),
   while REG is known to already have value (SYM + offset).
   This function tries to change INSN into an add instruction
   (set (REG) (plus (REG) (OFF - offset))) using the known value.
   It also updates the information about REG's known value.
   Return true if we made a change.  */

static bool
move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[regno],
			      GET_MODE (reg));
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  bool changed = false;

  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
     use (set (reg) (reg)) instead.
     We don't delete this insn, nor do we convert it into a
     note, to avoid losing register notes or the return
     value flag.  jump2 already knows how to get rid of
     no-op moves.  */
  if (new_src == const0_rtx)
    {
      /* If the constants are different, this is a
	 truncation, that, if turned into (set (reg)
	 (reg)), would be discarded.  Maybe we should
	 try a truncMN pattern?  */
      if (INTVAL (off) == reg_offset [regno])
	changed = validate_change (insn, &SET_SRC (pat), reg, 0);
    }
  else
    {
      struct full_rtx_costs oldcst, newcst;
      rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

      get_full_set_rtx_cost (pat, &oldcst);
      SET_SRC (pat) = tem;
      get_full_set_rtx_cost (pat, &newcst);
      SET_SRC (pat) = src;

      if (costs_lt_p (&newcst, &oldcst, speed)
	  && have_add2_insn (reg, new_src))
	changed = validate_change (insn, &SET_SRC (pat), tem, 0);
      else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
	{
	  machine_mode narrow_mode;
	  for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       narrow_mode != VOIDmode
		 && narrow_mode != GET_MODE (reg);
	       narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
	    {
	      if (have_insn_for (STRICT_LOW_PART, narrow_mode)
		  && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
		      == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
		{
		  rtx narrow_reg = gen_lowpart_common (narrow_mode, reg);
		  rtx narrow_src = gen_int_mode (INTVAL (off),
						 narrow_mode);
		  rtx new_set
		    = gen_rtx_SET (gen_rtx_STRICT_LOW_PART (VOIDmode,
							    narrow_reg),
				   narrow_src);
		  get_full_set_rtx_cost (new_set, &newcst);
		  if (costs_lt_p (&newcst, &oldcst, speed))
		    {
		      changed = validate_change (insn, &PATTERN (insn),
						 new_set, 0);
		      if (changed)
			break;
		    }
		}
	    }
	}
    }
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
/* This function is called with INSN that sets REG to (SYM + OFF),
   but REG doesn't have known value (SYM + offset).  This function
   tries to find another register which is known to already have
   value (SYM + offset) and change INSN into an add instruction
   (set (REG) (plus (the found register) (OFF - offset))) if such
   a register is found.  It also updates the information about
   REG's known value.
   Return true iff we made a change.  */

static bool
move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;
  struct full_rtx_costs oldcst, newcst, mincst;
  rtx plus_expr;

  init_costs_to_max (&mincst);
  get_full_set_rtx_cost (pat, &oldcst);

  plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
  SET_SRC (pat) = plus_expr;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (move2add_valid_value_p (i, GET_MODE (reg))
	&& reg_base_reg[i] < 0
	&& reg_symbol_ref[i] != NULL_RTX
	&& rtx_equal_p (sym, reg_symbol_ref[i]))
      {
	rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[i],
				    GET_MODE (reg));
	/* (set (reg) (plus (reg) (const_int 0))) is not canonical;
	   use (set (reg) (reg)) instead.
	   We don't delete this insn, nor do we convert it into a
	   note, to avoid losing register notes or the return
	   value flag.  jump2 already knows how to get rid of
	   no-op moves.  */
	if (new_src == const0_rtx)
	  {
	    init_costs_to_zero (&mincst);
	    min_regno = i;
	    break;
	  }
	else
	  {
	    XEXP (plus_expr, 1) = new_src;
	    get_full_set_rtx_cost (pat, &newcst);

	    if (costs_lt_p (&newcst, &mincst, speed))
	      {
		mincst = newcst;
		min_regno = i;
	      }
	  }
      }
  SET_SRC (pat) = src;

  if (costs_lt_p (&mincst, &oldcst, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      if (i != min_regno)
	{
	  rtx new_src = gen_int_mode (UINTVAL (off) - reg_offset[min_regno],
				      GET_MODE (reg));
	  tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
	}
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
	changed = true;
    }
  reg_set_luid[regno] = move2add_luid;
  move2add_record_sym_value (reg, sym, off);
  return changed;
}
/* Convert move insns with constant inputs to additions if they are cheaper.
   Return true if any changes were made.  */
static bool
reload_cse_move2add (rtx_insn *first)
{
  int i;
  rtx_insn *insn;
  bool changed = false;

  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
    {
      reg_set_luid[i] = 0;
      reg_offset[i] = 0;
      reg_base_reg[i] = 0;
      reg_symbol_ref[i] = NULL_RTX;
      reg_mode[i] = VOIDmode;
    }

  move2add_last_label_luid = 0;
  move2add_luid = 2;
  for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
    {
      rtx pat, note;

      if (LABEL_P (insn))
	{
	  move2add_last_label_luid = move2add_luid;
	  /* We're going to increment move2add_luid twice after a
	     label, so that we can use move2add_last_label_luid + 1 as
	     the luid for constants.  */
	  move2add_luid++;
	  continue;
	}
      if (! INSN_P (insn))
	continue;
      pat = PATTERN (insn);
      /* For simplicity, we only perform this optimization on
	 straightforward SETs.  */
      if (GET_CODE (pat) == SET
	  && REG_P (SET_DEST (pat)))
	{
	  rtx reg = SET_DEST (pat);
	  int regno = REGNO (reg);
	  rtx src = SET_SRC (pat);

	  /* Check if we have valid information on the contents of this
	     register in the mode of REG.  */
	  if (move2add_valid_value_p (regno, GET_MODE (reg))
	      && dbg_cnt (cse2_move2add))
	    {
	      /* Try to transform (set (REGX) (CONST_INT A))
				  ...
				  (set (REGX) (CONST_INT B))
		 to
				  (set (REGX) (CONST_INT A))
				  ...
				  (set (REGX) (plus (REGX) (CONST_INT B-A)))
		 or
				  (set (REGX) (CONST_INT A))
				  ...
				  (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
	      */

	      if (CONST_INT_P (src)
		  && reg_base_reg[regno] < 0
		  && reg_symbol_ref[regno] == NULL_RTX)
		{
		  changed |= move2add_use_add2_insn (reg, NULL_RTX, src, insn);
		  continue;
		}
	      /* Try to transform (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
				  ...
				  (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT B)))
		 to
				  (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
				  ...
				  (set (REGX) (REGY))
				  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
	      else if (REG_P (src)
		       && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
		       && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
		       && move2add_valid_value_p (REGNO (src), GET_MODE (reg)))
		{
		  rtx_insn *next = next_nonnote_nondebug_insn (insn);
		  rtx set = NULL_RTX;
		  if (next)
		    set = single_set (next);
		  if (set
		      && SET_DEST (set) == reg
		      && GET_CODE (SET_SRC (set)) == PLUS
		      && XEXP (SET_SRC (set), 0) == reg
		      && CONST_INT_P (XEXP (SET_SRC (set), 1)))
		    {
		      rtx src3 = XEXP (SET_SRC (set), 1);
		      unsigned HOST_WIDE_INT added_offset = UINTVAL (src3);
		      HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
		      HOST_WIDE_INT regno_offset = reg_offset[regno];
		      rtx new_src =
			gen_int_mode (added_offset
				      + base_offset
				      - regno_offset,
				      GET_MODE (reg));
		      bool success = false;
		      bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

		      if (new_src == const0_rtx)
			/* See above why we create (set (reg) (reg)) here.  */
			success
			  = validate_change (next, &SET_SRC (set), reg, 0);
		      else
			{
			  rtx old_src = SET_SRC (set);
			  struct full_rtx_costs oldcst, newcst;
			  rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

			  get_full_set_rtx_cost (set, &oldcst);
			  SET_SRC (set) = tem;
			  get_full_set_src_cost (tem, GET_MODE (reg), &newcst);
			  SET_SRC (set) = old_src;
			  costs_add_n_insns (&oldcst, 1);

			  if (costs_lt_p (&newcst, &oldcst, speed)
			      && have_add2_insn (reg, new_src))
			    {
			      rtx newpat = gen_rtx_SET (reg, tem);
			      success
				= validate_change (next, &PATTERN (next),
						   newpat, 0);
			    }
			}
		      if (success)
			delete_insn (insn);
		      changed |= success;
		      insn = next;
		      move2add_record_mode (reg);
		      reg_offset[regno]
			= trunc_int_for_mode (added_offset + base_offset,
					      GET_MODE (reg));
		      continue;
		    }
		}
	    }
	  /* Try to transform
	     (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
	     ...
	     (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
	     to
	     (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
	     ...
	     (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A))))  */
	  if ((GET_CODE (src) == SYMBOL_REF
	       || (GET_CODE (src) == CONST
		   && GET_CODE (XEXP (src, 0)) == PLUS
		   && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
		   && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
	      && dbg_cnt (cse2_move2add))
	    {
	      rtx sym, off;

	      if (GET_CODE (src) == SYMBOL_REF)
		{
		  sym = src;
		  off = const0_rtx;
		}
	      else
		{
		  sym = XEXP (XEXP (src, 0), 0);
		  off = XEXP (XEXP (src, 0), 1);
		}

	      /* If the reg already contains the value which is sum of
		 sym and some constant value, we can use an add2 insn.  */
	      if (move2add_valid_value_p (regno, GET_MODE (reg))
		  && reg_base_reg[regno] < 0
		  && reg_symbol_ref[regno] != NULL_RTX
		  && rtx_equal_p (sym, reg_symbol_ref[regno]))
		changed |= move2add_use_add2_insn (reg, sym, off, insn);

	      /* Otherwise, we have to find a register whose value is sum
		 of sym and some constant value.  */
	      else
		changed |= move2add_use_add3_insn (reg, sym, off, insn);

	      continue;
	    }
	}
      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) == REG_INC
	      && REG_P (XEXP (note, 0)))
	    {
	      /* Reset the information about this register.  */
	      int regno = REGNO (XEXP (note, 0));
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  move2add_record_mode (XEXP (note, 0));
		  reg_mode[regno] = VOIDmode;
		}
	    }
	}
      note_stores (PATTERN (insn), move2add_note_store, insn);

      /* If INSN is a conditional branch, we try to extract an
	 implicit set out of it.  */
      if (any_condjump_p (insn))
	{
	  rtx cnd = fis_get_condition (insn);

	  if (cnd != NULL_RTX
	      && GET_CODE (cnd) == NE
	      && REG_P (XEXP (cnd, 0))
	      && !reg_set_p (XEXP (cnd, 0), insn)
	      /* The following two checks, which are also in
		 move2add_note_store, are intended to reduce the
		 number of calls to gen_rtx_SET to avoid memory
		 allocation if possible.  */
	      && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
	      && REG_NREGS (XEXP (cnd, 0)) == 1
	      && CONST_INT_P (XEXP (cnd, 1)))
	    {
	      rtx implicit_set =
		gen_rtx_SET (XEXP (cnd, 0), XEXP (cnd, 1));
	      move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
	    }
	}

      /* If this is a CALL_INSN, all call used registers are stored with
	 unknown values.  */
      if (CALL_P (insn))
	{
	  rtx link;

	  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	    {
	      if (call_used_regs[i])
		/* Reset the information about this register.  */
		reg_mode[i] = VOIDmode;
	    }

	  for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
	       link = XEXP (link, 1))
	    {
	      rtx setuse = XEXP (link, 0);
	      rtx usage_rtx = XEXP (setuse, 0);
	      if (GET_CODE (setuse) == CLOBBER
		  && REG_P (usage_rtx))
		{
		  unsigned int end_regno = END_REGNO (usage_rtx);
		  for (unsigned int r = REGNO (usage_rtx); r < end_regno; ++r)
		    /* Reset the information about this register.  */
		    reg_mode[r] = VOIDmode;
		}
	    }
	}
    }
  return changed;
}
/* SET is a SET or CLOBBER that sets DST.  DATA is the insn which
   contains SET.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */

static void
move2add_note_store (rtx dst, const_rtx set, void *data)
{
  rtx_insn *insn = (rtx_insn *) data;
  unsigned int regno = 0;
  machine_mode mode = GET_MODE (dst);

  /* Some targets do argument pushes without adding REG_INC notes.  */
  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
	  || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
	reg_mode[REGNO (XEXP (dst, 0))] = VOIDmode;
      return;
    }

  if (GET_CODE (dst) == SUBREG)
    regno = subreg_regno (dst);
  else if (REG_P (dst))
    regno = REGNO (dst);
  else
    return;

  if (SCALAR_INT_MODE_P (mode)
      && GET_CODE (set) == SET)
    {
      rtx note, sym = NULL_RTX;
      rtx off;

      note = find_reg_equal_equiv_note (insn);
      if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
	{
	  sym = XEXP (note, 0);
	  off = const0_rtx;
	}
      else if (note && GET_CODE (XEXP (note, 0)) == CONST
	       && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
	       && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
	{
	  sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
	  off = XEXP (XEXP (XEXP (note, 0), 0), 1);
	}

      if (sym != NULL_RTX)
	{
	  move2add_record_sym_value (dst, sym, off);
	  return;
	}
    }

  if (SCALAR_INT_MODE_P (mode)
      && GET_CODE (set) == SET
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
    {
      rtx src = SET_SRC (set);
      rtx base_reg;
      unsigned HOST_WIDE_INT offset;
      int base_regno;
      switch (GET_CODE (src))
	{
	case PLUS:
	  if (REG_P (XEXP (src, 0)))
	    {
	      base_reg = XEXP (src, 0);

	      if (CONST_INT_P (XEXP (src, 1)))
		offset = UINTVAL (XEXP (src, 1));
	      else if (REG_P (XEXP (src, 1))
		       && move2add_valid_value_p (REGNO (XEXP (src, 1)), mode))
		{
		  if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
		      && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
		    offset = reg_offset[REGNO (XEXP (src, 1))];
		  /* Maybe the first register is known to be a
		     constant.  */
		  else if (move2add_valid_value_p (REGNO (base_reg), mode)
			   && reg_base_reg[REGNO (base_reg)] < 0
			   && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
		    {
		      offset = reg_offset[REGNO (base_reg)];
		      base_reg = XEXP (src, 1);
		    }
		  else
		    goto invalidate;
		}
	      else
		goto invalidate;

	      break;
	    }

	  goto invalidate;

	case REG:
	  base_reg = src;
	  offset = 0;
	  break;

	case CONST_INT:
	  /* Start tracking the register as a constant.  */
	  reg_base_reg[regno] = -1;
	  reg_symbol_ref[regno] = NULL_RTX;
	  reg_offset[regno] = INTVAL (SET_SRC (set));
	  /* We assign the same luid to all registers set to constants.  */
	  reg_set_luid[regno] = move2add_last_label_luid + 1;
	  move2add_record_mode (dst);
	  return;

	default:
	  goto invalidate;
	}

      base_regno = REGNO (base_reg);
      /* If information about the base register is not valid, set it
	 up as a new base register, pretending its value is known
	 starting from the current insn.  */
      if (!move2add_valid_value_p (base_regno, mode))
	{
	  reg_base_reg[base_regno] = base_regno;
	  reg_symbol_ref[base_regno] = NULL_RTX;
	  reg_offset[base_regno] = 0;
	  reg_set_luid[base_regno] = move2add_luid;
	  gcc_assert (GET_MODE (base_reg) == mode);
	  move2add_record_mode (base_reg);
	}

      /* Copy base information from our base register.  */
      reg_set_luid[regno] = reg_set_luid[base_regno];
      reg_base_reg[regno] = reg_base_reg[base_regno];
      reg_symbol_ref[regno] = reg_symbol_ref[base_regno];

      /* Compute the sum of the offsets or constants.  */
      reg_offset[regno]
	= trunc_int_for_mode (offset + reg_offset[base_regno], mode);

      move2add_record_mode (dst);
    }
  else
    {
    invalidate:
      /* Invalidate the contents of the register.  */
      move2add_record_mode (dst);
      reg_mode[regno] = VOIDmode;
    }
}
namespace {

const pass_data pass_data_postreload_cse =
{
  RTL_PASS, /* type */
  "postreload", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_RELOAD_CSE_REGS, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_postreload_cse : public rtl_opt_pass
{
public:
  pass_postreload_cse (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_postreload_cse, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return (optimize > 0 && reload_completed); }

  virtual unsigned int execute (function *);

}; // class pass_postreload_cse

unsigned int
pass_postreload_cse::execute (function *fun)
{
  if (!dbg_cnt (postreload_cse))
    return 0;

  /* Do a very simple CSE pass over just the hard registers.  */
  reload_cse_regs (get_insns ());
  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if (fun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    cleanup_cfg (0);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_postreload_cse (gcc::context *ctxt)
{
  return new pass_postreload_cse (ctxt);
}