1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This is the pathetic reminder of old fame of the jump-optimization pass
21 of the compiler. Now it contains basically a set of utility functions to
24 Each CODE_LABEL has a count of the times it is used
25 stored in the LABEL_NUSES internal field, and each JUMP_INSN
26 has one label that it refers to stored in the
27 JUMP_LABEL internal field. With this we can detect labels that
28 become unused because of the deletion of all the jumps that
29 formerly used them. The JUMP_LABEL info is sometimes looked
30 at by later passes. For return insns, it contains either a
31 RETURN or a SIMPLE_RETURN rtx.
33 The subroutines redirect_jump and invert_jump are used
34 from other passes as well. */
38 #include "coretypes.h"
43 #include "hard-reg-set.h"
45 #include "insn-config.h"
46 #include "insn-attr.h"
54 #include "dominance.h"
57 #include "basic-block.h"
59 #include "statistics.h"
72 #include "diagnostic-core.h"
74 #include "tree-pass.h"
78 /* Optimize jump y; x: ... y: jumpif... x?
79 Don't know if it is worth bothering with. */
80 /* Optimize two cases of conditional jump to conditional jump?
81 This can never delete any instruction or make anything dead,
82 or even change what is live at any point.
83 So perhaps let combiner do it. */
/* File-local helper prototypes; the corresponding definitions appear
   later in this file.  */
85 static void init_label_info (rtx_insn *);
86 static void mark_all_labels (rtx_insn *);
87 static void mark_jump_label_1 (rtx, rtx_insn *, bool, bool);
88 static void mark_jump_label_asm (rtx, rtx_insn *);
89 static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
90 static int invert_exp_1 (rtx, rtx);
92 /* Worker for rebuild_jump_labels and rebuild_jump_labels_chain. */
/* NOTE(review): excerpted listing -- gaps in the embedded line numbers
   mark original source lines omitted from this view (return type,
   braces, and part of the body are not visible).  What is visible:
   the forced-labels reference-count bump, timed under TV_REBUILD_JUMP.
   COUNT_FORCED presumably gates that bump -- confirm in full source.  */
94 rebuild_jump_labels_1 (rtx_insn *f, bool count_forced)
98 timevar_push (TV_REBUILD_JUMP);
102 /* Keep track of labels used from static data; we don't track them
103 closely enough to delete them here, so make sure their reference
104 count doesn't drop to zero. */
107 for (insn = forced_labels; insn; insn = insn->next ())
108 if (LABEL_P (insn->insn ()))
109 LABEL_NUSES (insn->insn ())++;
110 timevar_pop (TV_REBUILD_JUMP);
113 /* This function rebuilds the JUMP_LABEL field and REG_LABEL_TARGET
114 notes in jumping insns and REG_LABEL_OPERAND notes in non-jumping
115 instructions and jumping insns that have labels as operands
116 (e.g. cbranchsi4). */
/* Delegates to rebuild_jump_labels_1 with count_forced == true, i.e.
   references from forced_labels are counted (contrast
   rebuild_jump_labels_chain).  NOTE(review): excerpted listing; the
   return type and braces are omitted from this view.  */
118 rebuild_jump_labels (rtx_insn *f)
120 rebuild_jump_labels_1 (f, true);
123 /* This function is like rebuild_jump_labels, but doesn't run over
124 forced_labels. It can be used on insn chains that aren't the
125 main function chain. */
/* Delegates to rebuild_jump_labels_1 with count_forced == false.
   NOTE(review): excerpted listing; return type and braces omitted.  */
127 rebuild_jump_labels_chain (rtx_insn *chain)
129 rebuild_jump_labels_1 (chain, false);
132 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
133 non-fallthru insn. This is not generally true, as multiple barriers
134 may have crept in, or the BARRIER may be separated from the last
135 real insn by one or more NOTEs.
137 This simple pass moves barriers and removes duplicates so that the
/* NOTE(review): excerpted listing -- gaps in the embedded line numbers
   mark omitted original lines (return type, braces, the duplicate-
   barrier deletion arm after line 163, and the loop over following
   notes).  Visible logic: for each BARRIER, find the previous
   non-note insn, avoid splitting a call from its
   NOTE_INSN_CALL_ARG_LOCATION, and move the barrier to sit directly
   after PREV via reorder_insns_nobb, fixing up BLOCK_FOR_INSN on the
   notes that now follow the barrier.  */
141 cleanup_barriers (void)
144 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
146 if (BARRIER_P (insn))
148 rtx_insn *prev = prev_nonnote_insn (insn);
154 /* Make sure we do not split a call and its corresponding
155 CALL_ARG_LOCATION note. */
156 rtx_insn *next = NEXT_INSN (prev);
159 && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
163 if (BARRIER_P (prev))
165 else if (prev != PREV_INSN (insn))
167 basic_block bb = BLOCK_FOR_INSN (prev);
168 rtx_insn *end = PREV_INSN (insn);
169 reorder_insns_nobb (insn, insn, prev);
172 /* If the backend called in machine reorg compute_bb_for_insn
173 and didn't free_bb_for_insn again, preserve basic block
174 boundaries. Move the end of basic block to PREV since
175 it is followed by a barrier now, and clear BLOCK_FOR_INSN
176 on the following notes.
177 ??? Maybe the proper solution for the targets that have
178 cfg around after machine reorg is not to run cleanup_barriers
183 prev = NEXT_INSN (prev);
184 if (prev != insn && BLOCK_FOR_INSN (prev) == bb)
185 BLOCK_FOR_INSN (prev) = NULL;
/* Pass-manager metadata for the "barriers" RTL cleanup pass; all
   property/TODO flags are zero (no prerequisites, no side bookkeeping).
   NOTE(review): excerpted listing; a few initializer lines (e.g. the
   pass type and timevar id fields) are omitted from this view.  */
197 const pass_data pass_data_cleanup_barriers =
200 "barriers", /* name */
201 OPTGROUP_NONE, /* optinfo_flags */
203 0, /* properties_required */
204 0, /* properties_provided */
205 0, /* properties_destroyed */
206 0, /* todo_flags_start */
207 0, /* todo_flags_finish */
/* Thin opt_pass wrapper: execute() simply runs cleanup_barriers ()
   and returns its TODO flags.  */
210 class pass_cleanup_barriers : public rtl_opt_pass
213 pass_cleanup_barriers (gcc::context *ctxt)
214 : rtl_opt_pass (pass_data_cleanup_barriers, ctxt)
217 /* opt_pass methods: */
218 virtual unsigned int execute (function *) { return cleanup_barriers (); }
220 }; // class pass_cleanup_barriers
/* Factory entry point used by the pass manager to instantiate the
   barrier-cleanup pass.  NOTE(review): excerpted listing; the return
   type line and braces are omitted from this view.  */
225 make_pass_cleanup_barriers (gcc::context *ctxt)
227 return new pass_cleanup_barriers (ctxt);
231 /* Initialize LABEL_NUSES and JUMP_LABEL fields, add REG_LABEL_TARGET
232 for remaining targets for JUMP_P. Delete any REG_LABEL_OPERAND
233 notes whose labels don't occur in the insn any more. */
/* NOTE(review): excerpted listing -- gaps in the embedded numbers mark
   omitted lines (return type, braces, and the INSN_P guard around the
   note loop).  Visible: reset each label's use count to 1 or 0
   depending on LABEL_PRESERVE_P, then drop stale REG_LABEL_OPERAND
   notes whose label is no longer mentioned in the pattern.  */
236 init_label_info (rtx_insn *f)
240 for (insn = f; insn; insn = NEXT_INSN (insn))
243 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
245 /* REG_LABEL_TARGET notes (including the JUMP_LABEL field) are
246 sticky and not reset here; that way we won't lose association
247 with a label when e.g. the source for a target register
248 disappears out of reach for targets that may use jump-target
249 registers. Jump transformations are supposed to transform
250 any REG_LABEL_TARGET notes. The target label reference in a
251 branch may disappear from the branch (and from the
252 instruction before it) for other reasons, like register
259 for (note = REG_NOTES (insn); note; note = next)
261 next = XEXP (note, 1);
262 if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
263 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
264 remove_note (insn, note);
270 /* A subroutine of mark_all_labels. Trivially propagate a simple label
271 load into a jump_insn that uses it. */
/* NOTE(review): excerpted listing; return type, braces and a couple of
   interior lines are omitted.  Visible contract: if the previous
   non-jump insn single_set's a register from a plain LABEL_REF, and
   JUMP_INSN's pc-set source is that register (directly or as an
   IF_THEN_ELSE arm), mark the label as the jump's primary target via
   mark_jump_label_1 with is_target == true.  */
274 maybe_propagate_label_ref (rtx_insn *jump_insn, rtx_insn *prev_nonjump_insn)
276 rtx label_note, pc, pc_src;
278 pc = pc_set (jump_insn);
279 pc_src = pc != NULL ? SET_SRC (pc) : NULL;
280 label_note = find_reg_note (prev_nonjump_insn, REG_LABEL_OPERAND, NULL);
282 /* If the previous non-jump insn sets something to a label,
283 something that this jump insn uses, make that label the primary
284 target of this insn if we don't yet have any. That previous
285 insn must be a single_set and not refer to more than one label.
286 The jump insn must not refer to other labels as jump targets
287 and must be a plain (set (pc) ...), maybe in a parallel, and
288 may refer to the item being set only directly or as one of the
289 arms in an IF_THEN_ELSE. */
291 if (label_note != NULL && pc_src != NULL)
293 rtx label_set = single_set (prev_nonjump_insn);
294 rtx label_dest = label_set != NULL ? SET_DEST (label_set) : NULL;
296 if (label_set != NULL
297 /* The source must be the direct LABEL_REF, not a
298 PLUS, UNSPEC, IF_THEN_ELSE etc. */
299 && GET_CODE (SET_SRC (label_set)) == LABEL_REF
300 && (rtx_equal_p (label_dest, pc_src)
301 || (GET_CODE (pc_src) == IF_THEN_ELSE
302 && (rtx_equal_p (label_dest, XEXP (pc_src, 1))
303 || rtx_equal_p (label_dest, XEXP (pc_src, 2))))))
305 /* The CODE_LABEL referred to in the note must be the
306 CODE_LABEL in the LABEL_REF of the "set". We can
307 conveniently use it for the marker function, which
308 requires a LABEL_REF wrapping. */
309 gcc_assert (XEXP (label_note, 0) == LABEL_REF_LABEL (SET_SRC (label_set)));
311 mark_jump_label_1 (label_set, jump_insn, false, true);
313 gcc_assert (JUMP_LABEL (jump_insn) == XEXP (label_note, 0));
318 /* Mark the label each jump jumps to.
319 Combine consecutive labels, and count uses of labels. */
/* NOTE(review): excerpted listing; return type, braces and several
   interior lines are omitted.  Two visible strategies: in cfglayout
   mode walk the CFG (including BB_HEADER/BB_FOOTER tablejump data);
   otherwise walk the plain insn chain, tracking the previous non-jump
   insn so maybe_propagate_label_ref can assign missing JUMP_LABELs.  */
322 mark_all_labels (rtx_insn *f)
326 if (current_ir_type () == IR_RTL_CFGLAYOUT)
329 FOR_EACH_BB_FN (bb, cfun)
331 /* In cfglayout mode, we don't bother with trivial next-insn
332 propagation of LABEL_REFs into JUMP_LABEL. This will be
333 handled by other optimizers using better algorithms. */
334 FOR_BB_INSNS (bb, insn)
336 gcc_assert (! insn->deleted ());
337 if (NONDEBUG_INSN_P (insn))
338 mark_jump_label (PATTERN (insn), insn, 0);
341 /* In cfglayout mode, there may be non-insns between the
342 basic blocks. If those non-insns represent tablejump data,
343 they contain label references that we must record. */
344 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
345 if (JUMP_TABLE_DATA_P (insn))
346 mark_jump_label (PATTERN (insn), insn, 0);
347 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
348 if (JUMP_TABLE_DATA_P (insn))
349 mark_jump_label (PATTERN (insn), insn, 0);
354 rtx_insn *prev_nonjump_insn = NULL;
355 for (insn = f; insn; insn = NEXT_INSN (insn))
357 if (insn->deleted ())
359 else if (LABEL_P (insn))
360 prev_nonjump_insn = NULL;
361 else if (JUMP_TABLE_DATA_P (insn))
362 mark_jump_label (PATTERN (insn), insn, 0);
363 else if (NONDEBUG_INSN_P (insn))
365 mark_jump_label (PATTERN (insn), insn, 0);
368 if (JUMP_LABEL (insn) == NULL && prev_nonjump_insn != NULL)
369 maybe_propagate_label_ref (insn, prev_nonjump_insn);
372 prev_nonjump_insn = insn;
378 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
379 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
380 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
381 know whether it's source is floating point or integer comparison. Machine
382 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
383 to help this function avoid overhead in these cases. */
/* NOTE(review): excerpted listing -- gaps in the embedded line numbers
   mark omitted lines (return type, braces, the switch scaffolding and
   case labels around lines 412-441, and the reg-reg-move / clobber arms
   near lines 474-488).  Visible structure: (1) let the machine
   description reverse a CC-mode comparison; (2) dispatch on the
   comparison code (the case labels themselves are omitted here); (3)
   for CC/cc0 comparisons, scan backwards for the defining COMPARE to
   recover the real operand mode; (4) finally reverse integer or
   non-NaN floating comparisons.  */
385 reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
386 const_rtx arg1, const_rtx insn)
390 /* If this is not actually a comparison, we can't reverse it. */
391 if (GET_RTX_CLASS (code) != RTX_COMPARE
392 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
395 mode = GET_MODE (arg0);
396 if (mode == VOIDmode)
397 mode = GET_MODE (arg1);
399 /* First see if machine description supplies us way to reverse the
400 comparison. Give it priority over everything else to allow
401 machine description to do tricks. */
402 if (GET_MODE_CLASS (mode) == MODE_CC
403 && REVERSIBLE_CC_MODE (mode))
405 #ifdef REVERSE_CONDITION
406 return REVERSE_CONDITION (code, mode);
408 return reverse_condition (code);
412 /* Try a few special cases based on the comparison code. */
421 /* It is always safe to reverse EQ and NE, even for the floating
422 point. Similarly the unsigned comparisons are never used for
423 floating point so we can reverse them in the default way. */
424 return reverse_condition (code);
429 /* In case we already see unordered comparison, we can be sure to
430 be dealing with floating point so we don't need any more tests. */
431 return reverse_condition_maybe_unordered (code);
436 /* We don't have safe way to reverse these yet. */
442 if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
445 /* Try to search for the comparison to determine the real mode.
446 This code is expensive, but with sane machine description it
447 will be never used, since REVERSIBLE_CC_MODE will return true
452 /* These CONST_CAST's are okay because prev_nonnote_insn just
453 returns its argument and we assign it to a const_rtx
455 for (prev = prev_nonnote_insn (CONST_CAST_RTX (insn));
456 prev != 0 && !LABEL_P (prev);
457 prev = prev_nonnote_insn (CONST_CAST_RTX (prev)))
459 const_rtx set = set_of (arg0, prev);
460 if (set && GET_CODE (set) == SET
461 && rtx_equal_p (SET_DEST (set), arg0))
463 rtx src = SET_SRC (set);
465 if (GET_CODE (src) == COMPARE)
467 rtx comparison = src;
468 arg0 = XEXP (src, 0);
469 mode = GET_MODE (arg0);
470 if (mode == VOIDmode)
471 mode = GET_MODE (XEXP (comparison, 1));
474 /* We can get past reg-reg moves. This may be useful for model
475 of i387 comparisons that first move flag registers around. */
482 /* If register is clobbered in some ununderstandable way,
489 /* Test for an integer condition, or a floating-point comparison
490 in which NaNs can be ignored. */
491 if (CONST_INT_P (arg0)
492 || (GET_MODE (arg0) != VOIDmode
493 && GET_MODE_CLASS (mode) != MODE_CC
494 && !HONOR_NANS (mode)))
495 return reverse_condition (code);
500 /* A wrapper around the previous function to take COMPARISON as rtx
501 expression. This simplifies many callers. */
/* Splits COMPARISON into code/operands and delegates to
   reversed_comparison_code_parts.  NOTE(review): excerpted listing;
   the return type, braces and the non-comparison early return value
   are omitted from this view.  */
503 reversed_comparison_code (const_rtx comparison, const_rtx insn)
505 if (!COMPARISON_P (comparison))
507 return reversed_comparison_code_parts (GET_CODE (comparison),
508 XEXP (comparison, 0),
509 XEXP (comparison, 1), insn);
512 /* Return comparison with reversed code of EXP.
513 Return NULL_RTX in case we fail to do the reversal. */
/* NOTE(review): excerpted listing; return type, braces and the
   UNKNOWN early-return value are omitted.  On success, rebuilds the
   relational with the reversed code via simplify_gen_relational.  */
515 reversed_comparison (const_rtx exp, machine_mode mode)
517 enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
518 if (reversed_code == UNKNOWN)
521 return simplify_gen_relational (reversed_code, mode, VOIDmode,
522 XEXP (exp, 0), XEXP (exp, 1));
526 /* Given an rtx-code for a comparison, return the code for the negated
527 comparison. If no such code exists, return UNKNOWN.
529 WATCH OUT! reverse_condition is not safe to use on a jump that might
530 be acting on the results of an IEEE floating point comparison, because
531 of the special treatment of non-signaling nans in comparisons.
532 Use reversed_comparison_code instead. */
/* NOTE(review): excerpted listing -- the entire function body
   (original lines 536-575, presumably a switch over comparison codes)
   is omitted from this view.  */
535 reverse_condition (enum rtx_code code)
577 /* Similar, but we're allowed to generate unordered comparisons, which
578 makes it safe for IEEE floating-point. Of course, we have to recognize
579 that the target will support them too... */
/* NOTE(review): excerpted listing -- the function body (original lines
   583-618) is omitted from this view.  */
582 reverse_condition_maybe_unordered (enum rtx_code code)
620 /* Similar, but return the code when two operands of a comparison are swapped.
621 This IS safe for IEEE floating-point. */
/* NOTE(review): excerpted listing -- the function body (original lines
   625-664) is omitted from this view.  */
624 swap_condition (enum rtx_code code)
666 /* Given a comparison CODE, return the corresponding unsigned comparison.
667 If CODE is an equality comparison or already an unsigned comparison,
/* NOTE(review): excerpted listing -- the rest of this comment and the
   function body (original lines 668-695) are omitted from this view.  */
671 unsigned_condition (enum rtx_code code)
697 /* Similarly, return the signed version of a comparison. */
/* NOTE(review): excerpted listing -- the function body (original lines
   701-724) is omitted from this view.  */
700 signed_condition (enum rtx_code code)
726 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
727 truth of CODE1 implies the truth of CODE2. */
/* NOTE(review): excerpted listing -- the switch scaffolding, the case
   labels for CODE1, and the final returns are omitted; only the
   per-case CODE2 implication tests are visible.  Which CODE1 each
   visible test belongs to cannot be determined from this view.  */
730 comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
732 /* UNKNOWN comparison codes can happen as a result of trying to revert
734 They can't match anything, so we have to reject them here. */
735 if (code1 == UNKNOWN || code2 == UNKNOWN)
744 if (code2 == UNLE || code2 == UNGE)
749 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
755 if (code2 == UNLE || code2 == NE)
760 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
765 if (code2 == UNGE || code2 == NE)
770 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
776 if (code2 == ORDERED)
781 if (code2 == NE || code2 == ORDERED)
786 if (code2 == LEU || code2 == NE)
791 if (code2 == GEU || code2 == NE)
796 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
797 || code2 == UNGE || code2 == UNGT)
808 /* Return 1 if INSN is an unconditional jump and nothing else. */
/* True iff the whole pattern is exactly (set (pc) (label_ref ...)).
   NOTE(review): excerpted listing; return type and braces omitted.  */
811 simplejump_p (const rtx_insn *insn)
813 return (JUMP_P (insn)
814 && GET_CODE (PATTERN (insn)) == SET
815 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
816 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
819 /* Return nonzero if INSN is a (possibly) conditional jump
822 Use of this function is deprecated, since we need to support combined
823 branch and compare insns. Use any_condjump_p instead whenever possible. */
/* NOTE(review): excerpted listing; return type, braces and a line
   between the PC check and the LABEL_REF check (original 832-834,
   presumably x = SET_SRC (x)) are omitted.  Accepts (set (pc) X)
   where X is a LABEL_REF or an IF_THEN_ELSE with a pc arm and a
   label/return arm.  */
826 condjump_p (const rtx_insn *insn)
828 const_rtx x = PATTERN (insn);
830 if (GET_CODE (x) != SET
831 || GET_CODE (SET_DEST (x)) != PC)
835 if (GET_CODE (x) == LABEL_REF)
838 return (GET_CODE (x) == IF_THEN_ELSE
839 && ((GET_CODE (XEXP (x, 2)) == PC
840 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
841 || ANY_RETURN_P (XEXP (x, 1))))
842 || (GET_CODE (XEXP (x, 1)) == PC
843 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
844 || ANY_RETURN_P (XEXP (x, 2))))));
847 /* Return nonzero if INSN is a (possibly) conditional jump inside a
850 Use this function is deprecated, since we need to support combined
851 branch and compare insns. Use any_condjump_p instead whenever possible. */
/* Like condjump_p, but looks at the first element of a PARALLEL.
   NOTE(review): excerpted listing; return type, braces and the
   individual return statements between the checks are omitted.  */
854 condjump_in_parallel_p (const rtx_insn *insn)
856 const_rtx x = PATTERN (insn);
858 if (GET_CODE (x) != PARALLEL)
861 x = XVECEXP (x, 0, 0);
863 if (GET_CODE (x) != SET)
865 if (GET_CODE (SET_DEST (x)) != PC)
867 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
869 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
871 if (XEXP (SET_SRC (x), 2) == pc_rtx
872 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
873 || ANY_RETURN_P (XEXP (SET_SRC (x), 1))))
875 if (XEXP (SET_SRC (x), 1) == pc_rtx
876 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
877 || ANY_RETURN_P (XEXP (SET_SRC (x), 2))))
882 /* Return set of PC, otherwise NULL. */
/* NOTE(review): excerpted listing; return type, braces, a JUMP_P
   guard and the return statements are omitted.  Finds the pc-set
   either as the whole pattern or as the first element of a PARALLEL.  */
885 pc_set (const rtx_insn *insn)
890 pat = PATTERN (insn);
892 /* The set is allowed to appear either as the insn pattern or
893 the first set in a PARALLEL. */
894 if (GET_CODE (pat) == PARALLEL)
895 pat = XVECEXP (pat, 0, 0);
896 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
902 /* Return true when insn is an unconditional direct jump,
903 possibly bundled inside a PARALLEL. */
/* NOTE(review): excerpted listing; return type, braces, the null-pc
   check and the return statements are omitted.  Rejects non-local
   gotos (REG_NON_LOCAL_GOTO note present).  */
906 any_uncondjump_p (const rtx_insn *insn)
908 const_rtx x = pc_set (insn);
911 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
913 if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
918 /* Return true when insn is a conditional jump. This function works for
919 instructions containing PC sets in PARALLELs. The instruction may have
920 various other effects so before removing the jump you must verify
923 Note that unlike condjump_p it returns false for unconditional jumps. */
/* NOTE(review): excerpted listing; return type, braces, the null-pc
   check and the declarations of a/b are omitted.  Requires an
   IF_THEN_ELSE with exactly one pc arm; the other arm must be a
   label or a (simple) return.  */
926 any_condjump_p (const rtx_insn *insn)
928 const_rtx x = pc_set (insn);
933 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
936 a = GET_CODE (XEXP (SET_SRC (x), 1));
937 b = GET_CODE (XEXP (SET_SRC (x), 2));
939 return ((b == PC && (a == LABEL_REF || a == RETURN || a == SIMPLE_RETURN))
941 && (b == LABEL_REF || b == RETURN || b == SIMPLE_RETURN)));
944 /* Return the label of a conditional jump. */
/* NOTE(review): excerpted listing; return type, braces and the return
   expressions (presumably the matching LABEL_REF arm, or NULL) are
   omitted from this view.  */
947 condjump_label (const rtx_insn *insn)
949 rtx x = pc_set (insn);
954 if (GET_CODE (x) == LABEL_REF)
956 if (GET_CODE (x) != IF_THEN_ELSE)
958 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
960 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
965 /* Return TRUE if INSN is a return jump. */
/* NOTE(review): excerpted listing; return type, braces, the JUMP_P
   guard, the switch cases (RETURN/SIMPLE_RETURN/EH_RETURN are likely
   among them -- confirm in full source) and returns are omitted.
   Scans all subrtxes of the pattern.  */
968 returnjump_p (const rtx_insn *insn)
972 subrtx_iterator::array_type array;
973 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
976 switch (GET_CODE (x))
984 if (SET_IS_RETURN_P (x))
996 /* Return true if INSN is a (possibly conditional) return insn. */
/* NOTE(review): the comment above appears to describe returnjump_p;
   the visible body tests for an EH_RETURN subrtx.  Excerpted listing;
   return type, braces, JUMP_P guard and returns are omitted.  */
999 eh_returnjump_p (rtx_insn *insn)
1003 subrtx_iterator::array_type array;
1004 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
1005 if (GET_CODE (*iter) == EH_RETURN)
1011 /* Return true if INSN is a jump that only transfers control and
/* NOTE(review): excerpted listing; the rest of the comment, return
   type, braces, the JUMP_P guard and returns are omitted.  Requires a
   single_set whose destination is pc and whose source has no side
   effects.  */
1015 onlyjump_p (const rtx_insn *insn)
1022 set = single_set (insn);
1025 if (GET_CODE (SET_DEST (set)) != PC)
1027 if (side_effects_p (SET_SRC (set)))
1033 /* Return true iff INSN is a jump and its JUMP_LABEL is a label, not
1034 NULL or a return. */
/* NOTE(review): excerpted listing; return type and braces omitted.  */
1036 jump_to_label_p (const rtx_insn *insn)
1038 return (JUMP_P (insn)
1039 && JUMP_LABEL (insn) != NULL && !ANY_RETURN_P (JUMP_LABEL (insn)));
1042 /* Return nonzero if X is an RTX that only sets the condition codes
1043 and has no side effects. */
/* NOTE(review): excerpted listing; return type, braces and a null/
   INSN_P guard (original lines 1048-1053) are omitted.  */
1046 only_sets_cc0_p (const_rtx x)
1054 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1057 /* Return 1 if X is an RTX that does nothing but set the condition codes
1058 and CLOBBER or USE registers.
1059 Return -1 if X does explicitly set the condition codes,
1060 but also does other things. */
/* NOTE(review): excerpted listing; return type, braces, guards and the
   sets_cc0 flag handling inside the PARALLEL loop are omitted.  The
   final line encodes: 0 if cc0 never set, -1 if set alongside other
   SETs, 1 if set alone.  */
1063 sets_cc0_p (const_rtx x)
1071 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1073 if (GET_CODE (x) == PARALLEL)
1077 int other_things = 0;
1078 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1080 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1081 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1083 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1086 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1091 /* Find all CODE_LABELs referred to in X, and increment their use
1092 counts. If INSN is a JUMP_INSN and there is at least one
1093 CODE_LABEL referenced in INSN as a jump target, then store the last
1094 one in JUMP_LABEL (INSN). For a tablejump, this must be the label
1095 for the ADDR_VEC. Store any other jump targets as REG_LABEL_TARGET
1096 notes. If INSN is an INSN or a CALL_INSN or non-target operands of
1097 a JUMP_INSN, and there is at least one CODE_LABEL referenced in
1098 INSN, add a REG_LABEL_OPERAND note containing that label to INSN.
1099 For returnjumps, the JUMP_LABEL will also be set as appropriate.
1101 Note that two labels separated by a loop-beginning note
1102 must be kept distinct if we have not yet done loop-optimization,
1103 because the gap between them is where loop-optimize
1104 will want to move invariant code to. CROSS_JUMP tells us
1105 that loop-optimization is done with. */
/* NOTE(review): excerpted listing; return type, braces and the
   if/else linking the asm path to the generic path are omitted.
   Asm insns with labels are handled by mark_jump_label_asm; everything
   else goes through mark_jump_label_1, treating X as a jump target
   only when it is the whole pattern of a JUMP_P insn.  */
1108 mark_jump_label (rtx x, rtx_insn *insn, int in_mem)
1110 rtx asmop = extract_asm_operands (x);
1112 mark_jump_label_asm (asmop, insn);
1114 mark_jump_label_1 (x, insn, in_mem != 0,
1115 (insn != NULL && x == PATTERN (insn) && JUMP_P (insn)));
1118 /* Worker function for mark_jump_label. IN_MEM is TRUE when X occurs
1119 within a (MEM ...). IS_TARGET is TRUE when X is to be treated as a
1120 jump-target; when the JUMP_LABEL field of INSN should be set or a
1121 REG_LABEL_TARGET note should be added, not a REG_LABEL_OPERAND
/* NOTE(review): excerpted listing -- large gaps here: the switch
   scaffolding and its case labels (PC, CC0, REG, CONST, RETURN,
   SEQUENCE, SYMBOL_REF, IF_THEN_ELSE, LABEL_REF, ADDR_VEC... -- confirm
   in full source), several returns/breaks, and fragments of the
   LABEL_REF handling are all omitted.  Visible pieces: return-rtx
   JUMP_LABEL assignment, SEQUENCE recursion, constant-pool recursion,
   IF_THEN_ELSE arm handling, LABEL_NUSES bumping, JUMP_LABEL /
   REG_LABEL_{TARGET,OPERAND} note maintenance, vector-label walking,
   and the generic reverse-order operand recursion.  */
1125 mark_jump_label_1 (rtx x, rtx_insn *insn, bool in_mem, bool is_target)
1127 RTX_CODE code = GET_CODE (x);
1144 gcc_assert (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == x);
1145 JUMP_LABEL (insn) = x;
1155 rtx_sequence *seq = as_a <rtx_sequence *> (x);
1156 for (i = 0; i < seq->len (); i++)
1157 mark_jump_label (PATTERN (seq->insn (i)),
1166 /* If this is a constant-pool reference, see if it is a label. */
1167 if (CONSTANT_POOL_ADDRESS_P (x))
1168 mark_jump_label_1 (get_pool_constant (x), insn, in_mem, is_target);
1171 /* Handle operands in the condition of an if-then-else as for a
1176 mark_jump_label_1 (XEXP (x, 0), insn, in_mem, false);
1177 mark_jump_label_1 (XEXP (x, 1), insn, in_mem, true);
1178 mark_jump_label_1 (XEXP (x, 2), insn, in_mem, true);
1183 rtx label = LABEL_REF_LABEL (x);
1185 /* Ignore remaining references to unreachable labels that
1186 have been deleted. */
1188 && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
1191 gcc_assert (LABEL_P (label));
1193 /* Ignore references to labels of containing functions. */
1194 if (LABEL_REF_NONLOCAL_P (x))
1197 LABEL_REF_LABEL (x) = label;
1198 if (! insn || ! insn->deleted ())
1199 ++LABEL_NUSES (label);
1204 /* Do not change a previous setting of JUMP_LABEL. If the
1205 JUMP_LABEL slot is occupied by a different label,
1206 create a note for this label. */
1207 && (JUMP_LABEL (insn) == NULL || JUMP_LABEL (insn) == label))
1208 JUMP_LABEL (insn) = label;
1212 = is_target ? REG_LABEL_TARGET : REG_LABEL_OPERAND;
1214 /* Add a REG_LABEL_OPERAND or REG_LABEL_TARGET note
1215 for LABEL unless there already is one. All uses of
1216 a label, except for the primary target of a jump,
1217 must have such a note. */
1218 if (! find_reg_note (insn, kind, label))
1219 add_reg_note (insn, kind, label);
1225 /* Do walk the labels in a vector, but not the first operand of an
1226 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1229 if (! insn->deleted ())
1231 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1233 for (i = 0; i < XVECLEN (x, eltnum); i++)
1234 mark_jump_label_1 (XVECEXP (x, eltnum, i), NULL, in_mem,
1243 fmt = GET_RTX_FORMAT (code);
1245 /* The primary target of a tablejump is the label of the ADDR_VEC,
1246 which is canonically mentioned *last* in the insn. To get it
1247 marked as JUMP_LABEL, we iterate over items in reverse order. */
1248 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1251 mark_jump_label_1 (XEXP (x, i), insn, in_mem, is_target);
1252 else if (fmt[i] == 'E')
1256 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1257 mark_jump_label_1 (XVECEXP (x, i, j), insn, in_mem,
1263 /* Worker function for mark_jump_label. Handle asm insns specially.
1264 In particular, output operands need not be considered so we can
1265 avoid re-scanning the replicated asm_operand. Also, the asm_labels
1266 need to be considered targets. */
/* NOTE(review): excerpted listing; return type and braces omitted.
   Inputs are marked as operands (is_target = false), asm goto labels
   as targets (is_target = true).  */
1269 mark_jump_label_asm (rtx asmop, rtx_insn *insn)
1273 for (i = ASM_OPERANDS_INPUT_LENGTH (asmop) - 1; i >= 0; --i)
1274 mark_jump_label_1 (ASM_OPERANDS_INPUT (asmop, i), insn, false, false);
1276 for (i = ASM_OPERANDS_LABEL_LENGTH (asmop) - 1; i >= 0; --i)
1277 mark_jump_label_1 (ASM_OPERANDS_LABEL (asmop, i), insn, false, true);
1280 /* Delete insn INSN from the chain of insns and update label ref counts
1281 and delete insns now unreachable.
1283 Returns the first insn after INSN that was not deleted.
1285 Usage of this instruction is deprecated. Use delete_insn instead and
1286 subsequent cfg_cleanup pass to delete unreachable code if needed. */
/* NOTE(review): excerpted listing -- gaps in the embedded numbers mark
   omitted lines throughout (braces, the delete_insn calls, loop
   conditions, LABEL_NUSES decrement, declarations, returns).  Visible
   phases: (1) skip/return if already deleted; (2) delete a following
   BARRIER; (3) remove NOTE_INSN_CALL_ARG_LOCATION for deleted calls;
   (4) when deleting a jump, decrement the target label's count and
   recursively delete it (or the dispatch table of a tablejump) when
   unused; (5) same for a deleted dispatch table's labels and for
   labels held in REG_LABEL_OPERAND/REG_LABEL_TARGET notes; (6) when a
   deleted label is preceded by a BARRIER, recursively delete the now
   unreachable insns that follow it.  Recursion depth is unbounded --
   presumably acceptable for real insn chains; confirm in full
   source.  */
1289 delete_related_insns (rtx uncast_insn)
1291 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
1292 int was_code_label = (LABEL_P (insn));
1294 rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);
1296 while (next && next->deleted ())
1297 next = NEXT_INSN (next);
1299 /* This insn is already deleted => return first following nondeleted. */
1300 if (insn->deleted ())
1305 /* If instruction is followed by a barrier,
1306 delete the barrier too. */
1308 if (next != 0 && BARRIER_P (next))
1311 /* If this is a call, then we have to remove the var tracking note
1312 for the call arguments. */
1315 || (NONJUMP_INSN_P (insn)
1316 && GET_CODE (PATTERN (insn)) == SEQUENCE
1317 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))))
1321 for (p = next && next->deleted () ? NEXT_INSN (next) : next;
1324 if (NOTE_KIND (p) == NOTE_INSN_CALL_ARG_LOCATION)
1331 /* If deleting a jump, decrement the count of the label,
1332 and delete the label if it is now unused. */
1334 if (jump_to_label_p (insn))
1336 rtx lab = JUMP_LABEL (insn);
1337 rtx_jump_table_data *lab_next;
1339 if (LABEL_NUSES (lab) == 0)
1340 /* This can delete NEXT or PREV,
1341 either directly if NEXT is JUMP_LABEL (INSN),
1342 or indirectly through more levels of jumps. */
1343 delete_related_insns (lab);
1344 else if (tablejump_p (insn, NULL, &lab_next))
1346 /* If we're deleting the tablejump, delete the dispatch table.
1347 We may not be able to kill the label immediately preceding
1348 just yet, as it might be referenced in code leading up to
1350 delete_related_insns (lab_next);
1354 /* Likewise if we're deleting a dispatch table. */
1356 if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
1358 rtvec labels = table->get_labels ();
1360 int len = GET_NUM_ELEM (labels);
1362 for (i = 0; i < len; i++)
1363 if (LABEL_NUSES (XEXP (RTVEC_ELT (labels, i), 0)) == 0)
1364 delete_related_insns (XEXP (RTVEC_ELT (labels, i), 0));
1365 while (next && next->deleted ())
1366 next = NEXT_INSN (next);
1370 /* Likewise for any JUMP_P / INSN / CALL_INSN with a
1371 REG_LABEL_OPERAND or REG_LABEL_TARGET note. */
1373 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1374 if ((REG_NOTE_KIND (note) == REG_LABEL_OPERAND
1375 || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
1376 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1377 && LABEL_P (XEXP (note, 0)))
1378 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1379 delete_related_insns (XEXP (note, 0));
1381 while (prev && (prev->deleted () || NOTE_P (prev)))
1382 prev = PREV_INSN (prev);
1384 /* If INSN was a label and a dispatch table follows it,
1385 delete the dispatch table. The tablejump must have gone already.
1386 It isn't useful to fall through into a table. */
1389 && NEXT_INSN (insn) != 0
1390 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
1391 next = delete_related_insns (NEXT_INSN (insn));
1393 /* If INSN was a label, delete insns following it if now unreachable. */
1395 if (was_code_label && prev && BARRIER_P (prev))
1400 code = GET_CODE (next);
1402 next = NEXT_INSN (next);
1403 /* Keep going past other deleted labels to delete what follows. */
1404 else if (code == CODE_LABEL && next->deleted ())
1405 next = NEXT_INSN (next);
1406 /* Keep the (use (insn))s created by dbr_schedule, which needs
1407 them in order to track liveness relative to a previous
1409 else if (INSN_P (next)
1410 && GET_CODE (PATTERN (next)) == USE
1411 && INSN_P (XEXP (PATTERN (next), 0)))
1412 next = NEXT_INSN (next);
1413 else if (code == BARRIER || INSN_P (next))
1414 /* Note: if this deletes a jump, it can cause more
1415 deletion of unreachable code, after a different label.
1416 As long as the value from this recursive call is correct,
1417 this invocation functions correctly. */
1418 next = delete_related_insns (next);
1424 /* I feel a little doubtful about this loop,
1425 but I see no clean and sure alternative way
1426 to find the first insn after INSN that is not now deleted.
1427 I hope this works. */
1428 while (next && next->deleted ())
1429 next = NEXT_INSN (next);
1433 /* Delete a range of insns from FROM to TO, inclusive.
1434 This is for the sake of peephole optimization, so assume
1435 that whatever these insns do will still be done by a new
1436 peephole insn that will replace them. */
/* NOTE(review): excerpted listing; return type, braces, the loop
   construct, a NOTE_P guard around the unlink (notes are explicitly
   preserved per the comment), and the loop-exit test on TO are
   omitted.  Marks each insn deleted and splices it out of the
   doubly-linked chain one at a time.  */
1439 delete_for_peephole (rtx_insn *from, rtx_insn *to)
1441 rtx_insn *insn = from;
1445 rtx_insn *next = NEXT_INSN (insn);
1446 rtx_insn *prev = PREV_INSN (insn);
1450 insn->set_deleted();
1452 /* Patch this insn out of the chain. */
1453 /* We don't do this all at once, because we
1454 must preserve all NOTEs. */
1456 SET_NEXT_INSN (prev) = next;
1459 SET_PREV_INSN (next) = prev;
1467 /* Note that if TO is an unconditional jump
1468 we *do not* delete the BARRIER that follows,
1469 since the peephole that replaces this sequence
1470 is also an unconditional jump in that case. */
1473 /* A helper function for redirect_exp_1; examines its input X and returns
1474 either a LABEL_REF around a label, or a RETURN if X was NULL. */
/* NOTE(review): excerpted listing; return type, braces, the NULL
   check (which presumably yields ret_rtx) and the final return of X
   itself for ANY_RETURN_P inputs are omitted.  */
1476 redirect_target (rtx x)
1480 if (!ANY_RETURN_P (x))
1481 return gen_rtx_LABEL_REF (Pmode, x);
1485 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1486 NLABEL as a return. Accrue modifications into the change group. */
/* NOTE(review): excerpted listing; return type, braces, part of the
   match condition at original line 1496 (the ANY_RETURN_P/olabel-null
   alternative suggested by "|| ..." is not visible) and the early
   returns after each validate_change are omitted.  All rewrites are
   queued with validate_change (..., 1) so the caller can apply or
   cancel the whole group atomically.  */
1489 redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
1492 RTX_CODE code = GET_CODE (x);
1496 if ((code == LABEL_REF && LABEL_REF_LABEL (x) == olabel)
1499 x = redirect_target (nlabel);
1500 if (GET_CODE (x) == LABEL_REF && loc == &PATTERN (insn))
1501 x = gen_rtx_SET (pc_rtx, x);
1502 validate_change (insn, loc, x, 1);
1506 if (code == SET && SET_DEST (x) == pc_rtx
1507 && ANY_RETURN_P (nlabel)
1508 && GET_CODE (SET_SRC (x)) == LABEL_REF
1509 && LABEL_REF_LABEL (SET_SRC (x)) == olabel)
1511 validate_change (insn, loc, nlabel, 1);
1515 if (code == IF_THEN_ELSE)
1517 /* Skip the condition of an IF_THEN_ELSE. We only want to
1518 change jump destinations, not eventual label comparisons. */
1519 redirect_exp_1 (&XEXP (x, 1), olabel, nlabel, insn);
1520 redirect_exp_1 (&XEXP (x, 2), olabel, nlabel, insn);
1524 fmt = GET_RTX_FORMAT (code);
1525 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1528 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1529 else if (fmt[i] == 'E')
1532 for (j = 0; j < XVECLEN (x, i); j++)
1533 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1538 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
1539 the modifications into the change group. Return false if we did
1540 not see how to do that. */
1543 redirect_jump_1 (rtx_insn *jump, rtx nlabel)
1545 int ochanges = num_validated_changes ();
1548 gcc_assert (nlabel != NULL_RTX);
/* Pick the location to rewrite.  An asm goto stores its targets in the
   ASM_OPERANDS label vector; only the single-label case is supported
   here (asserted below).  */
1549 asmop = extract_asm_operands (PATTERN (jump));
1554 gcc_assert (ASM_OPERANDS_LABEL_LENGTH (asmop) == 1);
1555 loc = &ASM_OPERANDS_LABEL (asmop, 0);
/* For a PARALLEL, only the first element is rewritten.  */
1557 else if (GET_CODE (PATTERN (jump)) == PARALLEL)
1558 loc = &XVECEXP (PATTERN (jump), 0, 0);
1560 loc = &PATTERN (jump);
/* Success is detected by whether redirect_exp_1 queued any change.  */
1562 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1563 return num_validated_changes () > ochanges;
1566 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
1567 jump target label is unused as a result, it and the code following
1570 Normally, NLABEL will be a label, but it may also be a RETURN rtx;
1571 in that case we are to turn the jump into a (possibly conditional)
1574 The return value will be 1 if the change was made, 0 if it wasn't
1575 (this can only occur when trying to produce return insns). */
1578 redirect_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
1580 rtx olabel = jump->jump_label ();
1584 /* If there is no label, we are asked to redirect to the EXIT block.
1585 When before the epilogue is emitted, return/simple_return cannot be
1586 created so we return 0 immediately. After the epilogue is emitted,
1587 we always expect a label, either a non-null label, or a
1588 return/simple_return RTX. */
1590 if (!epilogue_completed)
/* Redirecting to the label the jump already targets is trivially
   a success.  */
1595 if (nlabel == olabel)
/* Queue the rewrite and commit the whole change group atomically;
   either step may refuse.  */
1598 if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
/* Fix up JUMP_LABEL and label use counts (invert = 0).  */
1601 redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
1605 /* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
1607 If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
1608 count has dropped to zero. */
1610 redirect_jump_2 (rtx_jump_insn *jump, rtx olabel, rtx nlabel, int delete_unused,
1615 gcc_assert (JUMP_LABEL (jump) == olabel);
1617 /* Negative DELETE_UNUSED used to be used to signalize behavior on
1618 moving FUNCTION_END note. Just sanity check that no user still worry
1620 gcc_assert (delete_unused >= 0);
/* Point the jump at NLABEL and bump its use count.  RETURN rtxes are
   not labels and carry no use count.  */
1621 JUMP_LABEL (jump) = nlabel;
1622 if (!ANY_RETURN_P (nlabel))
1623 ++LABEL_NUSES (nlabel);
1625 /* Update labels in any REG_EQUAL note. */
1626 if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
/* The note cannot describe a return target; and if the jump condition
   was inverted but the note's expression cannot be inverted to match,
   the note is stale — drop it rather than keep wrong information.  */
1628 if (ANY_RETURN_P (nlabel)
1629 || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
1630 remove_note (jump, note);
/* Otherwise rewrite the label inside the note and commit.  */
1633 redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
1634 confirm_change_group ();
1638 /* Handle the case where we had a conditional crossing jump to a return
1639 label and are now changing it into a direct conditional return.
1640 The jump is no longer crossing in that case. */
1641 if (ANY_RETURN_P (nlabel))
1642 CROSSING_JUMP_P (jump) = 0;
/* Drop OLABEL's use count; if it reached zero and the caller asked,
   delete the label and the code related to it.  */
1644 if (!ANY_RETURN_P (olabel)
1645 && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
1646 /* Undefined labels will remain outside the insn stream. */
1647 && INSN_UID (olabel))
1648 delete_related_insns (olabel);
/* Swap the branch probabilities (applies when the condition was
   inverted — the guarding line is elided from this view).  */
1650 invert_br_probabilities (jump);
1653 /* Invert the jump condition X contained in jump insn INSN. Accrue the
1654 modifications into the change group. Return nonzero for success. */
1656 invert_exp_1 (rtx x, rtx insn)
1658 RTX_CODE code = GET_CODE (x);
1660 if (code == IF_THEN_ELSE)
1662 rtx comp = XEXP (x, 0);
1664 enum rtx_code reversed_code;
1666 /* We can do this in two ways: The preferable way, which can only
1667 be done if this is not an integer comparison, is to reverse
1668 the comparison code. Otherwise, swap the THEN-part and ELSE-part
1669 of the IF_THEN_ELSE. If we can't do either, fail. */
1671 reversed_code = reversed_comparison_code (comp, insn);
1673 if (reversed_code != UNKNOWN)
/* Preferred path: queue a rebuilt comparison with the reversed code
   into the change group.  */
1675 validate_change (insn, &XEXP (x, 0),
1676 gen_rtx_fmt_ee (reversed_code,
1677 GET_MODE (comp), XEXP (comp, 0),
/* Fallback path: swap the THEN and ELSE arms (TEM holds the saved
   arm; its declaration is elided from this view).  */
1684 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
1685 validate_change (insn, &XEXP (x, 2), tem, 1);
1692 /* Invert the condition of the jump JUMP, and make it jump to label
1693 NLABEL instead of where it jumps now. Accrue changes into the
1694 change group. Return false if we didn't see how to perform the
1695 inversion and redirection. */
1698 invert_jump_1 (rtx_jump_insn *jump, rtx nlabel)
/* pc_set extracts the (set (pc) ...) of JUMP's pattern.  */
1700 rtx x = pc_set (jump);
1704 ochanges = num_validated_changes ();
1707 ok = invert_exp_1 (SET_SRC (x), jump);
/* If the inversion queued no change, there was nothing to invert.  */
1710 if (num_validated_changes () == ochanges)
1713 /* redirect_jump_1 will fail of nlabel == olabel, and the current use is
1714 in Pmode, so checking this is not merely an optimization. */
1715 return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
1718 /* Invert the condition of the jump JUMP, and make it jump to label
1719 NLABEL instead of where it jumps now. Return true if successful. */
1722 invert_jump (rtx_jump_insn *jump, rtx nlabel, int delete_unused)
1724 rtx olabel = JUMP_LABEL (jump);
/* Queue inversion + redirection, commit the group atomically, then
   fix up JUMP_LABEL and use counts with invert = 1.  */
1726 if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1728 redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1736 /* Like rtx_equal_p except that it considers two REGs as equal
1737 if they renumber to the same value and considers two commutative
1738 operations to be the same if the order of the operands has been
1742 rtx_renumbered_equal_p (const_rtx x, const_rtx y)
1745 const enum rtx_code code = GET_CODE (x);
/* REG or SUBREG-of-REG on both sides: compare the registers after
   applying reg_renumber, folding any subreg byte offset in.  */
1751 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
1752 && (REG_P (y) || (GET_CODE (y) == SUBREG
1753 && REG_P (SUBREG_REG (y)))))
1755 int reg_x = -1, reg_y = -1;
1756 int byte_x = 0, byte_y = 0;
1757 struct subreg_info info;
1759 if (GET_MODE (x) != GET_MODE (y))
1762 /* If we haven't done any renumbering, don't
1763 make any assumptions. */
1764 if (reg_renumber == 0)
1765 return rtx_equal_p (x, y);
/* Resolve X: a renumbered subreg is only comparable if the subreg is
   representable on the assigned hard register; its offset then folds
   into the register number.  */
1769 reg_x = REGNO (SUBREG_REG (x));
1770 byte_x = SUBREG_BYTE (x);
1772 if (reg_renumber[reg_x] >= 0)
1774 subreg_get_info (reg_renumber[reg_x],
1775 GET_MODE (SUBREG_REG (x)), byte_x,
1776 GET_MODE (x), &info);
1777 if (!info.representable_p)
1779 reg_x = info.offset;
1786 if (reg_renumber[reg_x] >= 0)
1787 reg_x = reg_renumber[reg_x];
/* Resolve Y the same way.  */
1790 if (GET_CODE (y) == SUBREG)
1792 reg_y = REGNO (SUBREG_REG (y));
1793 byte_y = SUBREG_BYTE (y);
1795 if (reg_renumber[reg_y] >= 0)
1797 subreg_get_info (reg_renumber[reg_y],
1798 GET_MODE (SUBREG_REG (y)), byte_y,
1799 GET_MODE (y), &info);
1800 if (!info.representable_p)
1802 reg_y = info.offset;
1809 if (reg_renumber[reg_y] >= 0)
1810 reg_y = reg_renumber[reg_y];
1813 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
1816 /* Now we have disposed of all the cases
1817 in which different rtx codes can match. */
1818 if (code != GET_CODE (y))
1831 /* We can't assume nonlocal labels have their following insns yet. */
1832 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
1833 return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
1835 /* Two label-refs are equivalent if they point at labels
1836 in the same position in the instruction stream. */
1837 return (next_real_insn (LABEL_REF_LABEL (x))
1838 == next_real_insn (LABEL_REF_LABEL (y)));
/* SYMBOL_REFs compare by string pointer identity.  */
1841 return XSTR (x, 0) == XSTR (y, 0);
1844 /* If we didn't match EQ equality above, they aren't the same. */
1851 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1853 if (GET_MODE (x) != GET_MODE (y))
1856 /* MEMs referring to different address space are not equivalent. */
1857 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
1860 /* For commutative operations, the RTX match if the operand match in any
1861 order. Also handle the simple binary and unary cases without a loop. */
1862 if (targetm.commutative_p (x, UNKNOWN))
1863 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1864 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
1865 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
1866 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
1867 else if (NON_COMMUTATIVE_P (x))
1868 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1869 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
1870 else if (UNARY_P (x))
1871 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
1873 /* Compare the elements. If any pair of corresponding elements
1874 fail to match, return 0 for the whole things. */
1876 fmt = GET_RTX_FORMAT (code);
1877 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1883 if (XWINT (x, i) != XWINT (y, i))
1888 if (XINT (x, i) != XINT (y, i))
/* ASM_OPERANDS operand 6 and ASM_INPUT operand 1 are special-cased
   (presumably source-location data — the handling lines are elided
   from this view).  */
1890 if (((code == ASM_OPERANDS && i == 6)
1891 || (code == ASM_INPUT && i == 1)))
1898 if (XTREE (x, i) != XTREE (y, i))
1903 if (strcmp (XSTR (x, i), XSTR (y, i)))
1908 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
1913 if (XEXP (x, i) != XEXP (y, i))
/* Vectors must match element-by-element, same length.  */
1920 if (XVECLEN (x, i) != XVECLEN (y, i))
1922 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1923 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1934 /* If X is a hard register or equivalent to one or a subregister of one,
1935 return the hard register number. If X is a pseudo register that was not
1936 assigned a hard register, return the pseudo register number. Otherwise,
1937 return -1. Any rtx is valid for X. */
1940 true_regnum (const_rtx x)
/* A renumbered pseudo maps to its assigned hard register.  */
1944 if (REGNO (x) >= FIRST_PSEUDO_REGISTER
1945 && (lra_in_progress || reg_renumber[REGNO (x)] >= 0))
1946 return reg_renumber[REGNO (x)];
1949 if (GET_CODE (x) == SUBREG)
/* Recursively resolve the inner register of a SUBREG.  */
1951 int base = true_regnum (SUBREG_REG (x));
/* Only a hard-register base can have the subreg byte offset applied;
   subreg_get_info validates representability.  */
1953 && base < FIRST_PSEUDO_REGISTER)
1955 struct subreg_info info;
1957 subreg_get_info (lra_in_progress
1958 ? (unsigned) base : REGNO (SUBREG_REG (x)),
1959 GET_MODE (SUBREG_REG (x)),
1960 SUBREG_BYTE (x), GET_MODE (x), &info);
1962 if (info.representable_p)
1963 return base + info.offset;
1969 /* Return regno of the register REG and handle subregs too. */
1971 reg_or_subregno (const_rtx reg)
1973 if (GET_CODE (reg) == SUBREG)
1974 reg = SUBREG_REG (reg);
1975 gcc_assert (REG_P (reg));