1 /* Post-reload compare elimination.
2 Copyright (C) 2010-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* There is a set of targets whose general-purpose move or addition
21 instructions clobber the flags. These targets cannot split their
22 CBRANCH/CSTORE etc patterns before reload is complete, lest reload
23 itself insert these instructions in between the flags setter and user.
24 Because these targets cannot split the compare from the use, they
25 cannot make use of the comparison elimination offered by the combine pass.
27 This is a small pass intended to provide comparison elimination similar to
28 what was available via NOTICE_UPDATE_CC for cc0 targets.
32 (0) CBRANCH/CSTORE etc have been split in pass_split_after_reload.
34 (1) All comparison patterns are represented as
36 [(set (reg:CC) (compare:CC (reg) (reg_or_immediate)))]
38 (2) All insn patterns that modify the flags are represented as
40 [(set (reg) (operation)
43 (3) If an insn of form (2) can usefully set the flags, there is
44 another pattern of the form
46 [(set (reg:CCM) (compare:CCM (operation) (immediate)))
47 (set (reg) (operation)]
49 The mode CCM will be chosen as if by SELECT_CC_MODE.
51 Note that unlike NOTICE_UPDATE_CC, we do not handle memory operands.
52 This could be handled as a future enhancement.
57 #include "coretypes.h"
64 #include "insn-config.h"
68 #include "tree-pass.h"
72 /* These structures describe a comparison and how it is used. */
/* NOTE(review): this excerpt elides lines; the opening declarations of
   struct comparison_use and struct comparison (and fields such as insn,
   loc, code, in_a, in_b, eh_note and the boolean flags) are not all
   visible here.  Comments below describe only what is visible.  */
74 /* The choice of maximum 3 uses comes from wanting to eliminate the two
75 duplicate compares from a three-way branch on the sign of a value.
76 This is also sufficient to eliminate the duplicate compare against the
77 high-part of a double-word comparison. */
82 /* The instruction in which the result of the compare is used. */
84 /* The location of the flags register within the use. */
86 /* The comparison code applied against the flags register. */
92 /* The comparison instruction. */
95 /* The insn prior to the comparison insn that clobbers the flags. */
96 rtx_insn *prev_clobber;
98 /* The insn prior to the comparison insn that sets in_a REG. */
99 rtx_insn *in_a_setter;
101 /* The two values being compared. These will be either REGs or
105 /* The REG_EH_REGION of the comparison. */
108 /* Information about how this comparison is used. */
109 struct comparison_use uses[MAX_CMP_USE];
111 /* The original CC_MODE for this comparison. */
112 machine_mode orig_mode;
114 /* The number of uses identified for this comparison. */
115 unsigned short n_uses;
117 /* True if not all uses of this comparison have been identified.
118 This can happen either for overflowing the array above, or if
119 the flags register is used in some unusual context. */
122 /* True if its inputs are still valid at the end of the block. */
125 /* Whether IN_A is wrapped in a NOT before being compared. */
/* Every comparison discovered in the function.  Filled by
   before_dom_children (safe_push), walked and released by
   execute_compare_elim_after_reload.  */
129 static vec<comparison *> all_compares;
131 /* Return whether X is a NOT unary expression. */
/* (Function signature elided from this excerpt.)  Predicate used when
   recording/matching comparisons whose first operand is wrapped in NOT.  */
136 return GET_CODE (x) == NOT;
139 /* Strip a NOT unary expression around X, if any. */
/* NOTE(review): the body of strip_not is elided from this excerpt;
   presumably it returns XEXP (x, 0) when is_not (x), else x — confirm.  */
150 /* Look for a "conforming" comparison, as defined above. If valid, return
151 the rtx for the COMPARE itself. */
/* Shape required: a single_set whose SRC is a COMPARE, whose DEST is the
   target's flags hard register, whose first operand is a REG (possibly
   under a NOT), and whose second operand is a constant, a REG, or an
   UNSPEC whose elements are all REGs.  (The early-return statements for
   each rejected case are elided from this excerpt.)  */
154 conforming_compare (rtx_insn *insn)
158 set = single_set (insn);
163 if (GET_CODE (src) != COMPARE)
166 dest = SET_DEST (set);
167 if (!REG_P (dest) || REGNO (dest) != targetm.flags_regnum)
170 if (!REG_P (strip_not (XEXP (src, 0))))
173 if (CONSTANT_P (XEXP (src, 1)) || REG_P (XEXP (src, 1)))
176 if (GET_CODE (XEXP (src, 1)) == UNSPEC)
178 for (int i = 0; i < XVECLEN (XEXP (src, 1), 0); i++)
179 if (!REG_P (XVECEXP (XEXP (src, 1), 0, i)))
187 /* Look for a pattern of the "correct" form for an insn with a flags clobber
188 for which we may be able to eliminate a compare later. We're not looking
189 to validate any inputs at this time, merely see that the basic shape is
190 correct. The term "arithmetic" may be somewhat misleading... */
/* Accepted shape: a non-jump, non-asm insn whose pattern is a two-element
   PARALLEL of [SET, CLOBBER (flags-reg)].  */
193 arithmetic_flags_clobber_p (rtx_insn *insn)
197 if (!NONJUMP_INSN_P (insn))
/* Reject inline asm: it lacks the dataflow shape we rely on.  */
199 pat = PATTERN (insn);
200 if (asm_noperands (pat) >= 0)
203 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) == 2)
205 x = XVECEXP (pat, 0, 0);
206 if (GET_CODE (x) != SET)
/* Second element must be a CLOBBER of the flags hard register.
   (The intervening lines unwrapping the CLOBBER operand are elided.)  */
212 x = XVECEXP (pat, 0, 1);
213 if (GET_CODE (x) == CLOBBER)
216 if (REG_P (x) && REGNO (x) == targetm.flags_regnum)
224 /* Look for uses of FLAGS in INSN. If we find one we can analyze, record
225 it in CMP; otherwise indicate that we've missed a use. */
228 find_flags_uses_in_insn (struct comparison *cmp, rtx_insn *insn)
232 /* If we've already lost track of uses, don't bother collecting more. */
233 if (cmp->missing_uses)
236 /* Find a USE of the flags register. */
237 FOR_EACH_INSN_USE (use, insn)
238 if (DF_REF_REGNO (use) == targetm.flags_regnum)
242 /* If this is an unusual use, quit. */
243 if (DF_REF_TYPE (use) != DF_REF_REG_USE)
246 /* If we've run out of slots to record uses, quit. */
247 if (cmp->n_uses == MAX_CMP_USE)
250 /* Unfortunately the location of the flags register, while present
251 in the reference structure, doesn't help. We need to find the
252 comparison code that is outer to the actual flags use. */
253 loc = DF_REF_LOC (use);
/* Look through a PARALLEL wrapper (e.g. cbranch with clobber) to the
   first SET, then expect an IF_THEN_ELSE whose condition compares the
   flags register against zero.  */
255 if (GET_CODE (x) == PARALLEL)
256 x = XVECEXP (x, 0, 0);
258 if (GET_CODE (x) == IF_THEN_ELSE)
261 && loc == &XEXP (x, 0)
262 && XEXP (x, 1) == const0_rtx)
264 /* We've found a use of the flags that we understand. */
265 struct comparison_use *cuse = &cmp->uses[cmp->n_uses++]
268 cuse->code = GET_CODE (x);
/* Any use shape we did not recognize above falls through to here.  */
276 /* We failed to recognize this use of the flags register. */
277 cmp->missing_uses = true;
/* Dominator walk that discovers comparisons per basic block; the real
   work happens in before_dom_children below.  */
280 class find_comparison_dom_walker : public dom_walker
283 find_comparison_dom_walker (cdi_direction direction)
284 : dom_walker (direction) {}
286 edge before_dom_children (basic_block) final override;
289 /* Return true if conforming COMPARE with EH_NOTE is redundant with comparison
290 CMP and can thus be eliminated. */
293 can_eliminate_compare (rtx compare, rtx eh_note, struct comparison *cmp)
295 /* Take care that it's in the same EH region. */
296 if (cfun->can_throw_non_call_exceptions
297 && !rtx_equal_p (eh_note, cmp->eh_note)
300 /* Make sure the compare is redundant with the previous. */
301 if (!rtx_equal_p (strip_not (XEXP (compare, 0)), cmp->in_a)
302 || !rtx_equal_p (XEXP (compare, 1), cmp->in_b))
/* Both must agree on whether the first operand was negated.  */
305 if (is_not (XEXP (compare, 0)) != cmp->not_in_a)
308 /* New mode must be compatible with the previous compare mode. */
309 machine_mode new_mode
310 = targetm.cc_modes_compatible (GET_MODE (compare), cmp->orig_mode);
312 if (new_mode == VOIDmode)
/* If the modes differ, rewrite the earlier compare in the compatible
   mode so the later one really is redundant with it.  */
315 if (cmp->orig_mode != new_mode)
317 /* Generate new comparison for substitution. */
318 rtx flags = gen_rtx_REG (new_mode, targetm.flags_regnum);
319 rtx x = gen_rtx_COMPARE (new_mode, cmp->in_a, cmp->in_b);
320 x = gen_rtx_SET (flags, x);
322 if (!validate_change (cmp->insn, &PATTERN (cmp->insn), x, false))
325 cmp->orig_mode = new_mode;
331 /* Identify comparison instructions within BB. If the flags from the last
332 compare in the BB is live at the end of the block, install the compare
333 in BB->AUX. Called via dom_walker.walk (). */
336 find_comparison_dom_walker::before_dom_children (basic_block bb)
338 rtx_insn *insn, *next;
339 bool need_purge = false;
340 rtx_insn *last_setter[FIRST_PSEUDO_REGISTER];
342 /* The last comparison that was made. Will be reset to NULL
343 once the flags are clobbered. */
344 struct comparison *last_cmp = NULL;
346 /* True iff the last comparison has not been clobbered, nor
347 have its inputs. Used to eliminate duplicate compares. */
348 bool last_cmp_valid = false;
350 /* The last insn that clobbered the flags, if that insn is of
351 a form that may be valid for eliminating a following compare.
352 To be reset to NULL once the flags are set otherwise. */
353 rtx_insn *last_clobber = NULL;
355 /* Propagate the last live comparison throughout the extended basic block. */
356 if (single_pred_p (bb))
358 last_cmp = (struct comparison *) single_pred (bb)->aux;
360 last_cmp_valid = last_cmp->inputs_valid;
363 memset (last_setter, 0, sizeof (last_setter));
364 for (insn = BB_HEAD (bb); insn; insn = next)
368 next = (insn == BB_END (bb) ? NULL : NEXT_INSN (insn));
369 if (!NONDEBUG_INSN_P (insn))
/* Is this one of the conforming compares we can work with?  */
372 src = conforming_compare (insn);
377 if (cfun->can_throw_non_call_exceptions)
378 eh_note = find_reg_note (insn, REG_EH_REGION, NULL);
/* Duplicate of the live compare: delete it.  (Deletion statements
   are elided from this excerpt.)  */
380 if (last_cmp_valid && can_eliminate_compare (src, eh_note, last_cmp))
/* Otherwise record a fresh comparison.  */
388 last_cmp = XCNEW (struct comparison);
389 last_cmp->insn = insn;
390 last_cmp->prev_clobber = last_clobber;
391 last_cmp->in_a = strip_not (XEXP (src, 0));
392 last_cmp->in_b = XEXP (src, 1);
393 last_cmp->not_in_a = is_not (XEXP (src, 0));
394 last_cmp->eh_note = eh_note;
395 last_cmp->orig_mode = GET_MODE (src);
/* For compares against zero, remember the insn that computed in_a so
   try_merge_compare can later fold the compare into it.  */
396 if (last_cmp->in_b == const0_rtx
397 && last_setter[REGNO (last_cmp->in_a)])
399 rtx set = single_set (last_setter[REGNO (last_cmp->in_a)]);
400 if (set && rtx_equal_p (SET_DEST (set), last_cmp->in_a))
401 last_cmp->in_a_setter = last_setter[REGNO (last_cmp->in_a)];
403 all_compares.safe_push (last_cmp);
405 /* It's unusual, but be prepared for comparison patterns that
406 also clobber an input, or perhaps a scratch. */
408 last_cmp_valid = true;
413 /* Notice if this instruction uses the flags register. */
415 find_flags_uses_in_insn (last_cmp, insn);
417 /* Notice if this instruction kills the flags register. */
419 FOR_EACH_INSN_DEF (def, insn)
420 if (DF_REF_REGNO (def) == targetm.flags_regnum)
422 /* See if this insn could be the "clobber" that eliminates
423 a future comparison. */
424 last_clobber = (arithmetic_flags_clobber_p (insn)
427 /* In either case, the previous compare is no longer valid. */
429 last_cmp_valid = false;
434 /* Notice if any of the inputs to the comparison have changed
435 and remember last insn that sets each register. */
437 FOR_EACH_INSN_DEF (def, insn)
440 && (DF_REF_REGNO (def) == REGNO (last_cmp->in_a)
441 || (REG_P (last_cmp->in_b)
442 && DF_REF_REGNO (def) == REGNO (last_cmp->in_b))))
443 last_cmp_valid = false;
444 last_setter[DF_REF_REGNO (def)] = insn;
448 /* Remember the live comparison for subsequent members of
449 the extended basic block. */
453 last_cmp->inputs_valid = last_cmp_valid;
455 /* Look to see if the flags register is live outgoing here, and
456 incoming to any successor not part of the extended basic block. */
457 if (bitmap_bit_p (df_get_live_out (bb), targetm.flags_regnum))
462 FOR_EACH_EDGE (e, ei, bb->succs)
464 basic_block dest = e->dest;
/* NOTE(review): the comment above speaks of liveness *incoming to the
   successor*, yet this tests df_get_live_in (bb) — verify whether
   df_get_live_in (dest) was intended.  */
465 if (bitmap_bit_p (df_get_live_in (bb), targetm.flags_regnum)
466 && !single_pred_p (dest))
468 last_cmp->missing_uses = true;
475 /* If we deleted a compare with a REG_EH_REGION note, we may need to
478 purge_dead_edges (bb);
483 /* Find all comparisons in the function. */
/* Drives the dominator walk above, then clears the BB->aux pointers it
   used and releases dominance info.  */
486 find_comparisons (void)
488 calculate_dominance_info (CDI_DOMINATORS);
490 find_comparison_dom_walker (CDI_DOMINATORS)
491 .walk (cfun->cfg->x_entry_block_ptr);
493 clear_aux_for_blocks ();
494 free_dominance_info (CDI_DOMINATORS);
497 /* Select an alternate CC_MODE for a comparison insn comparing A and B.
498 Note that inputs are almost certainly different than the IN_A and IN_B
499 stored in CMP -- we're called while attempting to eliminate the compare
500 after all. Return the new FLAGS rtx if successful, else return NULL.
501 Note that this function may start a change group. */
504 maybe_select_cc_mode (struct comparison *cmp, rtx a ATTRIBUTE_UNUSED,
505 rtx b ATTRIBUTE_UNUSED)
507 machine_mode sel_mode;
508 const int n = cmp->n_uses;
511 #ifndef SELECT_CC_MODE
512 /* Minimize code differences when this target macro is undefined. */
514 #define SELECT_CC_MODE(A,B,C) (gcc_unreachable (), VOIDmode)
517 /* If we don't have access to all of the uses, we can't validate. */
518 if (cmp->missing_uses || n == 0)
521 /* Find a new mode that works for all of the uses. Special case the
522 common case of exactly one use. */
525 sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
526 if (sel_mode != cmp->orig_mode)
/* Queue (deferred) replacement of the flags use with the new mode;
   the caller is responsible for finishing or cancelling the group.  */
528 flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
529 validate_change (cmp->uses[0].insn, cmp->uses[0].loc, flags, true);
/* Multiple uses: intersect the modes required by each use via
   targetm.cc_modes_compatible; fail if they are incompatible.  */
536 sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
537 for (i = 1; i < n; ++i)
539 machine_mode new_mode = SELECT_CC_MODE (cmp->uses[i].code, a, b);
540 if (new_mode != sel_mode)
542 sel_mode = targetm.cc_modes_compatible (sel_mode, new_mode);
543 if (sel_mode == VOIDmode)
548 if (sel_mode != cmp->orig_mode)
550 flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
551 for (i = 0; i < n; ++i)
552 validate_change (cmp->uses[i].insn, cmp->uses[i].loc, flags, true);
559 /* Return a register RTX holding the same value at START as REG at END, or
560 NULL_RTX if there is none. */
/* Scans backward from END toward START within one basic block, following
   simple copies; bails on any abnormal (conditional/partial/extracted/
   auto-modified) definition of REG.  */
563 equivalent_reg_at_start (rtx reg, rtx_insn *end, rtx_insn *start)
565 machine_mode orig_mode = GET_MODE (reg);
566 rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (end));
568 for (rtx_insn *insn = PREV_INSN (end);
570 insn = PREV_INSN (insn))
572 const int abnormal_flags
573 = (DF_REF_CONDITIONAL | DF_REF_PARTIAL | DF_REF_MAY_CLOBBER
574 | DF_REF_MUST_CLOBBER | DF_REF_SIGN_EXTRACT
575 | DF_REF_ZERO_EXTRACT | DF_REF_STRICT_LOW_PART
576 | DF_REF_PRE_POST_MODIFY);
579 /* Note that the BB_HEAD is always either a note or a label, but in
580 any case it means that REG is defined outside the block. */
583 if (NOTE_P (insn) || DEBUG_INSN_P (insn))
586 /* Find a possible def of REG in INSN. */
587 FOR_EACH_INSN_DEF (def, insn)
588 if (DF_REF_REGNO (def) == REGNO (reg))
591 /* No definitions of REG; continue searching. */
595 /* Bail if this is not a totally normal set of REG. */
596 if (DF_REF_IS_ARTIFICIAL (def))
598 if (DF_REF_FLAGS (def) & abnormal_flags)
601 /* We've found an insn between the compare and the clobber that sets
602 REG. Given that pass_cprop_hardreg has not yet run, we still find
603 situations in which we can usefully look through a copy insn. */
604 rtx x = single_set (insn);
/* Mode must be preserved across the copy chain; the exact handling of
   a mode change is elided from this excerpt.  */
612 if (GET_MODE (reg) != orig_mode)
618 /* Return true if it is okay to merge the comparison CMP_INSN with
619 the instruction ARITH_INSN. Both instructions are assumed to be in the
620 same basic block with ARITH_INSN appearing before CMP_INSN. This checks
621 that there are no uses or defs of the condition flags or control flow
622 changes between the two instructions. */
625 can_merge_compare_into_arith (rtx_insn *cmp_insn, rtx_insn *arith_insn)
/* Walk backward from the compare to the arithmetic insn.  */
627 for (rtx_insn *insn = PREV_INSN (cmp_insn);
628 insn && insn != arith_insn;
629 insn = PREV_INSN (insn))
631 if (!NONDEBUG_INSN_P (insn))
633 /* Bail if there are jumps or calls in between. */
634 if (!NONJUMP_INSN_P (insn))
637 /* Bail on old-style asm statements because they lack
638 data flow information. */
639 if (GET_CODE (PATTERN (insn)) == ASM_INPUT)
643 /* Find a USE of the flags register. */
644 FOR_EACH_INSN_USE (ref, insn)
645 if (DF_REF_REGNO (ref) == targetm.flags_regnum)
648 /* Find a DEF of the flags register. */
649 FOR_EACH_INSN_DEF (ref, insn)
650 if (DF_REF_REGNO (ref) == targetm.flags_regnum)
656 /* Given two SET expressions, SET_A and SET_B determine whether they form
657 a recognizable pattern when emitted in parallel. Return that parallel
658 if so. Otherwise return NULL. */
661 try_validate_parallel (rtx set_a, rtx set_b)
663 rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set_a, set_b));
/* Build a scratch insn purely to run the recognizer against it.  */
664 rtx_insn *insn = make_insn_raw (par);
666 if (insn_invalid_p (insn, false))
/* Reclaim the uid consumed by the scratch insn on failure.  */
668 crtl->emit.x_cur_insn_uid--;
/* On success, detach the scratch insn from the emit chain before
   handing the pattern back.  */
672 SET_PREV_INSN (insn) = NULL_RTX;
673 SET_NEXT_INSN (insn) = NULL_RTX;
674 INSN_LOCATION (insn) = 0;
678 /* For a comparison instruction described by CMP check if it compares a
679 register with zero i.e. it is of the form CC := CMP R1, 0.
680 If it is, find the instruction defining R1 (say I1) and try to create a
681 PARALLEL consisting of I1 and the comparison, representing a flag-setting
682 arithmetic instruction. Example:
684 <instructions that don't read the condition register>
686 I2 can be merged with I1 into:
687 I1: { CC := CMP (R2 + R3) 0 ; R1 := R2 + R3 }
688 This catches cases where R1 is used between I1 and I2 and therefore
689 combine and other RTL optimisations will not try to propagate it into
690 I2. Return true if we succeeded in merging CMP. */
693 try_merge_compare (struct comparison *cmp)
695 rtx_insn *cmp_insn = cmp->insn;
/* Only compares against zero with a known setter of in_a qualify.  */
697 if (cmp->in_b != const0_rtx || cmp->in_a_setter == NULL)
699 rtx in_a = cmp->in_a;
/* Verify the compare really reads in_a per dataflow.  */
702 FOR_EACH_INSN_USE (use, cmp_insn)
703 if (DF_REF_REGNO (use) == REGNO (in_a))
708 rtx_insn *def_insn = cmp->in_a_setter;
709 rtx set = single_set (def_insn);
713 if (!can_merge_compare_into_arith (cmp_insn, def_insn))
716 rtx src = SET_SRC (set);
718 /* If the source uses addressing modes with side effects, we can't
719 do the merge because we'd end up with a PARALLEL that has two
720 instances of that side effect in it. */
721 if (side_effects_p (src))
724 rtx flags = maybe_select_cc_mode (cmp, src, CONST0_RTX (GET_MODE (src)));
727 /* We may already have a change group going through maybe_select_cc_mode.
728 Discard it properly. */
/* Build the flag-setting SET and pair it with a copy of the arithmetic
   insn's pattern, then ask the recognizer whether the PARALLEL is a
   valid instruction for this target.  */
734 = gen_rtx_SET (flags, gen_rtx_COMPARE (GET_MODE (flags),
736 CONST0_RTX (GET_MODE (src))));
737 rtx arith_set = copy_rtx (PATTERN (def_insn));
738 rtx par = try_validate_parallel (flag_set, arith_set);
741 /* We may already have a change group going through maybe_select_cc_mode.
742 Discard it properly. */
746 if (!apply_change_group ())
/* Success: emit the merged insn and remove both originals.  */
748 emit_insn_after (par, def_insn);
749 delete_insn (def_insn);
750 delete_insn (cmp->insn);
754 /* Attempt to replace a comparison with a prior arithmetic insn that can
755 compute the same flags value as the comparison itself. Return true if
756 successful, having made all rtl modifications necessary. */
759 try_eliminate_compare (struct comparison *cmp)
761 rtx flags, in_a, in_b, cmp_a, cmp_b;
/* First try folding the compare into its in_a setter.  */
763 if (try_merge_compare (cmp))
766 /* We must have found an interesting "clobber" preceding the compare. */
767 if (cmp->prev_clobber == NULL)
770 /* Verify that IN_A is not clobbered in between CMP and PREV_CLOBBER.
771 Given that this target requires this pass, we can assume that most
772 insns do clobber the flags, and so the distance between the compare
773 and the clobber is likely to be small. */
774 /* ??? This is one point at which one could argue that DF_REF_CHAIN would
775 be useful, but it is thought to be too heavy-weight a solution here. */
776 in_a = equivalent_reg_at_start (cmp->in_a, cmp->insn, cmp->prev_clobber);
780 /* Likewise for IN_B if need be. */
781 if (CONSTANT_P (cmp->in_b))
783 else if (REG_P (cmp->in_b))
785 in_b = equivalent_reg_at_start (cmp->in_b, cmp->insn, cmp->prev_clobber);
/* UNSPEC second operand: map each element register individually.  */
789 else if (GET_CODE (cmp->in_b) == UNSPEC)
791 const int len = XVECLEN (cmp->in_b, 0);
792 rtvec v = rtvec_alloc (len);
793 for (int i = 0; i < len; i++)
795 rtx r = equivalent_reg_at_start (XVECEXP (cmp->in_b, 0, i),
796 cmp->insn, cmp->prev_clobber);
799 RTVEC_ELT (v, i) = r;
801 in_b = gen_rtx_UNSPEC (GET_MODE (cmp->in_b), v, XINT (cmp->in_b, 1));
806 /* We've reached PREV_CLOBBER without finding a modification of IN_A.
807 Validate that PREV_CLOBBER itself does in fact refer to IN_A. Do
808 recall that we've already validated the shape of PREV_CLOBBER. */
809 rtx_insn *insn = cmp->prev_clobber;
/* X is the SET half of PREV_CLOBBER's [SET, CLOBBER] parallel.  */
811 rtx x = XVECEXP (PATTERN (insn), 0, 0);
812 if (rtx_equal_p (SET_DEST (x), in_a))
815 /* Also check operations with implicit extensions, e.g.:
817 (zero_extend:DI (plus:SI (reg:SI) (reg:SI))))
819 (compare:CCZ (plus:SI (reg:SI) (reg:SI))
821 else if (REG_P (SET_DEST (x))
823 && REGNO (SET_DEST (x)) == REGNO (in_a)
824 && (GET_CODE (SET_SRC (x)) == ZERO_EXTEND
825 || GET_CODE (SET_SRC (x)) == SIGN_EXTEND)
826 && GET_MODE (XEXP (SET_SRC (x), 0)) == GET_MODE (in_a))
827 cmp_a = XEXP (SET_SRC (x), 0);
829 /* Also check fully redundant comparisons, e.g.:
831 (minus:SI (reg:SI) (reg:SI))))
833 (compare:CC (reg:SI) (reg:SI)))] */
834 else if (REG_P (in_b)
835 && GET_CODE (SET_SRC (x)) == MINUS
836 && rtx_equal_p (XEXP (SET_SRC (x), 0), in_a)
837 && rtx_equal_p (XEXP (SET_SRC (x), 1), in_b))
843 /* If the source uses addressing modes with side effects, we can't
844 do the merge because we'd end up with a PARALLEL that has two
845 instances of that side effect in it. */
846 if (side_effects_p (cmp_a))
851 else if (rtx_equal_p (SET_DEST (x), in_b))
855 if (side_effects_p (cmp_b))
858 /* Determine if we ought to use a different CC_MODE here. */
859 flags = maybe_select_cc_mode (cmp, cmp_a, cmp_b);
/* Fall back to the compare's original mode if no better mode found.  */
861 flags = gen_rtx_REG (cmp->orig_mode, targetm.flags_regnum);
863 /* Generate a new comparison for installation in the setter. */
864 rtx y = cmp->not_in_a
865 ? gen_rtx_NOT (GET_MODE (cmp_a), copy_rtx (cmp_a))
867 y = gen_rtx_COMPARE (GET_MODE (flags), y, copy_rtx (cmp_b));
868 y = gen_rtx_SET (flags, y);
870 /* Canonicalize instruction to:
871 [(set (reg:CCM) (compare:CCM (operation) (immediate)))
872 (set (reg) (operation)] */
874 rtvec v = rtvec_alloc (2);
875 RTVEC_ELT (v, 0) = y;
876 RTVEC_ELT (v, 1) = x;
878 rtx pat = gen_rtx_PARALLEL (VOIDmode, v);
880 /* Succeed if the new instruction is valid. Note that we may have started
881 a change group within maybe_select_cc_mode, therefore we must continue. */
882 validate_change (insn, &PATTERN (insn), pat, true);
884 if (!apply_change_group ())
887 /* Success. Delete the compare insn... */
888 delete_insn (cmp->insn);
890 /* ... and any notes that are now invalid due to multiple sets. */
891 x = find_regno_note (insn, REG_UNUSED, targetm.flags_regnum);
893 remove_note (insn, x);
894 x = find_reg_note (insn, REG_EQUAL, NULL);
896 remove_note (insn, x);
897 x = find_reg_note (insn, REG_EQUIV, NULL);
899 remove_note (insn, x);
904 /* Main entry point to the pass. */
907 execute_compare_elim_after_reload (void)
/* Let DF run dead-code elimination as part of its solve.  */
909 df_set_flags (DF_LR_RUN_DCE);
912 gcc_checking_assert (!all_compares.exists ());
914 /* Locate all comparisons and their uses, and eliminate duplicates. */
916 if (all_compares.exists ())
918 struct comparison *cmp;
921 /* Eliminate comparisons that are redundant with flags computation. */
922 FOR_EACH_VEC_ELT (all_compares, i, cmp)
924 try_eliminate_compare (cmp);
/* The comparison structs were XCNEW'd in before_dom_children;
   freeing of the elements is elided from this excerpt — verify.  */
928 all_compares.release ();
/* Static registration data for the cmpelim RTL pass.  (Some initializer
   fields, e.g. the pass type and timevar id, are elided from this
   excerpt.)  */
936 const pass_data pass_data_compare_elim_after_reload =
939 "cmpelim", /* name */
940 OPTGROUP_NONE, /* optinfo_flags */
942 0, /* properties_required */
943 0, /* properties_provided */
944 0, /* properties_destroyed */
945 0, /* todo_flags_start */
946 ( TODO_df_finish | TODO_df_verify ), /* todo_flags_finish */
949 class pass_compare_elim_after_reload : public rtl_opt_pass
952 pass_compare_elim_after_reload (gcc::context *ctxt)
953 : rtl_opt_pass (pass_data_compare_elim_after_reload, ctxt)
956 /* opt_pass methods: */
/* Run only when the target declares a flags register and the user has
   not disabled the pass.  */
957 bool gate (function *) final override
959 /* Setting this target hook value is how a backend indicates the need. */
960 if (targetm.flags_regnum == INVALID_REGNUM)
962 return flag_compare_elim_after_reload;
965 unsigned int execute (function *) final override
967 return execute_compare_elim_after_reload ();
970 }; // class pass_compare_elim_after_reload
975 make_pass_compare_elim_after_reload (gcc::context *ctxt)
977 return new pass_compare_elim_after_reload (ctxt);