1 /* If-conversion support.
2 Copyright (C) 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
28 #include "insn-config.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
39 #ifndef HAVE_conditional_execution
40 #define HAVE_conditional_execution 0
42 #ifndef HAVE_conditional_move
43 #define HAVE_conditional_move 0
52 #ifndef MAX_CONDITIONAL_EXECUTE
53 #define MAX_CONDITIONAL_EXECUTE (BRANCH_COST + 1)
56 #define NULL_EDGE ((struct edge_def *)NULL)
57 #define NULL_BLOCK ((struct basic_block_def *)NULL)
/* Statistics and global state for the if-conversion pass.
   NOTE(review): this copy of the file is a partial extraction -- the
   embedded original line numbers are not contiguous, so declarations,
   braces and bodies are missing throughout.  Do not compile as-is.  */
59 /* # of IF-THEN or IF-THEN-ELSE blocks we looked at */
60 static int num_possible_if_blocks;
62 /* # of IF-THEN or IF-THEN-ELSE blocks were converted to conditional
   execution.  */
64 static int num_updated_if_blocks;
66 /* # of basic blocks that were removed. */
67 static int num_removed_blocks;
69 /* True if life data ok at present. */
70 static bool life_data_ok;
72 /* The post-dominator relation on the original block numbers. */
73 static sbitmap *post_dominators;
75 /* Forward references. */
76 static int count_bb_insns PARAMS ((basic_block));
77 static rtx first_active_insn PARAMS ((basic_block));
78 static int last_active_insn_p PARAMS ((basic_block, rtx));
79 static int seq_contains_jump PARAMS ((rtx));
81 static int cond_exec_process_insns PARAMS ((rtx, rtx, rtx, rtx, int));
82 static rtx cond_exec_get_condition PARAMS ((rtx));
83 static int cond_exec_process_if_block PARAMS ((basic_block, basic_block,
84 basic_block, basic_block));
86 static rtx noce_get_condition PARAMS ((rtx, rtx *));
87 static int noce_operand_ok PARAMS ((rtx));
88 static int noce_process_if_block PARAMS ((basic_block, basic_block,
89 basic_block, basic_block));
91 static int process_if_block PARAMS ((basic_block, basic_block,
92 basic_block, basic_block));
93 static void merge_if_block PARAMS ((basic_block, basic_block,
94 basic_block, basic_block));
96 static int find_if_header PARAMS ((basic_block));
97 static int find_if_block PARAMS ((basic_block, edge, edge));
98 static int find_if_case_1 PARAMS ((basic_block, edge, edge));
99 static int find_if_case_2 PARAMS ((basic_block, edge, edge));
100 static int find_memory PARAMS ((rtx *, void *));
101 static int dead_or_predicable PARAMS ((basic_block, basic_block,
102 basic_block, rtx, int));
103 static void noce_emit_move_insn PARAMS ((rtx, rtx));
105 /* Abuse the basic_block AUX field to store the original block index,
106 as well as a flag indicating that the block should be rescanned for
    life data updates.  */
/* The original index lives in the upper bits of aux (shifted left by
   one); bit 0 is the "needs life-data update" flag.  */
109 #define SET_ORIG_INDEX(BB,I) ((BB)->aux = (void *)((size_t)(I) << 1))
110 #define ORIG_INDEX(BB) ((size_t)(BB)->aux >> 1)
111 #define SET_UPDATE_LIFE(BB) ((BB)->aux = (void *)((size_t)(BB)->aux | 1))
112 #define UPDATE_LIFE(BB) ((size_t)(BB)->aux & 1)
115 /* Count the number of non-jump active insns in BB. */
/* NOTE(review): the bodies below are fragmentary -- function headers,
   braces and intervening statements are missing from this copy.  */
126 if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
131 insn = NEXT_INSN (insn);
137 /* Return the first non-jump active insn in the basic block. */
140 first_active_insn (bb)
/* Skip a leading CODE_LABEL and any NOTE insns; a JUMP_INSN ends the
   scan (presumably returning NULL -- the return is not visible here).  */
145 if (GET_CODE (insn) == CODE_LABEL)
149 insn = NEXT_INSN (insn);
152 while (GET_CODE (insn) == NOTE)
156 insn = NEXT_INSN (insn);
159 if (GET_CODE (insn) == JUMP_INSN)
165 /* Return true if INSN is the last active non-jump insn in BB. */
168 last_active_insn_p (bb, insn)
/* Step over NOTEs; INSN was last iff the next active insn is the jump.  */
176 insn = NEXT_INSN (insn);
178 while (GET_CODE (insn) == NOTE);
180 return GET_CODE (insn) == JUMP_INSN;
183 /* It is possible, especially when having dealt with multi-word
184 arithmetic, for the expanders to have emitted jumps. Search
185 through the sequence and return TRUE if a jump exists so that
186 we can abort the conversion. */
189 seq_contains_jump (insn)
194 if (GET_CODE (insn) == JUMP_INSN)
196 insn = NEXT_INSN (insn);
201 /* Go through a bunch of insns, converting them to conditional
202 execution format if possible. Return TRUE if all of the non-note
203 insns were processed. */
206 cond_exec_process_insns (start, end, test, prob_val, mod_ok)
207 rtx start; /* first insn to look at */
208 rtx end; /* last insn to look at */
209 rtx test; /* conditional execution test */
210 rtx prob_val; /* probability of branch taken. */
211 int mod_ok; /* true if modifications ok last insn. */
213 int must_be_last = FALSE;
217 for (insn = start; ; insn = NEXT_INSN (insn))
219 if (GET_CODE (insn) == NOTE)
222 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
225 /* Remove USE insns that get in the way. */
226 if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
228 /* ??? Ug. Actually unlinking the thing is problematic,
229 given what we'd have to coordinate with our callers. */
/* Turn the USE into a deleted NOTE in place instead of unlinking it.  */
230 PUT_CODE (insn, NOTE);
231 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
232 NOTE_SOURCE_FILE (insn) = 0;
236 /* Last insn wasn't last? */
240 if (modified_in_p (test, insn))
247 /* Now build the conditional form of the instruction. */
248 pattern = PATTERN (insn);
250 /* If the machine needs to modify the insn being conditionally executed,
251 say for example to force a constant integer operand into a temp
252 register, do so here. */
253 #ifdef IFCVT_MODIFY_INSN
254 IFCVT_MODIFY_INSN (pattern, insn);
/* Queue the (cond_exec TEST PATTERN) replacement; the group is applied
   (or cancelled) later via apply_change_group in the caller.  */
259 validate_change (insn, &PATTERN (insn),
260 gen_rtx_COND_EXEC (VOIDmode, copy_rtx (test),
/* BUGFIX(review): the '&REG_NOTES' below had been corrupted into the
   mis-encoded character sequence '(R)_NOTES'; restored the address-of
   expression so the REG_BR_PROB note is attached via validate_change.  */
263 if (GET_CODE (insn) == CALL_INSN && prob_val)
264 validate_change (insn, &REG_NOTES (insn),
265 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
266 REG_NOTES (insn)), 1);
276 /* Return the condition for a jump. Do not do any special processing. */
279 cond_exec_get_condition (jump)
284 if (any_condjump_p (jump))
285 test_if = SET_SRC (pc_set (jump));
288 cond = XEXP (test_if, 0);
290 /* If this branches to JUMP_LABEL when the condition is false,
291 reverse the condition. */
292 if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
293 && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
295 enum rtx_code rev = reversed_comparison_code (cond, jump);
299 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
306 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
307 to conditional execution. Return TRUE if we were successful at
308 converting the block. */
311 cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb)
312 basic_block test_bb; /* Basic block test is in */
313 basic_block then_bb; /* Basic block for THEN block */
314 basic_block else_bb; /* Basic block for ELSE block */
315 basic_block join_bb; /* Basic block the join label is in */
317 rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
318 rtx then_start; /* first insn in THEN block */
319 rtx then_end; /* last insn + 1 in THEN block */
320 rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
321 rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
322 int max; /* max # of insns to convert. */
323 int then_mod_ok; /* whether conditional mods are ok in THEN */
324 rtx true_expr; /* test for else block insns */
325 rtx false_expr; /* test for then block insns */
326 rtx true_prob_val; /* probability of else block */
327 rtx false_prob_val; /* probability of then block */
329 enum rtx_code false_code;
331 /* Find the conditional jump to the ELSE or JOIN part, and isolate
    the test.  */
333 test_expr = cond_exec_get_condition (test_bb->end);
337 /* If the conditional jump is more than just a conditional jump,
338 then we can not do conditional execution conversion on this block. */
339 if (!onlyjump_p (test_bb->end))
342 /* Collect the bounds of where we're to search. */
344 then_start = then_bb->head;
345 then_end = then_bb->end;
347 /* Skip a label heading THEN block. */
348 if (GET_CODE (then_start) == CODE_LABEL)
349 then_start = NEXT_INSN (then_start);
351 /* Skip a (use (const_int 0)) or branch as the final insn. */
352 if (GET_CODE (then_end) == INSN
353 && GET_CODE (PATTERN (then_end)) == USE
354 && GET_CODE (XEXP (PATTERN (then_end), 0)) == CONST_INT)
355 then_end = PREV_INSN (then_end);
356 else if (GET_CODE (then_end) == JUMP_INSN)
357 then_end = PREV_INSN (then_end);
361 /* Skip the ELSE block's label. */
362 else_start = NEXT_INSN (else_bb->head);
363 else_end = else_bb->end;
365 /* Skip a (use (const_int 0)) or branch as the final insn. */
366 if (GET_CODE (else_end) == INSN
367 && GET_CODE (PATTERN (else_end)) == USE
368 && GET_CODE (XEXP (PATTERN (else_end), 0)) == CONST_INT)
369 else_end = PREV_INSN (else_end);
370 else if (GET_CODE (else_end) == JUMP_INSN)
371 else_end = PREV_INSN (else_end);
374 /* How many instructions should we convert in total? */
/* Twice the budget when both a THEN and an ELSE arm are converted.  */
378 max = 2 * MAX_CONDITIONAL_EXECUTE;
379 n_insns = count_bb_insns (else_bb);
382 max = MAX_CONDITIONAL_EXECUTE;
383 n_insns += count_bb_insns (then_bb);
387 /* Map test_expr/test_jump into the appropriate MD tests to use on
388 the conditionally executed code. */
390 true_expr = test_expr;
392 false_code = reversed_comparison_code (true_expr, test_bb->end);
393 if (false_code != UNKNOWN)
394 false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
395 XEXP (true_expr, 0), XEXP (true_expr, 1));
397 false_expr = NULL_RTX;
399 #ifdef IFCVT_MODIFY_TESTS
400 /* If the machine description needs to modify the tests, such as setting a
401 conditional execution register from a comparison, it can do so here. */
402 IFCVT_MODIFY_TESTS (true_expr, false_expr, test_bb, then_bb, else_bb,
405 /* See if the conversion failed */
406 if (!true_expr || !false_expr)
/* Derive the branch-not-taken probability from the REG_BR_PROB note,
   when one is present on the test block's jump.  */
410 true_prob_val = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
413 true_prob_val = XEXP (true_prob_val, 0);
414 false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
417 false_prob_val = NULL_RTX;
419 /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
420 on the THEN block. */
421 then_mod_ok = (else_bb == NULL_BLOCK);
423 /* Go through the THEN and ELSE blocks converting the insns if possible
424 to conditional execution. */
428 || ! cond_exec_process_insns (then_start, then_end, false_expr,
429 false_prob_val, then_mod_ok)))
433 && ! cond_exec_process_insns (else_start, else_end,
434 true_expr, true_prob_val, TRUE))
437 if (! apply_change_group ())
440 #ifdef IFCVT_MODIFY_FINAL
441 /* Do any machine dependent final modifications */
442 IFCVT_MODIFY_FINAL (test_bb, then_bb, else_bb, join_bb);
445 /* Conversion succeeded. */
447 fprintf (rtl_dump_file, "%d insn%s converted to conditional execution.\n",
448 n_insns, (n_insns == 1) ? " was" : "s were");
450 /* Merge the blocks! */
451 merge_if_block (test_bb, then_bb, else_bb, join_bb);
455 #ifdef IFCVT_MODIFY_CANCEL
456 /* Cancel any machine dependent changes. */
457 IFCVT_MODIFY_CANCEL (test_bb, then_bb, else_bb, join_bb);
464 /* Used by noce_process_if_block to communicate with its subroutines.
466 The subroutines know that A and B may be evaluated freely. They
467 know that X is a register. They should insert new instructions
468 before cond_earliest. */
/* NOTE(review): the struct's opening/closing braces and remaining
   members are missing from this copy; only one member line survives.  */
475 rtx jump, cond, cond_earliest;
/* Forward declarations for the "no conditional execution" (noce)
   transform helpers below.  */
478 static rtx noce_emit_store_flag PARAMS ((struct noce_if_info *,
480 static int noce_try_store_flag PARAMS ((struct noce_if_info *));
481 static int noce_try_store_flag_inc PARAMS ((struct noce_if_info *));
482 static int noce_try_store_flag_constants PARAMS ((struct noce_if_info *));
483 static int noce_try_store_flag_mask PARAMS ((struct noce_if_info *));
484 static rtx noce_emit_cmove PARAMS ((struct noce_if_info *,
485 rtx, enum rtx_code, rtx,
487 static int noce_try_cmove PARAMS ((struct noce_if_info *));
488 static int noce_try_cmove_arith PARAMS ((struct noce_if_info *));
489 static rtx noce_get_alt_condition PARAMS ((struct noce_if_info *,
491 static int noce_try_minmax PARAMS ((struct noce_if_info *));
492 static int noce_try_abs PARAMS ((struct noce_if_info *));
494 /* Helper function for noce_try_store_flag*. */
497 noce_emit_store_flag (if_info, x, reversep, normalize)
498 struct noce_if_info *if_info;
500 int reversep, normalize;
502 rtx cond = if_info->cond;
/* A condition is "complex" when either comparison operand is not a
   general operand (e.g. combine built something exotic).  */
506 cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
507 || ! general_operand (XEXP (cond, 1), VOIDmode));
509 /* If earliest == jump, or when the condition is complex, try to
510 build the store_flag insn directly. */
513 cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
516 code = reversed_comparison_code (cond, if_info->jump);
518 code = GET_CODE (cond);
520 if ((if_info->cond_earliest == if_info->jump || cond_complex)
521 && (normalize == 0 || STORE_FLAG_VALUE == normalize))
/* Emit (set X (code op0 op1)) directly and keep it only if the target
   recognizes the pattern.  */
525 tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
527 tmp = gen_rtx_SET (VOIDmode, x, tmp);
530 tmp = emit_insn (tmp);
532 if (recog_memoized (tmp) >= 0)
538 if_info->cond_earliest = if_info->jump;
546 /* Don't even try if the comparison operands are weird. */
550 return emit_store_flag (x, code, XEXP (cond, 0),
551 XEXP (cond, 1), VOIDmode,
552 (code == LTU || code == LEU
553 || code == GEU || code == GTU), normalize);
556 /* Emit instruction to move a rtx into STRICT_LOW_PART. */
558 noce_emit_move_insn (x, y)
561 enum machine_mode outmode, inmode;
/* Plain destinations go through the ordinary move expander.  */
565 if (GET_CODE (x) != STRICT_LOW_PART)
567 emit_move_insn (x, y);
/* Otherwise store Y into the low part of the inner register via
   store_bit_field, preserving the rest of the destination.  */
572 inner = XEXP (outer, 0);
573 outmode = GET_MODE (outer);
574 inmode = GET_MODE (inner);
575 bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
576 store_bit_field (inner, GET_MODE_BITSIZE (outmode),
577 bitpos, outmode, y, GET_MODE_BITSIZE (inmode),
578 GET_MODE_BITSIZE (inmode));
581 /* Convert "if (test) x = 1; else x = 0".
583 Only try 0 and STORE_FLAG_VALUE here. Other combinations will be
584 tried in noce_try_store_flag_constants after noce_try_cmove has had
585 a go at the conversion. */
588 noce_try_store_flag (if_info)
589 struct noce_if_info *if_info;
/* Match either (a == 0, b == STORE_FLAG_VALUE) directly, or the
   mirrored form when the comparison can be reversed.  */
594 if (GET_CODE (if_info->b) == CONST_INT
595 && INTVAL (if_info->b) == STORE_FLAG_VALUE
596 && if_info->a == const0_rtx)
598 else if (if_info->b == const0_rtx
599 && GET_CODE (if_info->a) == CONST_INT
600 && INTVAL (if_info->a) == STORE_FLAG_VALUE
601 && (reversed_comparison_code (if_info->cond, if_info->jump)
609 target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
612 if (target != if_info->x)
613 noce_emit_move_insn (if_info->x, target);
617 emit_insns_before (seq, if_info->cond_earliest);
628 /* Convert "if (test) x = a; else x = b", for A and B constant. */
631 noce_try_store_flag_constants (if_info)
632 struct noce_if_info *if_info;
636 HOST_WIDE_INT itrue, ifalse, diff, tmp;
637 int normalize, can_reverse;
640 && GET_CODE (if_info->a) == CONST_INT
641 && GET_CODE (if_info->b) == CONST_INT)
643 ifalse = INTVAL (if_info->a);
644 itrue = INTVAL (if_info->b);
645 diff = itrue - ifalse;
647 can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
/* Pick a normalization/reversal strategy based on the constant pair
   and the relative cost of a branch on this target.  */
651 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
653 else if (ifalse == 0 && exact_log2 (itrue) >= 0
654 && (STORE_FLAG_VALUE == 1
655 || BRANCH_COST >= 2))
657 else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
658 && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
659 normalize = 1, reversep = 1;
661 && (STORE_FLAG_VALUE == -1
662 || BRANCH_COST >= 2))
664 else if (ifalse == -1 && can_reverse
665 && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
666 normalize = -1, reversep = 1;
667 else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
/* When reversing the test, swap the roles of the two constants.  */
675 tmp = itrue; itrue = ifalse; ifalse = tmp;
680 target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
687 /* if (test) x = 3; else x = 4;
688 => x = 3 + (test == 0); */
689 if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
691 target = expand_binop (GET_MODE (if_info->x),
692 (diff == STORE_FLAG_VALUE
693 ? add_optab : sub_optab),
694 GEN_INT (ifalse), target, if_info->x, 0,
698 /* if (test) x = 8; else x = 0;
699 => x = (test != 0) << 3; */
700 else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
702 target = expand_binop (GET_MODE (if_info->x), ashl_optab,
703 target, GEN_INT (tmp), if_info->x, 0,
707 /* if (test) x = -1; else x = b;
708 => x = -(test != 0) | b; */
709 else if (itrue == -1)
711 target = expand_binop (GET_MODE (if_info->x), ior_optab,
712 target, GEN_INT (ifalse), if_info->x, 0,
716 /* if (test) x = a; else x = b;
717 => x = (-(test != 0) & (b - a)) + a; */
720 target = expand_binop (GET_MODE (if_info->x), and_optab,
721 target, GEN_INT (diff), if_info->x, 0,
724 target = expand_binop (GET_MODE (if_info->x), add_optab,
725 target, GEN_INT (ifalse), if_info->x, 0,
735 if (target != if_info->x)
736 noce_emit_move_insn (if_info->x, target);
/* The expanders may have emitted a jump; if so the transform is
   abandoned (see seq_contains_jump).  */
741 if (seq_contains_jump (seq))
744 emit_insns_before (seq, if_info->cond_earliest);
752 /* Convert "if (test) foo++" into "foo += (test != 0)", and
753 similarly for "foo--". */
756 noce_try_store_flag_inc (if_info)
757 struct noce_if_info *if_info;
760 int subtract, normalize;
766 /* Should be no `else' case to worry about. */
767 && if_info->b == if_info->x
768 && GET_CODE (if_info->a) == PLUS
769 && (XEXP (if_info->a, 1) == const1_rtx
770 || XEXP (if_info->a, 1) == constm1_rtx)
771 && rtx_equal_p (XEXP (if_info->a, 0), if_info->x)
772 && (reversed_comparison_code (if_info->cond, if_info->jump)
/* Match the +/-1 increment against STORE_FLAG_VALUE to decide between
   add and subtract, or fall back to a normalized flag value.  */
775 if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
776 subtract = 0, normalize = 0;
777 else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
778 subtract = 1, normalize = 0;
780 subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
784 target = noce_emit_store_flag (if_info,
785 gen_reg_rtx (GET_MODE (if_info->x)),
789 target = expand_binop (GET_MODE (if_info->x),
790 subtract ? sub_optab : add_optab,
791 if_info->x, target, if_info->x, 0, OPTAB_WIDEN);
794 if (target != if_info->x)
795 noce_emit_move_insn (if_info->x, target);
800 if (seq_contains_jump (seq))
803 emit_insns_before (seq, if_info->cond_earliest);
814 /* Convert "if (test) x = 0;" to "x &= -(test == 0);" */
817 noce_try_store_flag_mask (if_info)
818 struct noce_if_info *if_info;
826 || STORE_FLAG_VALUE == -1)
827 && ((if_info->a == const0_rtx
828 && rtx_equal_p (if_info->b, if_info->x))
829 || ((reversep = (reversed_comparison_code (if_info->cond,
832 && if_info->b == const0_rtx
833 && rtx_equal_p (if_info->a, if_info->x))))
835 /* Mask X with the store-flag result: -(test==0) is all-ones when the
   store does not happen and zero when it does.  */
836 target = noce_emit_store_flag (if_info,
837 gen_reg_rtx (GET_MODE (if_info->x)),
840 target = expand_binop (GET_MODE (if_info->x), and_optab,
841 if_info->x, target, if_info->x, 0,
846 if (target != if_info->x)
847 noce_emit_move_insn (if_info->x, target);
852 if (seq_contains_jump (seq))
855 emit_insns_before (seq, if_info->cond_earliest);
866 /* Helper function for noce_try_cmove and noce_try_cmove_arith. */
869 noce_emit_cmove (if_info, x, code, cmp_a, cmp_b, vfalse, vtrue)
870 struct noce_if_info *if_info;
871 rtx x, cmp_a, cmp_b, vfalse, vtrue;
874 /* If earliest == jump, try to build the cmove insn directly.
875 This is helpful when combine has created some complex condition
876 (like for alpha's cmovlbs) that we can't hope to regenerate
877 through the normal interface. */
879 if (if_info->cond_earliest == if_info->jump)
/* Emit (set X (if_then_else (code cmp_a cmp_b) vtrue vfalse)) and keep
   it only if the target recognizes the pattern.  */
883 tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
884 tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
885 tmp = gen_rtx_SET (VOIDmode, x, tmp);
888 tmp = emit_insn (tmp);
890 if (recog_memoized (tmp) >= 0)
902 /* Don't even try if the comparison operands are weird. */
903 if (! general_operand (cmp_a, GET_MODE (cmp_a))
904 || ! general_operand (cmp_b, GET_MODE (cmp_b)))
907 #if HAVE_conditional_move
908 return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
909 vtrue, vfalse, GET_MODE (x),
910 (code == LTU || code == GEU
911 || code == LEU || code == GTU));
913 /* We'll never get here, as noce_process_if_block doesn't call the
914 functions involved. Ifdef code, however, should be discouraged
915 because it leads to typos in the code not selected. However,
916 emit_conditional_move won't exist either. */
921 /* Try only simple constants and registers here. More complex cases
922 are handled in noce_try_cmove_arith after noce_try_store_flag_arith
923 has had a go at it. */
926 noce_try_cmove (if_info)
927 struct noce_if_info *if_info;
932 if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
933 && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
937 code = GET_CODE (if_info->cond);
938 target = noce_emit_cmove (if_info, if_info->x, code,
939 XEXP (if_info->cond, 0),
940 XEXP (if_info->cond, 1),
941 if_info->a, if_info->b);
945 if (target != if_info->x)
946 noce_emit_move_insn (if_info->x, target);
950 emit_insns_before (seq, if_info->cond_earliest);
963 /* Try more complex cases involving conditional_move. */
966 noce_try_cmove_arith (if_info)
967 struct noce_if_info *if_info;
977 /* A conditional move from two memory sources is equivalent to a
978 conditional on their addresses followed by a load. Don't do this
979 early because it'll screw alias analysis. Note that we've
980 already checked for no side effects. */
981 if (! no_new_pseudos && cse_not_expected
982 && GET_CODE (a) == MEM && GET_CODE (b) == MEM
/* Select between the two addresses into X, then load through it.  */
987 x = gen_reg_rtx (Pmode);
991 /* ??? We could handle this if we knew that a load from A or B could
992 not fault. This is also true if we've already loaded
993 from the address along the path from ENTRY. */
994 else if (may_trap_p (a) || may_trap_p (b))
997 /* if (test) x = a + b; else x = c - d;
1004 code = GET_CODE (if_info->cond);
1005 insn_a = if_info->insn_a;
1006 insn_b = if_info->insn_b;
1008 /* Possibly rearrange operands to make things come out more natural. */
1009 if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1012 if (rtx_equal_p (b, x))
1014 else if (general_operand (b, GET_MODE (b)))
/* Reverse the comparison and swap the A/B operand pair (and their
   defining insns) together.  */
1019 code = reversed_comparison_code (if_info->cond, if_info->jump);
1020 tmp = a, a = b, b = tmp;
1021 tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1027 /* If either operand is complex, load it into a register first.
1028 The best way to do this is to copy the original insn. In this
1029 way we preserve any clobbers etc that the insn may have had.
1030 This is of course not possible in the IS_MEM case. */
1031 if (! general_operand (a, GET_MODE (a)))
1036 goto end_seq_and_fail;
1040 tmp = gen_reg_rtx (GET_MODE (a));
1041 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1044 goto end_seq_and_fail;
1047 a = gen_reg_rtx (GET_MODE (a));
1048 tmp = copy_rtx (insn_a);
1049 set = single_set (tmp);
1051 tmp = emit_insn (PATTERN (tmp));
1053 if (recog_memoized (tmp) < 0)
1054 goto end_seq_and_fail;
/* Same treatment for the B operand.  */
1056 if (! general_operand (b, GET_MODE (b)))
1061 goto end_seq_and_fail;
1065 tmp = gen_reg_rtx (GET_MODE (b));
1066 tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, b));
1069 goto end_seq_and_fail;
1072 b = gen_reg_rtx (GET_MODE (b));
1073 tmp = copy_rtx (insn_b);
1074 set = single_set (tmp);
1076 tmp = emit_insn (PATTERN (tmp));
1078 if (recog_memoized (tmp) < 0)
1079 goto end_seq_and_fail;
1082 target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1083 XEXP (if_info->cond, 1), a, b);
1086 goto end_seq_and_fail;
1088 /* If we're handling a memory for above, emit the load now. */
1091 tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1093 /* Copy over flags as appropriate. */
1094 if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1095 MEM_VOLATILE_P (tmp) = 1;
1096 if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1097 MEM_IN_STRUCT_P (tmp) = 1;
1098 if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1099 MEM_SCALAR_P (tmp) = 1;
1100 if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1101 set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1103 noce_emit_move_insn (if_info->x, tmp);
1105 else if (target != x)
1106 noce_emit_move_insn (x, target);
1110 emit_insns_before (tmp, if_info->cond_earliest);
1118 /* For most cases, the simplified condition we found is the best
1119 choice, but this is not the case for the min/max/abs transforms.
1120 For these we wish to know that it is A or B in the condition. */
1123 noce_get_alt_condition (if_info, target, earliest)
1124 struct noce_if_info *if_info;
1128 rtx cond, set, insn;
1131 /* If target is already mentioned in the known condition, return it. */
1132 if (reg_mentioned_p (target, if_info->cond))
1134 *earliest = if_info->cond_earliest;
1135 return if_info->cond;
/* Otherwise re-derive the condition from the jump itself, reversing it
   when the branch goes to JUMP_LABEL on the false arm.  */
1138 set = pc_set (if_info->jump);
1139 cond = XEXP (SET_SRC (set), 0);
1141 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1142 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1144 cond = canonicalize_condition (if_info->jump, cond, reverse,
1146 if (! cond || ! reg_mentioned_p (target, cond))
1149 /* We almost certainly searched back to a different place.
1150 Need to re-verify correct lifetimes. */
1152 /* X may not be mentioned in the range (cond_earliest, jump]. */
1153 for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1154 if (INSN_P (insn) && reg_mentioned_p (if_info->x, insn))
1157 /* A and B may not be modified in the range [cond_earliest, jump). */
1158 for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1160 && (modified_in_p (if_info->a, insn)
1161 || modified_in_p (if_info->b, insn)))
1167 /* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc. */
1170 noce_try_minmax (if_info)
1171 struct noce_if_info *if_info;
1173 rtx cond, earliest, target, seq;
1178 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1182 /* ??? Reject FP modes since we don't know how 0 vs -0 or NaNs
1183 will be resolved with an SMIN/SMAX. It wouldn't be too hard
1184 to get the target to tell us... */
1185 if (FLOAT_MODE_P (GET_MODE (if_info->x))
1186 && TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1187 && ! flag_unsafe_math_optimizations)
1190 cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1194 /* Verify the condition is of the form we expect, and canonicalize
1195 the comparison code. */
1196 code = GET_CODE (cond);
1197 if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1199 if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1202 else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1204 if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1206 code = swap_condition (code);
1211 /* Determine what sort of operation this is. Note that the code is for
1212 a taken branch, so the code->operation mapping appears backwards. */
1245 target = expand_binop (GET_MODE (if_info->x), op, if_info->a, if_info->b,
1246 if_info->x, unsignedp, OPTAB_WIDEN);
1252 if (target != if_info->x)
1253 noce_emit_move_insn (if_info->x, target);
1258 if (seq_contains_jump (seq))
1261 emit_insns_before (seq, earliest);
/* Record the alternate condition we actually used.  */
1262 if_info->cond = cond;
1263 if_info->cond_earliest = earliest;
1268 /* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc. */
1271 noce_try_abs (if_info)
1272 struct noce_if_info *if_info;
1274 rtx cond, earliest, target, seq, a, b, c;
1277 /* ??? Can't guarantee that expand_binop won't create pseudos. */
1281 /* Recognize A and B as constituting an ABS or NABS. */
1284 if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1286 else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1288 c = a; a = b; b = c;
1294 cond = noce_get_alt_condition (if_info, b, &earliest);
1298 /* Verify the condition is of the form we expect. */
1299 if (rtx_equal_p (XEXP (cond, 0), b))
1301 else if (rtx_equal_p (XEXP (cond, 1), b))
1306 /* Verify that C is zero. Search backward through the block for
1307 a REG_EQUAL note if necessary. */
1310 rtx insn, note = NULL;
1311 for (insn = earliest;
1312 insn != if_info->test_bb->head;
1313 insn = PREV_INSN (insn))
1315 && ((note = find_reg_note (insn, REG_EQUAL, c))
1316 || (note = find_reg_note (insn, REG_EQUIV, c))))
/* C may be a constant-pool reference; look the constant up.  */
1322 if (GET_CODE (c) == MEM
1323 && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1324 && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1325 c = get_pool_constant (XEXP (c, 0));
1327 /* Work around funny ideas get_condition has wrt canonicalization.
1328 Note that these rtx constants are known to be CONST_INT, and
1329 therefore imply integer comparisons. */
1330 if (c == constm1_rtx && GET_CODE (cond) == GT)
1332 else if (c == const1_rtx && GET_CODE (cond) == LT)
1334 else if (c != CONST0_RTX (GET_MODE (b)))
1337 /* Determine what sort of operation this is. */
1338 switch (GET_CODE (cond))
1357 target = expand_unop (GET_MODE (if_info->x), abs_optab, b, if_info->x, 0);
1359 /* ??? It's a quandary whether cmove would be better here, especially
1360 for integers. Perhaps combine will clean things up. */
1361 if (target && negate)
1362 target = expand_unop (GET_MODE (target), neg_optab, target, if_info->x, 0);
1370 if (target != if_info->x)
1371 noce_emit_move_insn (if_info->x, target);
1376 if (seq_contains_jump (seq))
1379 emit_insns_before (seq, earliest);
1380 if_info->cond = cond;
1381 if_info->cond_earliest = earliest;
1386 /* Look for the condition for the jump first. We'd prefer to avoid
1387 get_condition if we can -- it tries to look back for the contents
1388 of an original compare. On targets that use normal integers for
1389 comparisons, e.g. alpha, this is wasteful. */
1392 noce_get_condition (jump, earliest)
1399 /* If the condition variable is a register and is MODE_INT, accept it.
1400 Otherwise, fall back on get_condition. */
1402 if (! any_condjump_p (jump))
1405 set = pc_set (jump);
1407 cond = XEXP (SET_SRC (set), 0);
1408 if (GET_CODE (XEXP (cond, 0)) == REG
1409 && GET_MODE_CLASS (GET_MODE (XEXP (cond, 0))) == MODE_INT)
1413 /* If this branches to JUMP_LABEL when the condition is false,
1414 reverse the condition. */
1415 if (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1416 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump))
1417 cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
1418 GET_MODE (cond), XEXP (cond, 0),
/* Fallback: let get_condition search back for the compare.  */
1422 cond = get_condition (jump, earliest);
1427 /* Return true if OP is ok for if-then-else processing. */
1430 noce_operand_ok (op)
1433 /* We special-case memories, so handle any of them with
1434 no address side effects. */
1435 if (GET_CODE (op) == MEM)
1436 return ! side_effects_p (XEXP (op, 0));
1438 if (side_effects_p (op))
1441 /* ??? Unfortunately may_trap_p can't look at flag_trapping_math, due to
1442 being linked into the genfoo programs. This is probably a mistake.
1443 With finite operands, most fp operations don't trap. */
1444 if (!flag_trapping_math && FLOAT_MODE_P (GET_MODE (op)))
1445 switch (GET_CODE (op))
1451 /* ??? This is kinda lame -- almost every target will have forced
1452 the constant into a register first. But given the expense of
1453 division, this is probably for the best. */
1454 return (CONSTANT_P (XEXP (op, 1))
1455 && XEXP (op, 1) != CONST0_RTX (GET_MODE (op))
1456 && ! may_trap_p (XEXP (op, 0)));
/* For other codes, require that no operand (or the whole rtx) can trap.  */
1459 switch (GET_RTX_CLASS (GET_CODE (op)))
1462 return ! may_trap_p (XEXP (op, 0));
1465 return ! may_trap_p (XEXP (op, 0)) && ! may_trap_p (XEXP (op, 1));
1470 return ! may_trap_p (op);
1473 /* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
1474 without using conditional execution. Return TRUE if we were
1475 successful at converting the block. */
1478 noce_process_if_block (test_bb, then_bb, else_bb, join_bb)
1479 basic_block test_bb; /* Basic block test is in */
1480 basic_block then_bb; /* Basic block for THEN block */
1481 basic_block else_bb; /* Basic block for ELSE block */
1482 basic_block join_bb; /* Basic block the join label is in */
1484 /* We're looking for patterns of the form
1486 (1) if (...) x = a; else x = b;
1487 (2) x = b; if (...) x = a;
1488 (3) if (...) x = a; // as if with an initial x = x.
1490 The later patterns require jumps to be more expensive.
1492 ??? For future expansion, look for multiple X in such patterns. */
1494 struct noce_if_info if_info;
1497 rtx orig_x, x, a, b;
1498 rtx jump, cond, insn;
1500 /* If this is not a standard conditional jump, we can't parse it. */
1501 jump = test_bb->end;
1502 cond = noce_get_condition (jump, &if_info.cond_earliest);
1506 /* If the conditional jump is more than just a conditional jump,
1507 then we can not do if-conversion on this block. */
1508 if (! onlyjump_p (jump))
1511 /* We must be comparing objects whose modes imply the size. */
1512 if (GET_MODE (XEXP (cond, 0)) == BLKmode)
1515 /* Look for one of the potential sets. */
1516 insn_a = first_active_insn (then_bb);
1518 || ! last_active_insn_p (then_bb, insn_a)
1519 || (set_a = single_set (insn_a)) == NULL_RTX)
1522 x = SET_DEST (set_a);
1523 a = SET_SRC (set_a);
1525 /* Look for the other potential set. Make sure we've got equivalent
1527 /* ??? This is overconservative. Storing to two different mems is
1528 as easy as conditionally computing the address. Storing to a
1529 single mem merely requires a scratch memory to use as one of the
1530 destination addresses; often the memory immediately below the
1531 stack pointer is available for this. */
1535 insn_b = first_active_insn (else_bb);
1537 || ! last_active_insn_p (else_bb, insn_b)
1538 || (set_b = single_set (insn_b)) == NULL_RTX
1539 || ! rtx_equal_p (x, SET_DEST (set_b)))
1544 insn_b = prev_nonnote_insn (if_info.cond_earliest);
1546 || GET_CODE (insn_b) != INSN
1547 || (set_b = single_set (insn_b)) == NULL_RTX
1548 || ! rtx_equal_p (x, SET_DEST (set_b))
1549 || reg_mentioned_p (x, cond)
1550 || reg_mentioned_p (x, a)
1551 || reg_mentioned_p (x, SET_SRC (set_b)))
1552 insn_b = set_b = NULL_RTX;
1554 b = (set_b ? SET_SRC (set_b) : x);
1556 /* X may not be mentioned in the range (cond_earliest, jump]. */
1557 for (insn = jump; insn != if_info.cond_earliest; insn = PREV_INSN (insn))
1558 if (INSN_P (insn) && reg_mentioned_p (x, insn))
1561 /* A and B may not be modified in the range [cond_earliest, jump). */
1562 for (insn = if_info.cond_earliest; insn != jump; insn = NEXT_INSN (insn))
1564 && (modified_in_p (a, insn) || modified_in_p (b, insn)))
1567 /* Only operate on register destinations, and even then avoid extending
1568 the lifetime of hard registers on small register class machines. */
1570 if (GET_CODE (x) != REG
1571 || (SMALL_REGISTER_CLASSES
1572 && REGNO (x) < FIRST_PSEUDO_REGISTER))
1576 x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
1577 ? XEXP (x, 0) : x));
1580 /* Don't operate on sources that may trap or are volatile. */
1581 if (! noce_operand_ok (a) || ! noce_operand_ok (b))
1584 /* Set up the info block for our subroutines. */
1585 if_info.test_bb = test_bb;
1586 if_info.cond = cond;
1587 if_info.jump = jump;
1588 if_info.insn_a = insn_a;
1589 if_info.insn_b = insn_b;
1594 /* Try optimizations in some approximation of a useful order. */
1595 /* ??? Should first look to see if X is live incoming at all. If it
1596 isn't, we don't need anything but an unconditional set. */
1598 /* Look and see if A and B are really the same. Avoid creating silly
1599 cmove constructs that no one will fix up later. */
1600 if (rtx_equal_p (a, b))
1602 /* If we have an INSN_B, we don't have to create any new rtl. Just
1603 move the instruction that we already have. If we don't have an
1604 INSN_B, that means that A == X, and we've got a noop move. In
1605 that case don't do anything and let the code below delete INSN_A. */
1606 if (insn_b && else_bb)
1610 if (else_bb && insn_b == else_bb->end)
1611 else_bb->end = PREV_INSN (insn_b);
1612 reorder_insns (insn_b, insn_b, PREV_INSN (if_info.cond_earliest));
1614 /* If there was a REG_EQUAL note, delete it since it may have been
1615 true due to this insn being after a jump. */
1616 if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
1617 remove_note (insn_b, note);
1621 /* If we have "x = b; if (...) x = a;", and x has side-effects, then
1622 x must be executed twice. */
1623 else if (insn_b && side_effects_p (orig_x))
1630 if (noce_try_store_flag (&if_info))
1632 if (noce_try_minmax (&if_info))
1634 if (noce_try_abs (&if_info))
1636 if (HAVE_conditional_move
1637 && noce_try_cmove (&if_info))
1639 if (! HAVE_conditional_execution)
1641 if (noce_try_store_flag_constants (&if_info))
1643 if (noce_try_store_flag_inc (&if_info))
1645 if (noce_try_store_flag_mask (&if_info))
1647 if (HAVE_conditional_move
1648 && noce_try_cmove_arith (&if_info))
1655 /* The original sets may now be killed. */
1656 if (insn_a == then_bb->end)
1657 then_bb->end = PREV_INSN (insn_a);
1658 flow_delete_insn (insn_a);
1660 /* Several special cases here: First, we may have reused insn_b above,
1661 in which case insn_b is now NULL. Second, we want to delete insn_b
1662 if it came from the ELSE block, because follows the now correct
1663 write that appears in the TEST block. However, if we got insn_b from
1664 the TEST block, it may in fact be loading data needed for the comparison.
1665 We'll let life_analysis remove the insn if it's really dead. */
1666 if (insn_b && else_bb)
1668 if (insn_b == else_bb->end)
1669 else_bb->end = PREV_INSN (insn_b);
1670 flow_delete_insn (insn_b);
1673 /* The new insns will have been inserted before cond_earliest. We should
1674 be able to remove the jump with impunity, but the condition itself may
1675 have been modified by gcse to be shared across basic blocks. */
1676 test_bb->end = PREV_INSN (jump);
1677 flow_delete_insn (jump);
1679 /* If we used a temporary, fix it up now. */
1683 noce_emit_move_insn (orig_x, x);
1684 insn_b = gen_sequence ();
1687 test_bb->end = emit_insn_after (insn_b, test_bb->end);
1690 /* Merge the blocks! */
1691 merge_if_block (test_bb, then_bb, else_bb, join_bb);
1696 /* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
1697 straight line code. Return true if successful. */
1700 process_if_block (test_bb, then_bb, else_bb, join_bb)
1701 basic_block test_bb; /* Basic block test is in */
1702 basic_block then_bb; /* Basic block for THEN block */
1703 basic_block else_bb; /* Basic block for ELSE block */
1704 basic_block join_bb; /* Basic block the join label is in */
     /* Before reload, prefer the store-flag/cmove transformations; the
        conditional-execution path is tried afterward if available.  */
1706 if (! reload_completed
1707 && noce_process_if_block (test_bb, then_bb, else_bb, join_bb))
1710 if (HAVE_conditional_execution
1712 && cond_exec_process_if_block (test_bb, then_bb, else_bb, join_bb))
1718 /* Merge the blocks and mark for local life update. */
1721 merge_if_block (test_bb, then_bb, else_bb, join_bb)
1722 basic_block test_bb; /* Basic block test is in */
1723 basic_block then_bb; /* Basic block for THEN block */
1724 basic_block else_bb; /* Basic block for ELSE block */
1725 basic_block join_bb; /* Basic block the join label is in */
1727 basic_block combo_bb;
1729 /* All block merging is done into the lower block numbers. */
1733 /* First merge TEST block into THEN block. This is a no-brainer since
1734 the THEN block did not have a code label to begin with. */
1737 COPY_REG_SET (combo_bb->global_live_at_end, then_bb->global_live_at_end);
1738 merge_blocks_nomove (combo_bb, then_bb);
1739 num_removed_blocks++;
1741 /* The ELSE block, if it existed, had a label. That label count
1742 will almost always be zero, but odd things can happen when labels
1743 get their addresses taken. */
1746 merge_blocks_nomove (combo_bb, else_bb);
1747 num_removed_blocks++;
1750 /* If there was no join block reported, that means it was not adjacent
1751 to the others, and so we cannot merge them. */
1755 /* The outgoing edge for the current COMBO block should already
1756 be correct. Verify this. */
1757 if (combo_bb->succ == NULL_EDGE)
1760 /* There should still be a branch at the end of the THEN or ELSE
1761 blocks taking us to our final destination. */
1762 if (! any_uncondjump_p (combo_bb->end)
1763 && ! returnjump_p (combo_bb->end))
1767 /* The JOIN block may have had quite a number of other predecessors too.
1768 Since we've already merged the TEST, THEN and ELSE blocks, we should
1769 have only one remaining edge from our if-then-else diamond. If there
1770 is more than one remaining edge, it must come from elsewhere. There
1771 may be zero incoming edges if the THEN block didn't actually join
1772 back up (as with a call to abort). */
1773 else if (join_bb->pred == NULL || join_bb->pred->pred_next == NULL)
1775 /* We can merge the JOIN. */
1777 COPY_REG_SET (combo_bb->global_live_at_end,
1778 join_bb->global_live_at_end);
1779 merge_blocks_nomove (combo_bb, join_bb);
1780 num_removed_blocks++;
1784 /* We cannot merge the JOIN. */
1786 /* The outgoing edge for the current COMBO block should already
1787 be correct. Verify this. */
1788 if (combo_bb->succ->succ_next != NULL_EDGE
1789 || combo_bb->succ->dest != join_bb)
1792 /* Remove the jump and cruft from the end of the COMBO block. */
1793 tidy_fallthru_edge (combo_bb->succ, combo_bb, join_bb);
1796 /* Make sure we update life info properly. */
1797 SET_UPDATE_LIFE (combo_bb);
1799 num_updated_if_blocks++;
1802 /* Find a block ending in a simple IF condition. Return TRUE if
1803 we were able to transform it in some way. */
1806 find_if_header (test_bb)
1807 basic_block test_bb;
1812 /* The kind of block we're looking for has exactly two successors. */
1813 if ((then_edge = test_bb->succ) == NULL_EDGE
1814 || (else_edge = then_edge->succ_next) == NULL_EDGE
1815 || else_edge->succ_next != NULL_EDGE)
1818 /* Neither edge should be abnormal. */
1819 if ((then_edge->flags & EDGE_COMPLEX)
1820 || (else_edge->flags & EDGE_COMPLEX))
1823 /* The THEN edge is canonically the one that falls through. */
1824 if (then_edge->flags & EDGE_FALLTHRU)
1826 else if (else_edge->flags & EDGE_FALLTHRU)
     /* The fallthru edge was the second one; swap the two edges so the
        canonical ordering holds below.  */
1829 else_edge = then_edge;
1833 /* Otherwise this must be a multiway branch of some sort. */
     /* Try the full diamond/triangle conversion first, then the two
        partial "dead or predicable" cases.  */
1836 if (find_if_block (test_bb, then_edge, else_edge))
1839 && (! HAVE_conditional_execution || reload_completed))
1841 if (find_if_case_1 (test_bb, then_edge, else_edge))
1843 if (find_if_case_2 (test_bb, then_edge, else_edge))
1851 fprintf (rtl_dump_file, "Conversion succeeded.\n");
1855 /* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
1856 block. If so, we'll try to convert the insns to not require the branch.
1857 Return TRUE if we were successful at converting the block. */
1860 find_if_block (test_bb, then_edge, else_edge)
1861 basic_block test_bb;
1862 edge then_edge, else_edge;
1864 basic_block then_bb = then_edge->dest;
1865 basic_block else_bb = else_edge->dest;
1866 basic_block join_bb = NULL_BLOCK;
1867 edge then_succ = then_bb->succ;
1868 edge else_succ = else_bb->succ;
1871 /* The THEN block of an IF-THEN combo must have exactly one predecessor. */
1872 if (then_bb->pred->pred_next != NULL_EDGE)
1875 /* The THEN block of an IF-THEN combo must have zero or one successors. */
1876 if (then_succ != NULL_EDGE
1877 && (then_succ->succ_next != NULL_EDGE
1878 || (then_succ->flags & EDGE_COMPLEX)))
1881 /* If the THEN block has no successors, conditional execution can still
1882 make a conditional call. Don't do this unless the ELSE block has
1883 only one incoming edge -- the CFG manipulation is too ugly otherwise.
1884 Check for the last insn of the THEN block being an indirect jump, which
1885 is listed as not having any successors, but confuses the rest of the CE
1886 code processing. XXX we should fix this in the future. */
1887 if (then_succ == NULL)
1889 if (else_bb->pred->pred_next == NULL_EDGE)
1891 rtx last_insn = then_bb->end;
     /* Skip trailing notes to find the real last insn.  */
1894 && GET_CODE (last_insn) == NOTE
1895 && last_insn != then_bb->head)
1896 last_insn = PREV_INSN (last_insn);
     /* Reject a non-simple (e.g. indirect) jump at the end of THEN.  */
1899 && GET_CODE (last_insn) == JUMP_INSN
1900 && ! simplejump_p (last_insn))
1904 else_bb = NULL_BLOCK;
1910 /* If the THEN block's successor is the other edge out of the TEST block,
1911 then we have an IF-THEN combo without an ELSE. */
1912 else if (then_succ->dest == else_bb)
1915 else_bb = NULL_BLOCK;
1918 /* If the THEN and ELSE block meet in a subsequent block, and the ELSE
1919 has exactly one predecessor and one successor, and the outgoing edge
1920 is not complex, then we have an IF-THEN-ELSE combo. */
1921 else if (else_succ != NULL_EDGE
1922 && then_succ->dest == else_succ->dest
1923 && else_bb->pred->pred_next == NULL_EDGE
1924 && else_succ->succ_next == NULL_EDGE
1925 && ! (else_succ->flags & EDGE_COMPLEX))
1926 join_bb = else_succ->dest;
1928 /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination. */
1932 num_possible_if_blocks++;
1937 fprintf (rtl_dump_file,
1938 "\nIF-THEN-ELSE block found, start %d, then %d, else %d, join %d\n",
1939 test_bb->index, then_bb->index, else_bb->index,
1942 fprintf (rtl_dump_file,
1943 "\nIF-THEN block found, start %d, then %d, join %d\n",
1944 test_bb->index, then_bb->index, join_bb->index);
1947 /* Make sure IF, THEN, and ELSE, blocks are adjacent. Actually, we
1948 get the first condition for free, since we've already asserted that
1949 there's a fallthru edge from IF to THEN. */
1950 /* ??? As an enhancement, move the ELSE block. Have to deal with
1951 BLOCK notes, if by no other means than aborting the merge if they
1952 exist. Sticky enough I don't want to think about it now. */
1953 next_index = then_bb->index;
1954 if (else_bb && ++next_index != else_bb->index)
1956 if (++next_index != join_bb->index)
1964 /* Do the real work. */
1965 return process_if_block (test_bb, then_bb, else_bb, join_bb);
1968 /* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
1969 transformable, but not necessarily the other. There need be no
1972 Return TRUE if we were successful at converting the block.
1974 Cases we'd like to look at:
1977 if (test) goto over; // x not live
1985 if (! test) goto label;
1988 if (test) goto E; // x not live
2002 (3) // This one's really only interesting for targets that can do
2003 // multiway branching, e.g. IA-64 BBB bundles. For other targets
2004 // it results in multiple branches on a cache line, which often
2005 // does not sit well with predictors.
2007 if (test1) goto E; // predicted not taken
2023 (A) Don't do (2) if the branch is predicted against the block we're
2024 eliminating. Do it anyway if we can eliminate a branch; this requires
2025 that the sole successor of the eliminated block postdominate the other
2028 (B) With CE, on (3) we can steal from both sides of the if, creating
2037 Again, this is most useful if J postdominates.
2039 (C) CE substitutes for helpful life information.
2041 (D) These heuristics need a lot of work. */
2043 /* Tests for case 1 above. */
     /* THEN is a small block that either exits or jumps elsewhere and
        whose results are dead (or predicable) at the branch; convert by
        moving/predicating THEN's insns and deleting the block.  */
2046 find_if_case_1 (test_bb, then_edge, else_edge)
2047 basic_block test_bb;
2048 edge then_edge, else_edge;
2050 basic_block then_bb = then_edge->dest;
2051 basic_block else_bb = else_edge->dest;
2052 edge then_succ = then_bb->succ;
2055 /* THEN has one successor. */
2056 if (!then_succ || then_succ->succ_next != NULL)
2059 /* THEN does not fall through, but is not strange either. */
2060 if (then_succ->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
2063 /* THEN has one predecessor. */
2064 if (then_bb->pred->pred_next != NULL)
2067 /* ELSE follows THEN. (??? could be moved) */
2068 if (else_bb->index != then_bb->index + 1)
2071 num_possible_if_blocks++;
2073 fprintf (rtl_dump_file,
2074 "\nIF-CASE-1 found, start %d, then %d\n",
2075 test_bb->index, then_bb->index);
2077 /* THEN is small. */
2078 if (count_bb_insns (then_bb) > BRANCH_COST)
2081 /* Find the label for THEN's destination. */
2082 if (then_succ->dest == EXIT_BLOCK_PTR)
2086 new_lab = JUMP_LABEL (then_bb->end);
2091 /* Registers set are dead, or are predicable. */
2092 if (! dead_or_predicable (test_bb, then_bb, else_bb, new_lab, 1))
2095 /* Conversion went ok, including moving the insns and fixing up the
2096 jump. Adjust the CFG to match. */
2098 SET_UPDATE_LIFE (test_bb);
     /* TEST now reaches both ELSE and THEN's old successor; merge their
        live-in sets into TEST's live-out.  */
2099 bitmap_operation (test_bb->global_live_at_end,
2100 else_bb->global_live_at_start,
2101 then_bb->global_live_at_end, BITMAP_IOR);
2103 make_edge (NULL, test_bb, then_succ->dest, 0);
2104 flow_delete_block (then_bb);
2105 tidy_fallthru_edge (else_edge, test_bb, else_bb);
2107 num_removed_blocks++;
2108 num_updated_if_blocks++;
2113 /* Test for case 2 above. */
     /* ELSE is a small block whose sole successor either exits or is
        postdominating; convert by moving/predicating ELSE's insns into
        TEST and deleting the block.  Returns TRUE on success.  */
2116 find_if_case_2 (test_bb, then_edge, else_edge)
2117 basic_block test_bb;
2118 edge then_edge, else_edge;
2120 basic_block then_bb = then_edge->dest;
2121 basic_block else_bb = else_edge->dest;
2122 edge else_succ = else_bb->succ;
2125 /* ELSE has one successor. */
2126 if (!else_succ || else_succ->succ_next != NULL)
2129 /* ELSE outgoing edge is not complex. */
2130 if (else_succ->flags & EDGE_COMPLEX)
2133 /* ELSE has one predecessor. */
2134 if (else_bb->pred->pred_next != NULL)
2137 /* THEN is not EXIT. */
2138 if (then_bb->index < 0)
2141 /* ELSE is predicted or SUCC(ELSE) postdominates THEN. */
2142 note = find_reg_note (test_bb->end, REG_BR_PROB, NULL_RTX);
2143 if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
2145 else if (else_succ->dest->index < 0
2146 || TEST_BIT (post_dominators[ORIG_INDEX (then_bb)],
2147 ORIG_INDEX (else_succ->dest)))
2152 num_possible_if_blocks++;
2154 fprintf (rtl_dump_file,
2155 "\nIF-CASE-2 found, start %d, else %d\n",
2156 test_bb->index, else_bb->index);
2158 /* ELSE is small. */
     /* Fixed: measure ELSE_BB -- the block being made dead/predicated --
        not THEN_BB.  The old code counted then_bb, contradicting the
        comment above and the symmetric "THEN is small" check in
        find_if_case_1.  */
2159 if (count_bb_insns (else_bb) > BRANCH_COST)
2162 /* Find the label for ELSE's destination. */
2163 if (else_succ->dest == EXIT_BLOCK_PTR)
2167 if (else_succ->flags & EDGE_FALLTHRU)
2169 new_lab = else_succ->dest->head;
2170 if (GET_CODE (new_lab) != CODE_LABEL)
2175 new_lab = JUMP_LABEL (else_bb->end);
2181 /* Registers set are dead, or are predicable. */
2182 if (! dead_or_predicable (test_bb, else_bb, then_bb, new_lab, 0))
2185 /* Conversion went ok, including moving the insns and fixing up the
2186 jump. Adjust the CFG to match. */
2188 SET_UPDATE_LIFE (test_bb);
     /* TEST now reaches both THEN and ELSE's old successor; merge their
        live-in sets into TEST's live-out.  */
2189 bitmap_operation (test_bb->global_live_at_end,
2190 then_bb->global_live_at_start,
2191 else_bb->global_live_at_end, BITMAP_IOR);
2193 remove_edge (else_edge);
2194 make_edge (NULL, test_bb, else_succ->dest, 0);
2195 flow_delete_block (else_bb);
2197 num_removed_blocks++;
2198 num_updated_if_blocks++;
2200 /* ??? We may now fallthru from one of THEN's successors into a join
2201 block. Rerun cleanup_cfg? Examine things manually? Wait? */
2206 /* A subroutine of dead_or_predicable called through for_each_rtx.
2207 Return 1 if a memory is found. */
     /* PX points at each sub-rtx in turn; DATA is unused.  */
2210 find_memory (px, data)
2212 void *data ATTRIBUTE_UNUSED;
2214 return GET_CODE (*px) == MEM;
2217 /* Used by the code above to perform the actual rtl transformations.
2218 Return TRUE if successful.
2220 TEST_BB is the block containing the conditional branch. MERGE_BB
2221 is the block containing the code to manipulate. NEW_DEST is the
2222 label TEST_BB should be branching to after the conversion.
2223 REVERSEP is true if the sense of the branch should be reversed. */
2226 dead_or_predicable (test_bb, merge_bb, other_bb, new_dest, reversep)
2227 basic_block test_bb, merge_bb, other_bb;
2231 rtx head, end, jump, earliest, old_dest;
2233 jump = test_bb->end;
2235 /* Find the extent of the real code in the merge block. */
2236 head = merge_bb->head;
2237 end = merge_bb->end;
2239 if (GET_CODE (head) == CODE_LABEL)
2240 head = NEXT_INSN (head);
2241 if (GET_CODE (head) == NOTE)
2245 head = end = NULL_RTX;
2248 head = NEXT_INSN (head);
2251 if (GET_CODE (end) == JUMP_INSN)
2255 head = end = NULL_RTX;
2258 end = PREV_INSN (end);
2261 /* Disable handling dead code by conditional execution if the machine needs
2262 to do anything funny with the tests, etc. */
2263 #ifndef IFCVT_MODIFY_TESTS
2264 if (HAVE_conditional_execution)
2266 /* In the conditional execution case, we have things easy. We know
2267 the condition is reversible. We don't have to check life info,
2268 because we're going to conditionally execute the code anyway.
2269 All that's left is making sure the insns involved can actually
2274 cond = cond_exec_get_condition (jump);
2278 prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
2280 prob_val = XEXP (prob_val, 0);
     /* When reversing, invert both the condition and the recorded
        branch probability.  */
2284 enum rtx_code rev = reversed_comparison_code (cond, jump);
2287 cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
2290 prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
2293 if (! cond_exec_process_insns (head, end, cond, prob_val, 0))
2301 /* In the non-conditional execution case, we have to verify that there
2302 are no trapping operations, no calls, no references to memory, and
2303 that any registers modified are dead at the branch site. */
2305 rtx insn, cond, prev;
2306 regset_head merge_set_head, tmp_head, test_live_head, test_set_head;
2307 regset merge_set, tmp, test_live, test_set;
2308 struct propagate_block_info *pbi;
2311 /* Check for no calls or trapping operations. */
2312 for (insn = head; ; insn = NEXT_INSN (insn))
2314 if (GET_CODE (insn) == CALL_INSN)
2318 if (may_trap_p (PATTERN (insn)))
2321 /* ??? Even non-trapping memories such as stack frame
2322 references must be avoided. For stores, we collect
2323 no lifetime info; for reads, we'd have to assert
2324 true_dependence false against every store in the
2326 if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
2333 if (! any_condjump_p (jump))
2336 /* Find the extent of the conditional. */
2337 cond = noce_get_condition (jump, &earliest);
     /* Collect the register sets needed for the safety check:
2342 MERGE_SET = set of registers set in MERGE_BB
2343 TEST_LIVE = set of registers live at EARLIEST
2344 TEST_SET = set of registers set between EARLIEST and the
2345 end of the block. */
2347 tmp = INITIALIZE_REG_SET (tmp_head);
2348 merge_set = INITIALIZE_REG_SET (merge_set_head);
2349 test_live = INITIALIZE_REG_SET (test_live_head);
2350 test_set = INITIALIZE_REG_SET (test_set_head);
2352 /* ??? bb->local_set is only valid during calculate_global_regs_live,
2353 so we must recompute usage for MERGE_BB. Not so bad, I suppose,
2354 since we've already asserted that MERGE_BB is small. */
2355 propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
2357 /* For small register class machines, don't lengthen lifetimes of
2358 hard registers before reload. */
2359 if (SMALL_REGISTER_CLASSES && ! reload_completed)
2361 EXECUTE_IF_SET_IN_BITMAP
2364 if (i < FIRST_PSEUDO_REGISTER
2366 && ! global_regs[i])
2371 /* For TEST, we're interested in a range of insns, not a whole block.
2372 Moreover, we're interested in the insns live from OTHER_BB. */
2374 COPY_REG_SET (test_live, other_bb->global_live_at_start);
2375 pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
     /* Walk backwards from the jump to EARLIEST, accumulating liveness.  */
2378 for (insn = jump; ; insn = prev)
2380 prev = propagate_one_insn (pbi, insn);
2381 if (insn == earliest)
2385 free_propagate_block_info (pbi);
2387 /* We can perform the transformation if
2388 MERGE_SET & (TEST_SET | TEST_LIVE)
2390 TEST_SET & merge_bb->global_live_at_start
     are both empty.  */
2393 bitmap_operation (tmp, test_set, test_live, BITMAP_IOR);
2394 bitmap_operation (tmp, tmp, merge_set, BITMAP_AND);
2395 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2397 bitmap_operation (tmp, test_set, merge_bb->global_live_at_start,
2399 EXECUTE_IF_SET_IN_BITMAP(tmp, 0, i, fail = 1);
2402 FREE_REG_SET (merge_set);
2403 FREE_REG_SET (test_live);
2404 FREE_REG_SET (test_set);
2411 /* We don't want to use normal invert_jump or redirect_jump because
2412 we don't want to delete_insn called. Also, we want to do our own
2413 change group management. */
2415 old_dest = JUMP_LABEL (jump);
2417 ? ! invert_jump_1 (jump, new_dest)
2418 : ! redirect_jump_1 (jump, new_dest)
2421 if (! apply_change_group ())
     /* Keep label reference counts and JUMP_LABEL consistent by hand,
        since we bypassed redirect_jump/invert_jump.  */
2425 LABEL_NUSES (old_dest) -= 1;
2427 LABEL_NUSES (new_dest) += 1;
2428 JUMP_LABEL (jump) = new_dest;
2431 invert_br_probabilities (jump);
2433 /* Move the insns out of MERGE_BB to before the branch. */
2436 if (end == merge_bb->end)
2437 merge_bb->end = PREV_INSN (head);
2439 head = squeeze_notes (head, end);
     /* Don't move block/loop boundary notes out of their region.  */
2440 if (GET_CODE (end) == NOTE
2441 && (NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_END
2442 || NOTE_LINE_NUMBER (end) == NOTE_INSN_BLOCK_BEG
2443 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_BEG
2444 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_END
2445 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_CONT
2446 || NOTE_LINE_NUMBER (end) == NOTE_INSN_LOOP_VTOP))
2450 end = PREV_INSN (end);
2453 reorder_insns (head, end, PREV_INSN (earliest));
2462 /* Main entry point for all if-conversion. */
2465 if_convert (x_life_data_ok)
2470 num_possible_if_blocks = 0;
2471 num_updated_if_blocks = 0;
2472 num_removed_blocks = 0;
2473 life_data_ok = (x_life_data_ok != 0);
2475 /* Free up basic_block_for_insn so that we don't have to keep it
2476 up to date, either here or in merge_blocks_nomove. */
2477 free_basic_block_vars (1);
2479 /* Compute postdominators if we think we'll use them. */
2480 post_dominators = NULL;
2481 if (HAVE_conditional_execution || life_data_ok)
2483 post_dominators = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
2484 calculate_dominance_info (NULL, post_dominators, CDI_POST_DOMINATORS);
2487 /* Record initial block numbers. */
2488 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2489 SET_ORIG_INDEX (BASIC_BLOCK (block_num), block_num);
2491 /* Go through each of the basic blocks looking for things to convert. */
2492 for (block_num = 0; block_num < n_basic_blocks; )
2494 basic_block bb = BASIC_BLOCK (block_num);
2495 if (find_if_header (bb))
2496 block_num = bb->index;
2501 if (post_dominators)
2502 sbitmap_vector_free (post_dominators);
2505 fflush (rtl_dump_file);
2507 /* Rebuild basic_block_for_insn for update_life_info and for gcse. */
2508 compute_bb_for_insn (get_max_uid ());
2510 /* Rebuild life info for basic blocks that require it. */
2511 if (num_removed_blocks && life_data_ok)
2513 sbitmap update_life_blocks = sbitmap_alloc (n_basic_blocks);
2514 sbitmap_zero (update_life_blocks);
2516 /* If we allocated new pseudos, we must resize the array for sched1. */
2517 if (max_regno < max_reg_num ())
2519 max_regno = max_reg_num ();
2520 allocate_reg_info (max_regno, FALSE, FALSE);
2523 for (block_num = 0; block_num < n_basic_blocks; block_num++)
2524 if (UPDATE_LIFE (BASIC_BLOCK (block_num)))
2525 SET_BIT (update_life_blocks, block_num);
2527 count_or_remove_death_notes (update_life_blocks, 1);
2528 /* ??? See about adding a mode that verifies that the initial
2529 set of blocks don't let registers come live. */
2530 update_life_info (update_life_blocks, UPDATE_LIFE_GLOBAL,
2531 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2532 | PROP_KILL_DEAD_CODE);
2534 sbitmap_free (update_life_blocks);
2537 /* Write the final stats. */
2538 if (rtl_dump_file && num_possible_if_blocks > 0)
2540 fprintf (rtl_dump_file,
2541 "\n%d possible IF blocks searched.\n",
2542 num_possible_if_blocks);
2543 fprintf (rtl_dump_file,
2544 "%d IF blocks converted.\n",
2545 num_updated_if_blocks);
2546 fprintf (rtl_dump_file,
2547 "%d basic blocks deleted.\n\n\n",
2548 num_removed_blocks);
2551 #ifdef ENABLE_CHECKING
2552 verify_flow_info ();