/* Convert tree expression to rtl instructions, for GNU compiler.
- Copyright (C) 1988-2013 Free Software Foundation, Inc.
+ Copyright (C) 1988-2020 Free Software Foundation, Inc.
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
+#include "target.h"
#include "rtl.h"
#include "tree.h"
-#include "flags.h"
-#include "function.h"
-#include "insn-config.h"
-#include "insn-attr.h"
+#include "predict.h"
+#include "memmodel.h"
+#include "tm_p.h"
+#include "optabs.h"
+#include "emit-rtl.h"
+#include "fold-const.h"
+#include "stor-layout.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
+#include "dojump.h"
+#include "explow.h"
#include "expr.h"
-#include "optabs.h"
#include "langhooks.h"
-#include "ggc.h"
-#include "basic-block.h"
-#include "tm_p.h"
-
-static bool prefer_and_bit_test (enum machine_mode, int);
-static void do_jump_by_parts_greater (tree, tree, int, rtx, rtx, int);
-static void do_jump_by_parts_equality (tree, tree, rtx, rtx, int);
-static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code, rtx,
- rtx, int);
-
-/* Invert probability if there is any. -1 stands for unknown. */
-static inline int
-inv (int prob)
-{
- return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
-}
+static bool prefer_and_bit_test (scalar_int_mode, int);
+static void do_jump (tree, rtx_code_label *, rtx_code_label *,
+ profile_probability);
+static void do_jump_by_parts_greater (scalar_int_mode, tree, tree, int,
+ rtx_code_label *, rtx_code_label *,
+ profile_probability);
+static void do_jump_by_parts_equality (scalar_int_mode, tree, tree,
+ rtx_code_label *, rtx_code_label *,
+ profile_probability);
+static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code,
+ rtx_code_label *, rtx_code_label *,
+ profile_probability);
/* At the start of a function, record that we have no previously-pushed
arguments waiting to be popped. */
{
if (inhibit_defer_pop == 0)
{
- if (pending_stack_adjust != 0)
- adjust_stack (GEN_INT (pending_stack_adjust));
+ if (maybe_ne (pending_stack_adjust, 0))
+ adjust_stack (gen_int_mode (pending_stack_adjust, Pmode));
pending_stack_adjust = 0;
}
}
-\f
-/* Expand conditional expressions. */
-/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
- LABEL is an rtx of code CODE_LABEL, in this function and all the
- functions here. */
+/* Remember pending_stack_adjust/stack_pointer_delta.
+ To be used around code that may call do_pending_stack_adjust (),
+ but the generated code could be discarded e.g. using delete_insns_since. */
void
-jumpifnot (tree exp, rtx label, int prob)
+save_pending_stack_adjust (saved_pending_stack_adjust *save)
{
- do_jump (exp, label, NULL_RTX, inv (prob));
+ save->x_pending_stack_adjust = pending_stack_adjust;
+ save->x_stack_pointer_delta = stack_pointer_delta;
}
-void
-jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
-{
- do_jump_1 (code, op0, op1, label, NULL_RTX, inv (prob));
-}
-
-/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
-
-void
-jumpif (tree exp, rtx label, int prob)
-{
- do_jump (exp, NULL_RTX, label, prob);
-}
+/* Restore the saved pending_stack_adjust/stack_pointer_delta. */
void
-jumpif_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
+restore_pending_stack_adjust (saved_pending_stack_adjust *save)
{
- do_jump_1 (code, op0, op1, NULL_RTX, label, prob);
+ if (inhibit_defer_pop == 0)
+ {
+ pending_stack_adjust = save->x_pending_stack_adjust;
+ stack_pointer_delta = save->x_stack_pointer_delta;
+ }
}
-
+\f
/* Used internally by prefer_and_bit_test. */
static GTY(()) rtx and_reg;
is preferred. */
static bool
-prefer_and_bit_test (enum machine_mode mode, int bitnum)
+prefer_and_bit_test (scalar_int_mode mode, int bitnum)
{
bool speed_p;
+ wide_int mask = wi::set_bit_in_zero (bitnum, GET_MODE_PRECISION (mode));
if (and_test == 0)
{
/* Set up rtxes for the two variations. Use NULL as a placeholder
for the BITNUM-based constants. */
- and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
+ and_reg = gen_rtx_REG (mode, LAST_VIRTUAL_REGISTER + 1);
and_test = gen_rtx_AND (mode, and_reg, NULL);
shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
const1_rtx);
}
/* Fill in the integers. */
- XEXP (and_test, 1)
- = immed_double_int_const (double_int_zero.set_bit (bitnum), mode);
+ XEXP (and_test, 1) = immed_wide_int_const (mask, mode);
XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
speed_p = optimize_insn_for_speed_p ();
- return (rtx_cost (and_test, IF_THEN_ELSE, 0, speed_p)
- <= rtx_cost (shift_test, IF_THEN_ELSE, 0, speed_p));
+ return (rtx_cost (and_test, mode, IF_THEN_ELSE, 0, speed_p)
+ <= rtx_cost (shift_test, mode, IF_THEN_ELSE, 0, speed_p));
}
/* Subroutine of do_jump, dealing with exploded comparisons of the type
OP0 CODE OP1 . IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
- PROB is probability of jump to if_true_label, or -1 if unknown. */
+ PROB is probability of jump to if_true_label. */
-void
+static void
do_jump_1 (enum tree_code code, tree op0, tree op1,
- rtx if_false_label, rtx if_true_label, int prob)
+ rtx_code_label *if_false_label, rtx_code_label *if_true_label,
+ profile_probability prob)
{
- enum machine_mode mode;
- rtx drop_through_label = 0;
+ machine_mode mode;
+ rtx_code_label *drop_through_label = 0;
+ scalar_int_mode int_mode;
switch (code)
{
!= MODE_COMPLEX_INT);
if (integer_zerop (op1))
- do_jump (op0, if_true_label, if_false_label, inv (prob));
- else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
- && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
- do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
- prob);
+ do_jump (op0, if_true_label, if_false_label,
+ prob.invert ());
+ else if (is_int_mode (TYPE_MODE (inner_type), &int_mode)
+ && !can_compare_p (EQ, int_mode, ccp_jump))
+ do_jump_by_parts_equality (int_mode, op0, op1, if_false_label,
+ if_true_label, prob);
else
do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
prob);
if (integer_zerop (op1))
do_jump (op0, if_false_label, if_true_label, prob);
- else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
- && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
- do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
- inv (prob));
+ else if (is_int_mode (TYPE_MODE (inner_type), &int_mode)
+ && !can_compare_p (NE, int_mode, ccp_jump))
+ do_jump_by_parts_equality (int_mode, op0, op1, if_true_label,
+ if_false_label, prob.invert ());
else
do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
prob);
case LT_EXPR:
mode = TYPE_MODE (TREE_TYPE (op0));
- if (GET_MODE_CLASS (mode) == MODE_INT
- && ! can_compare_p (LT, mode, ccp_jump))
- do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
- prob);
+ if (is_int_mode (mode, &int_mode)
+ && ! can_compare_p (LT, int_mode, ccp_jump))
+ do_jump_by_parts_greater (int_mode, op0, op1, 1, if_false_label,
+ if_true_label, prob);
else
do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
prob);
case LE_EXPR:
mode = TYPE_MODE (TREE_TYPE (op0));
- if (GET_MODE_CLASS (mode) == MODE_INT
- && ! can_compare_p (LE, mode, ccp_jump))
- do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
- inv (prob));
+ if (is_int_mode (mode, &int_mode)
+ && ! can_compare_p (LE, int_mode, ccp_jump))
+ do_jump_by_parts_greater (int_mode, op0, op1, 0, if_true_label,
+ if_false_label, prob.invert ());
else
do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
prob);
case GT_EXPR:
mode = TYPE_MODE (TREE_TYPE (op0));
- if (GET_MODE_CLASS (mode) == MODE_INT
- && ! can_compare_p (GT, mode, ccp_jump))
- do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
- prob);
+ if (is_int_mode (mode, &int_mode)
+ && ! can_compare_p (GT, int_mode, ccp_jump))
+ do_jump_by_parts_greater (int_mode, op0, op1, 0, if_false_label,
+ if_true_label, prob);
else
do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
prob);
case GE_EXPR:
mode = TYPE_MODE (TREE_TYPE (op0));
- if (GET_MODE_CLASS (mode) == MODE_INT
- && ! can_compare_p (GE, mode, ccp_jump))
- do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
- inv (prob));
+ if (is_int_mode (mode, &int_mode)
+ && ! can_compare_p (GE, int_mode, ccp_jump))
+ do_jump_by_parts_greater (int_mode, op0, op1, 1, if_true_label,
+ if_false_label, prob.invert ());
else
do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
prob);
break;
case TRUTH_ANDIF_EXPR:
- if (if_false_label == NULL_RTX)
- {
- drop_through_label = gen_label_rtx ();
- do_jump (op0, drop_through_label, NULL_RTX, prob);
- do_jump (op1, NULL_RTX, if_true_label, prob);
- }
- else
- {
- do_jump (op0, if_false_label, NULL_RTX, prob);
- do_jump (op1, if_false_label, if_true_label, prob);
- }
- break;
+ {
+ /* Spread the probability that the expression is false evenly between
+ the two conditions. So the first condition is false half the total
+ probability of being false. The second condition is false the other
+ half of the total probability of being false, so its jump has a false
+ probability of half the total, relative to the probability we
+ reached it (i.e. the first condition was true). */
+ profile_probability op0_prob = profile_probability::uninitialized ();
+ profile_probability op1_prob = profile_probability::uninitialized ();
+ if (prob.initialized_p ())
+ {
+ op1_prob = prob.invert ();
+ op0_prob = op1_prob.split (profile_probability::even ());
+ /* Get the probability that each jump below is true. */
+ op0_prob = op0_prob.invert ();
+ op1_prob = op1_prob.invert ();
+ }
+ if (if_false_label == NULL)
+ {
+ drop_through_label = gen_label_rtx ();
+ do_jump (op0, drop_through_label, NULL, op0_prob);
+ do_jump (op1, NULL, if_true_label, op1_prob);
+ }
+ else
+ {
+ do_jump (op0, if_false_label, NULL, op0_prob);
+ do_jump (op1, if_false_label, if_true_label, op1_prob);
+ }
+ break;
+ }
case TRUTH_ORIF_EXPR:
- if (if_true_label == NULL_RTX)
- {
- drop_through_label = gen_label_rtx ();
- do_jump (op0, NULL_RTX, drop_through_label, prob);
- do_jump (op1, if_false_label, NULL_RTX, prob);
- }
- else
- {
- do_jump (op0, NULL_RTX, if_true_label, prob);
- do_jump (op1, if_false_label, if_true_label, prob);
- }
- break;
+ {
+ /* Spread the probability evenly between the two conditions. So
+ the first condition has half the total probability of being true.
+ The second condition has the other half of the total probability,
+ so its jump has a probability of half the total, relative to
+ the probability we reached it (i.e. the first condition was false). */
+ profile_probability op0_prob = profile_probability::uninitialized ();
+ profile_probability op1_prob = profile_probability::uninitialized ();
+ if (prob.initialized_p ())
+ {
+ op1_prob = prob;
+ op0_prob = op1_prob.split (profile_probability::even ());
+ }
+ if (if_true_label == NULL)
+ {
+ drop_through_label = gen_label_rtx ();
+ do_jump (op0, NULL, drop_through_label, op0_prob);
+ do_jump (op1, if_false_label, NULL, op1_prob);
+ }
+ else
+ {
+ do_jump (op0, NULL, if_true_label, op0_prob);
+ do_jump (op1, if_false_label, if_true_label, op1_prob);
+ }
+ break;
+ }
default:
gcc_unreachable ();
actually perform a jump. An example where there is no jump
is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
- PROB is probability of jump to if_true_label, or -1 if unknown. */
+ PROB is probability of jump to if_true_label. */
-void
-do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
+static void
+do_jump (tree exp, rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label, profile_probability prob)
{
enum tree_code code = TREE_CODE (exp);
rtx temp;
int i;
tree type;
- enum machine_mode mode;
- rtx drop_through_label = 0;
+ scalar_int_mode mode;
+ rtx_code_label *drop_through_label = NULL;
switch (code)
{
break;
case INTEGER_CST:
- temp = integer_zerop (exp) ? if_false_label : if_true_label;
- if (temp)
- emit_jump (temp);
- break;
+ {
+ rtx_code_label *lab = integer_zerop (exp) ? if_false_label
+ : if_true_label;
+ if (lab)
+ emit_jump (lab);
+ break;
+ }
#if 0
/* This is not true with #pragma weak */
|| TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
|| TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
goto normal;
+ /* FALLTHRU */
case CONVERT_EXPR:
/* If we are narrowing the operand, we have to do the compare in the
narrower mode. */
if ((TYPE_PRECISION (TREE_TYPE (exp))
< TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
goto normal;
+ /* FALLTHRU */
case NON_LVALUE_EXPR:
case ABS_EXPR:
+ case ABSU_EXPR:
case NEGATE_EXPR:
case LROTATE_EXPR:
case RROTATE_EXPR:
case TRUTH_NOT_EXPR:
do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
- inv (prob));
+ prob.invert ());
break;
case COND_EXPR:
{
- rtx label1 = gen_label_rtx ();
+ rtx_code_label *label1 = gen_label_rtx ();
if (!if_true_label || !if_false_label)
{
drop_through_label = gen_label_rtx ();
}
do_pending_stack_adjust ();
- do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
+ do_jump (TREE_OPERAND (exp, 0), label1, NULL,
+ profile_probability::uninitialized ());
do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
emit_label (label1);
do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
if (integer_onep (TREE_OPERAND (exp, 1)))
{
tree exp0 = TREE_OPERAND (exp, 0);
- rtx set_label, clr_label;
- int setclr_prob = prob;
+ rtx_code_label *set_label, *clr_label;
+ profile_probability setclr_prob = prob;
/* Strip narrowing integral type conversions. */
while (CONVERT_EXPR_P (exp0)
exp0 = TREE_OPERAND (exp0, 0);
clr_label = if_true_label;
set_label = if_false_label;
- setclr_prob = inv (prob);
+ setclr_prob = prob.invert ();
}
else
{
if (TREE_CODE (shift) == INTEGER_CST
&& compare_tree_int (shift, 0) >= 0
&& compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
- && prefer_and_bit_test (TYPE_MODE (argtype),
+ && prefer_and_bit_test (SCALAR_INT_TYPE_MODE (argtype),
TREE_INT_CST_LOW (shift)))
{
unsigned HOST_WIDE_INT mask
- = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
+ = HOST_WIDE_INT_1U << TREE_INT_CST_LOW (shift);
do_jump (build2 (BIT_AND_EXPR, argtype, arg,
build_int_cstu (argtype, mask)),
clr_label, set_label, setclr_prob);
&& TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
&& TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
&& (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
- && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
+ && int_mode_for_size (i + 1, 0).exists (&mode)
&& (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
&& TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
&& have_insn_for (COMPARE, TYPE_MODE (type)))
goto normal;
/* Boolean comparisons can be compiled as TRUTH_AND_EXPR. */
+ /* FALLTHRU */
case TRUTH_AND_EXPR:
/* High branch cost, expand as the bitwise AND of the conditions.
Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
static void
-do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
- rtx op1, rtx if_false_label, rtx if_true_label,
- int prob)
+do_jump_by_parts_greater_rtx (scalar_int_mode mode, int unsignedp, rtx op0,
+ rtx op1, rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label,
+ profile_probability prob)
{
int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
- rtx drop_through_label = 0;
+ rtx_code_label *drop_through_label = 0;
bool drop_through_if_true = false, drop_through_if_false = false;
enum rtx_code code = GT;
int i;
code = LE;
if_true_label = if_false_label;
if_false_label = drop_through_label;
- drop_through_if_true = false;
- drop_through_if_false = true;
+ prob = prob.invert ();
}
/* Compare a word at a time, high order first. */
/* All but high-order word must be compared as unsigned. */
do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
- word_mode, NULL_RTX, NULL_RTX, if_true_label,
+ word_mode, NULL_RTX, NULL, if_true_label,
prob);
/* Emit only one comparison for 0. Do not emit the last cond jump. */
/* Consider lower words only if these are equal. */
do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
- NULL_RTX, NULL_RTX, if_false_label, inv (prob));
+ NULL_RTX, NULL, if_false_label,
+ prob.invert ());
}
if (!drop_through_if_false)
/* Given a comparison expression EXP for values too wide to be compared
with one insn, test the comparison and jump to the appropriate label.
The code of EXP is ignored; we always test GT if SWAP is 0,
- and LT if SWAP is 1. */
+ and LT if SWAP is 1. MODE is the mode of the two operands. */
static void
-do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
- rtx if_false_label, rtx if_true_label, int prob)
+do_jump_by_parts_greater (scalar_int_mode mode, tree treeop0, tree treeop1,
+ int swap, rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label,
+ profile_probability prob)
{
rtx op0 = expand_normal (swap ? treeop1 : treeop0);
rtx op1 = expand_normal (swap ? treeop0 : treeop1);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
\f
/* Jump according to whether OP0 is 0. We assume that OP0 has an integer
mode, MODE, that is too wide for the available compare insns.
- Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
+ Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL
to indicate drop through. */
static void
-do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
- rtx if_false_label, rtx if_true_label, int prob)
+do_jump_by_parts_zero_rtx (scalar_int_mode mode, rtx op0,
+ rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label,
+ profile_probability prob)
{
int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
rtx part;
int i;
- rtx drop_through_label = 0;
+ rtx_code_label *drop_through_label = NULL;
/* The fastest way of doing this comparison on almost any machine is to
"or" all the words and compare the result. If all have to be loaded
/* If we couldn't do the "or" simply, do this with a series of compares. */
if (! if_false_label)
- drop_through_label = if_false_label = gen_label_rtx ();
+ if_false_label = drop_through_label = gen_label_rtx ();
for (i = 0; i < nwords; i++)
do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
const0_rtx, EQ, 1, word_mode, NULL_RTX,
- if_false_label, NULL_RTX, prob);
+ if_false_label, NULL, prob);
if (if_true_label)
emit_jump (if_true_label);
to indicate drop through. */
static void
-do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
- rtx if_false_label, rtx if_true_label, int prob)
+do_jump_by_parts_equality_rtx (scalar_int_mode mode, rtx op0, rtx op1,
+ rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label,
+ profile_probability prob)
{
int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
- rtx drop_through_label = 0;
+ rtx_code_label *drop_through_label = NULL;
int i;
if (op1 == const0_rtx)
do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
operand_subword_force (op1, i, mode),
EQ, 0, word_mode, NULL_RTX,
- if_false_label, NULL_RTX, prob);
+ if_false_label, NULL, prob);
if (if_true_label)
emit_jump (if_true_label);
}
/* Given an EQ_EXPR expression EXP for values too wide to be compared
- with one insn, test the comparison and jump to the appropriate label. */
+ with one insn, test the comparison and jump to the appropriate label.
+ MODE is the mode of the two operands. */
static void
-do_jump_by_parts_equality (tree treeop0, tree treeop1, rtx if_false_label,
- rtx if_true_label, int prob)
+do_jump_by_parts_equality (scalar_int_mode mode, tree treeop0, tree treeop1,
+ rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label,
+ profile_probability prob)
{
rtx op0 = expand_normal (treeop0);
rtx op1 = expand_normal (treeop1);
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
if_true_label, prob);
}
the conditions must be ANDed, false if they must be ORed. */
bool
-split_comparison (enum rtx_code code, enum machine_mode mode,
+split_comparison (enum rtx_code code, machine_mode mode,
enum rtx_code *code1, enum rtx_code *code2)
{
switch (code)
return false;
case LTGT:
/* Do not turn a trapping comparison into a non-trapping one. */
- if (HONOR_SNANS (mode))
+ if (HONOR_NANS (mode))
{
*code1 = LT;
*code2 = GT;
}
}
+/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.
+ PROB is probability of jump to LABEL. */
+
+void
+jumpif (tree exp, rtx_code_label *label, profile_probability prob)
+{
+ do_jump (exp, NULL, label, prob);
+}
+
+/* Similar to jumpif but dealing with exploded comparisons of the type
+ OP0 CODE OP1 . LABEL and PROB are like in jumpif. */
+
+void
+jumpif_1 (enum tree_code code, tree op0, tree op1, rtx_code_label *label,
+ profile_probability prob)
+{
+ do_jump_1 (code, op0, op1, NULL, label, prob);
+}
+
+/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
+ PROB is probability of jump to LABEL. */
+
+void
+jumpifnot (tree exp, rtx_code_label *label, profile_probability prob)
+{
+ do_jump (exp, label, NULL, prob.invert ());
+}
+
+/* Similar to jumpifnot but dealing with exploded comparisons of the type
+ OP0 CODE OP1 . LABEL and PROB are like in jumpifnot. */
+
+void
+jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx_code_label *label,
+ profile_probability prob)
+{
+ do_jump_1 (code, op0, op1, label, NULL, prob.invert ());
+}
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
The decision as to signed or unsigned comparison must be made by the caller.
void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
- enum machine_mode mode, rtx size, rtx if_false_label,
- rtx if_true_label, int prob)
+ machine_mode mode, rtx size,
+ rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label,
+ profile_probability prob)
{
rtx tem;
- rtx dummy_label = NULL_RTX;
+ rtx_code_label *dummy_label = NULL;
/* Reverse the comparison if that is safe and we want to jump if it is
false. Also convert to the reverse comparison if the target can
if (can_compare_p (rcode, mode, ccp_jump)
|| (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
{
- tem = if_true_label;
- if_true_label = if_false_label;
- if_false_label = tem;
+ std::swap (if_true_label, if_false_label);
code = rcode;
- prob = inv (prob);
+ prob = prob.invert ();
}
}
if (swap_commutative_operands_p (op0, op1))
{
- tem = op0;
- op0 = op1;
- op1 = tem;
+ std::swap (op0, op1);
code = swap_condition (code);
}
do_pending_stack_adjust ();
code = unsignedp ? unsigned_condition (code) : code;
- if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
- op0, op1)))
+ if ((tem = simplify_relational_operation (code, mode, VOIDmode,
+ op0, op1)) != 0)
{
if (CONSTANT_P (tem))
{
- rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
- ? if_false_label : if_true_label;
+ rtx_code_label *label = (tem == const0_rtx
+ || tem == CONST0_RTX (mode))
+ ? if_false_label : if_true_label;
if (label)
emit_jump (label);
return;
if (! if_true_label)
dummy_label = if_true_label = gen_label_rtx ();
- if (GET_MODE_CLASS (mode) == MODE_INT
- && ! can_compare_p (code, mode, ccp_jump))
+ scalar_int_mode int_mode;
+ if (is_int_mode (mode, &int_mode)
+ && ! can_compare_p (code, int_mode, ccp_jump))
{
switch (code)
{
case LTU:
- do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
+ do_jump_by_parts_greater_rtx (int_mode, 1, op1, op0,
if_false_label, if_true_label, prob);
break;
case LEU:
- do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
+ do_jump_by_parts_greater_rtx (int_mode, 1, op0, op1,
if_true_label, if_false_label,
- inv (prob));
+ prob.invert ());
break;
case GTU:
- do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
+ do_jump_by_parts_greater_rtx (int_mode, 1, op0, op1,
if_false_label, if_true_label, prob);
break;
case GEU:
- do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
+ do_jump_by_parts_greater_rtx (int_mode, 1, op1, op0,
if_true_label, if_false_label,
- inv (prob));
+ prob.invert ());
break;
case LT:
- do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
+ do_jump_by_parts_greater_rtx (int_mode, 0, op1, op0,
if_false_label, if_true_label, prob);
break;
case LE:
- do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
+ do_jump_by_parts_greater_rtx (int_mode, 0, op0, op1,
if_true_label, if_false_label,
- inv (prob));
+ prob.invert ());
break;
case GT:
- do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
+ do_jump_by_parts_greater_rtx (int_mode, 0, op0, op1,
if_false_label, if_true_label, prob);
break;
case GE:
- do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
+ do_jump_by_parts_greater_rtx (int_mode, 0, op1, op0,
if_true_label, if_false_label,
- inv (prob));
+ prob.invert ());
break;
case EQ:
- do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
+ do_jump_by_parts_equality_rtx (int_mode, op0, op1, if_false_label,
if_true_label, prob);
break;
case NE:
- do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
- if_false_label, inv (prob));
+ do_jump_by_parts_equality_rtx (int_mode, op0, op1, if_true_label,
+ if_false_label,
+ prob.invert ());
break;
default:
&& ! can_compare_p (code, mode, ccp_jump)
&& can_compare_p (swap_condition (code), mode, ccp_jump))
{
- rtx tmp;
code = swap_condition (code);
- tmp = op0;
- op0 = op1;
- op1 = tmp;
+ std::swap (op0, op1);
}
else if (SCALAR_FLOAT_MODE_P (mode)
&& ! can_compare_p (code, mode, ccp_jump)
else
{
+ profile_probability cprob
+ = profile_probability::guessed_always ();
+ if (first_code == UNORDERED)
+ cprob = cprob.apply_scale (1, 100);
+ else if (first_code == ORDERED)
+ cprob = cprob.apply_scale (99, 100);
+ else
+ cprob = profile_probability::even ();
+ /* We want to split:
+ if (x) goto t; // prob;
+ into
+ if (a) goto t; // first_prob;
+ if (b) goto t; // prob;
+ such that the overall probability of jumping to t
+ remains the same and first_prob is prob * cprob. */
if (and_them)
{
- rtx dest_label;
+ rtx_code_label *dest_label;
+ prob = prob.invert ();
+ profile_probability first_prob = prob.split (cprob).invert ();
+ prob = prob.invert ();
/* If we only jump if true, just bypass the second jump. */
if (! if_false_label)
{
else
dest_label = if_false_label;
do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
- size, dest_label, NULL_RTX, prob);
+ size, dest_label, NULL, first_prob);
}
else
- do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
- size, NULL_RTX, if_true_label, prob);
+ {
+ profile_probability first_prob = prob.split (cprob);
+ do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
+ size, NULL, if_true_label, first_prob);
+ }
}
}
static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
- enum rtx_code unsigned_code, rtx if_false_label,
- rtx if_true_label, int prob)
+ enum rtx_code unsigned_code,
+ rtx_code_label *if_false_label,
+ rtx_code_label *if_true_label, profile_probability prob)
{
rtx op0, op1;
tree type;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp;
enum rtx_code code;
return;
type = TREE_TYPE (treeop0);
- mode = TYPE_MODE (type);
if (TREE_CODE (treeop0) == INTEGER_CST
&& (TREE_CODE (treeop1) != INTEGER_CST
- || (GET_MODE_BITSIZE (mode)
- > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
- {
- /* op0 might have been replaced by promoted constant, in which
- case the type of second argument should be used. */
- type = TREE_TYPE (treeop1);
- mode = TYPE_MODE (type);
- }
+ || (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type))
+ > GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (treeop1))))))
+ /* op0 might have been replaced by promoted constant, in which
+ case the type of second argument should be used. */
+ type = TREE_TYPE (treeop1);
+ mode = TYPE_MODE (type);
unsignedp = TYPE_UNSIGNED (type);
code = unsignedp ? unsigned_code : signed_code;
-#ifdef HAVE_canonicalize_funcptr_for_compare
/* If function pointers need to be "canonicalized" before they can
- be reliably compared, then canonicalize them.
- Only do this if *both* sides of the comparison are function pointers.
- If one side isn't, we want a noncanonicalized comparison. See PR
- middle-end/17564. */
- if (HAVE_canonicalize_funcptr_for_compare
- && TREE_CODE (TREE_TYPE (treeop0)) == POINTER_TYPE
- && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0)))
- == FUNCTION_TYPE
- && TREE_CODE (TREE_TYPE (treeop1)) == POINTER_TYPE
- && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1)))
- == FUNCTION_TYPE)
+ be reliably compared, then canonicalize them. Canonicalize the
+ expression when one of the operands is a function pointer. This
+ handles the case where the other operand is a void pointer. See
+ PR middle-end/17564. */
+ if (targetm.have_canonicalize_funcptr_for_compare ()
+ && ((POINTER_TYPE_P (TREE_TYPE (treeop0))
+ && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0))))
+ || (POINTER_TYPE_P (TREE_TYPE (treeop1))
+ && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (treeop1))))))
{
rtx new_op0 = gen_reg_rtx (mode);
rtx new_op1 = gen_reg_rtx (mode);
- emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
+ emit_insn (targetm.gen_canonicalize_funcptr_for_compare (new_op0, op0));
op0 = new_op0;
- emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
+ emit_insn (targetm.gen_canonicalize_funcptr_for_compare (new_op1, op1));
op1 = new_op1;
}
-#endif
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
((mode == BLKmode)