+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * builtins.c, config/alpha/alpha.c, config/c6x/predicates.md,
+ config/ia64/predicates.md, config/iq2000/iq2000.c, config/mips/mips.c,
+ config/s390/s390.c, dbxout.c, dwarf2out.c, except.c, explow.c, expr.c,
+ expr.h, fold-const.c, gimple-fold.c, gimple-ssa-strength-reduction.c,
+ gimple.c, godump.c, graphite-scop-detection.c, graphite-sese-to-poly.c,
+ omp-low.c, predict.c, rtlanal.c, sdbout.c, simplify-rtx.c,
+ stor-layout.c, tree-data-ref.c, tree-dfa.c, tree-pretty-print.c,
+ tree-sra.c, tree-ssa-alias.c, tree-ssa-forwprop.c,
+ tree-ssa-loop-ivopts.c, tree-ssa-loop-prefetch.c, tree-ssa-math-opts.c,
+ tree-ssa-phiopt.c, tree-ssa-reassoc.c, tree-ssa-sccvn.c,
+ tree-ssa-strlen.c, tree-ssa-structalias.c, tree-vect-data-refs.c,
+ tree-vect-patterns.c, tree-vectorizer.h, tree.c, var-tracking.c,
+ varasm.c: Replace host_integerp (..., 0) with tree_fits_shwi_p
+ throughout.
+
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
* tree.h (tree_fits_shwi_p, tree_fits_uhwi_p): Declare.
* tree.c (tree_fits_shwi_p, tree_fits_uhwi_p): Define.
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * gcc-interface/cuintp.c: Replace host_integerp (..., 0) with
+ tree_fits_shwi_p throughout.
+
2013-11-18 Eric Botcazou <ebotcazou@adacore.com>
* gcc-interface/trans.c (TARGET_ABI_OPEN_VMS): Delete as redundant.
/* On 64-bit hosts, host_integerp tells whether the input fits in a
signed 64-bit integer. Then a truncation tells whether it fits
in a signed 32-bit integer. */
- if (host_integerp (Input, 0))
+ if (tree_fits_shwi_p (Input))
{
HOST_WIDE_INT hw_input = TREE_INT_CST_LOW (Input);
if (hw_input == (int) hw_input)
/* On 32-bit hosts, host_integerp tells whether the input fits in a
signed 32-bit integer. Then a sign test tells whether it fits
in a signed 64-bit integer. */
- if (host_integerp (Input, 0))
+ if (tree_fits_shwi_p (Input))
return UI_From_Int (TREE_INT_CST_LOW (Input));
else if (TREE_INT_CST_HIGH (Input) < 0 && TYPE_UNSIGNED (gnu_type))
return No_Uint;
a null character if we can represent it as a single HOST_WIDE_INT. */
if (offset_node == 0)
offset = 0;
- else if (! host_integerp (offset_node, 0))
+ else if (! tree_fits_shwi_p (offset_node))
offset = -1;
else
offset = tree_low_cst (offset_node, 0);
weak = CALL_EXPR_ARG (exp, 3);
is_weak = false;
- if (host_integerp (weak, 0) && tree_low_cst (weak, 0) != 0)
+ if (tree_fits_shwi_p (weak) && tree_low_cst (weak, 0) != 0)
is_weak = true;
oldval = expect;
if (real_onep (arg0))
return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
- if (host_integerp (arg1, 0))
+ if (tree_fits_shwi_p (arg1))
{
HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
return NULL_TREE;
}
- if (!host_integerp (len, 0))
+ if (!tree_fits_shwi_p (len))
return NULL_TREE;
/* FIXME:
This logic lose for arguments like (type *)malloc (sizeof (type)),
/* If both arguments are constant, then try to evaluate it. */
if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
&& TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
- && host_integerp (arg1, 0))
+ && tree_fits_shwi_p (arg1))
{
/* Bound the maximum adjustment to twice the range of the
mode's valid exponents. Use abs to ensure the range is
/* To proceed, MPFR must exactly represent the target floating point
format, which only happens when the target base equals two. */
if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
- && host_integerp (arg1, 0)
+ && tree_fits_shwi_p (arg1)
&& TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
{
const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * c-ada-spec.c, c-common.c, c-format.c, c-pretty-print.c: Replace
+ host_integerp (..., 0) with tree_fits_shwi_p throughout.
+
2013-11-15 Aldy Hernandez <aldyh@redhat.com>
* c-cilkplus.c: New file.
if (TREE_CODE (int_val) != INTEGER_CST)
int_val = DECL_INITIAL (int_val);
- if (!host_integerp (int_val, 0))
+ if (!tree_fits_shwi_p (int_val))
return false;
else if (TREE_INT_CST_LOW (int_val) != count)
return false;
to generate the (0 .. -1) range for flexible array members. */
if (TREE_TYPE (node) == sizetype)
node = fold_convert (ssizetype, node);
- if (host_integerp (node, 0))
+ if (tree_fits_shwi_p (node))
pp_wide_integer (buffer, TREE_INT_CST_LOW (node));
else if (host_integerp (node, 1))
pp_unsigned_wide_integer (buffer, TREE_INT_CST_LOW (node));
arg = TREE_VALUE (args);
arg = default_conversion (arg);
- if (!host_integerp (arg, /*pos=*/0)
+ if (!tree_fits_shwi_p (arg)
|| !INTEGRAL_TYPE_P (TREE_TYPE (arg)))
goto invalid;
if (TREE_CODE (op1) == BIT_NOT_EXPR)
op1 = c_common_get_narrower (TREE_OPERAND (op1, 0), &unsignedp1);
- if (host_integerp (op0, 0) || host_integerp (op1, 0))
+ if (tree_fits_shwi_p (op0) || tree_fits_shwi_p (op1))
{
tree primop;
HOST_WIDE_INT constant, mask;
int unsignedp;
unsigned int bits;
- if (host_integerp (op0, 0))
+ if (tree_fits_shwi_p (op0))
{
primop = op1;
unsignedp = unsignedp1;
res->number_non_literal++;
return;
}
- if (!host_integerp (arg1, 0)
+ if (!tree_fits_shwi_p (arg1)
|| (offset = tree_low_cst (arg1, 0)) < 0)
{
res->number_non_literal++;
return;
}
if (TREE_CODE (format_tree) == ARRAY_REF
- && host_integerp (TREE_OPERAND (format_tree, 1), 0)
+ && tree_fits_shwi_p (TREE_OPERAND (format_tree, 1))
&& (offset += tree_low_cst (TREE_OPERAND (format_tree, 1), 0)) >= 0)
format_tree = TREE_OPERAND (format_tree, 0);
if (TREE_CODE (format_tree) == VAR_DECL
/* Variable length arrays can't be initialized. */
gcc_assert (TREE_CODE (array_size) == INTEGER_CST);
- if (host_integerp (array_size, 0))
+ if (tree_fits_shwi_p (array_size))
{
HOST_WIDE_INT array_size_value = TREE_INT_CST_LOW (array_size);
if (array_size_value > 0
tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (t));
tree type = TREE_TYPE (maxval);
- if (host_integerp (maxval, 0))
+ if (tree_fits_shwi_p (maxval))
pp_wide_integer (this, tree_low_cst (maxval, 0) + 1);
else
expression (fold_build2 (PLUS_EXPR, type, maxval,
? TYPE_CANONICAL (TREE_TYPE (i))
: TREE_TYPE (i);
- if (host_integerp (i, 0))
+ if (tree_fits_shwi_p (i))
pp_wide_integer (pp, TREE_INT_CST_LOW (i));
else if (host_integerp (i, 1))
pp_unsigned_wide_integer (pp, TREE_INT_CST_LOW (i));
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * c-parser.c: Replace host_integerp (..., 0) with tree_fits_shwi_p
+ throughout.
+
2013-11-15 Aldy Hernandez <aldyh@redhat.com>
* c-parser.c (c_parser_cilk_simd): New.
mark_exp_read (num);
num = c_fully_fold (num, false, NULL);
if (!INTEGRAL_TYPE_P (TREE_TYPE (num))
- || !host_integerp (num, 0)
+ || !tree_fits_shwi_p (num)
|| (n = tree_low_cst (num, 0)) <= 0
|| (int) n != n)
{
else
goto escapes;
- if (!host_integerp (gimple_assign_rhs2 (arg2_stmt), 0))
+ if (!tree_fits_shwi_p (gimple_assign_rhs2 (arg2_stmt)))
goto escapes;
sub = tree_low_cst (gimple_assign_rhs2 (arg2_stmt), 0);
t = DECL_SIZE_UNIT (t);
else
t = TYPE_SIZE_UNIT (TREE_TYPE (t));
- if (t && host_integerp (t, 0))
+ if (t && tree_fits_shwi_p (t))
{
size = tree_low_cst (t, 0);
if (size < 0)
t = DECL_SIZE_UNIT (t);
else
t = TYPE_SIZE_UNIT (TREE_TYPE (t));
- if (t && host_integerp (t, 0))
+ if (t && tree_fits_shwi_p (t))
{
size = tree_low_cst (t, 0);
if (size < 0)
if (TREE_CODE (field) == FIELD_DECL
&& TREE_CODE (TREE_TYPE (field)) == REAL_TYPE
&& TYPE_PRECISION (TREE_TYPE (field)) == BITS_PER_WORD
- && host_integerp (bit_position (field), 0)
+ && tree_fits_shwi_p (bit_position (field))
&& int_bit_position (field) % BITS_PER_WORD == 0)
break;
if (TREE_CODE (field) == FIELD_DECL
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (field))
&& TYPE_PRECISION (TREE_TYPE (field)) == BITS_PER_WORD
- && host_integerp (bit_position (field), 0)
+ && tree_fits_shwi_p (bit_position (field))
&& int_bit_position (field) % BITS_PER_WORD == 0)
break;
SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
if (!DECL_SIZE (decl)
|| !DECL_ALIGN (decl)
- || !host_integerp (DECL_SIZE (decl), 0)
+ || !tree_fits_shwi_p (DECL_SIZE (decl))
|| (DECL_ALIGN (decl) <= 64
&& DECL_ALIGN (decl) != tree_low_cst (DECL_SIZE (decl), 0)))
SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * error.c, init.c, parser.c, semantics.c: Replace
+ host_integerp (..., 0) with tree_fits_shwi_p throughout.
+
2013-11-17 Paolo Carlini <paolo.carlini@oracle.com>
PR c++/59123
tree max = TYPE_MAX_VALUE (dtype);
if (integer_all_onesp (max))
pp_character (pp, '0');
- else if (host_integerp (max, 0))
+ else if (tree_fits_shwi_p (max))
pp_wide_integer (pp, tree_low_cst (max, 0) + 1);
else
{
pp_cxx_right_paren (pp);
break;
}
- else if (host_integerp (idx, 0))
+ else if (tree_fits_shwi_p (idx))
{
tree virtuals;
unsigned HOST_WIDE_INT n;
if (from_array
|| ((type_build_ctor_call (type) || init || explicit_value_init_p)
- && ! (host_integerp (maxindex, 0)
+ && ! (tree_fits_shwi_p (maxindex)
&& (num_initialized_elts
== tree_low_cst (maxindex, 0) + 1))))
{
return list;
num = fold_non_dependent_expr (num);
if (!INTEGRAL_TYPE_P (TREE_TYPE (num))
- || !host_integerp (num, 0)
+ || !tree_fits_shwi_p (num)
|| (n = tree_low_cst (num, 0)) <= 0
|| (int) n != n)
{
return value;
if (TREE_CODE (TREE_TYPE (field)) == INTEGER_TYPE
&& TREE_CODE (value) == INTEGER_CST
- && host_integerp (bitpos, 0)
- && host_integerp (DECL_SIZE (field), 0))
+ && tree_fits_shwi_p (bitpos)
+ && tree_fits_shwi_p (DECL_SIZE (field)))
{
HOST_WIDE_INT bit = tree_low_cst (bitpos, 0);
HOST_WIDE_INT sz = tree_low_cst (DECL_SIZE (field), 0);
/* Omit fields whose position or size are variable or too large to
represent. */
|| (TREE_CODE (tem) == FIELD_DECL
- && (! host_integerp (bit_position (tem), 0)
+ && (! tree_fits_shwi_p (bit_position (tem))
|| ! DECL_SIZE (tem)
|| ! host_integerp (DECL_SIZE (tem), 1))))
continue;
stabstr_C (c1);
stabstr_C (c2);
- if (DECL_VINDEX (decl) && host_integerp (DECL_VINDEX (decl), 0))
+ if (DECL_VINDEX (decl) && tree_fits_shwi_p (DECL_VINDEX (decl)))
{
stabstr_D (tree_low_cst (DECL_VINDEX (decl), 0));
stabstr_C (';');
}
stabstr_C (';');
- if (low && host_integerp (low, 0))
+ if (low && tree_fits_shwi_p (low))
{
if (print_int_cst_bounds_in_octal_p (type, low, high))
stabstr_O (low);
stabstr_C ('0');
stabstr_C (';');
- if (high && host_integerp (high, 0))
+ if (high && tree_fits_shwi_p (high))
{
if (print_int_cst_bounds_in_octal_p (type, low, high))
stabstr_O (high);
return NULL;
if (offset != NULL)
{
- if (!host_integerp (offset, 0))
+ if (!tree_fits_shwi_p (offset))
return NULL;
x = adjust_address_nv (x, mode, tree_low_cst (offset, 0));
}
??? Why do we skip emitting the type and location in this case? */
if (TREE_STATIC (decl) && TREE_READONLY (decl)
&& DECL_INITIAL (decl) != 0
- && host_integerp (DECL_INITIAL (decl), 0)
+ && tree_fits_shwi_p (DECL_INITIAL (decl))
&& ! TREE_ASM_WRITTEN (decl)
&& (DECL_FILE_SCOPE_P (decl)
|| TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
}
case INTEGER_CST:
- if ((want_address || !host_integerp (loc, 0))
+ if ((want_address || !tree_fits_shwi_p (loc))
&& (ret = cst_pool_loc_descr (loc)))
have_address = 1;
else if (want_address == 2
- && host_integerp (loc, 0)
+ && tree_fits_shwi_p (loc)
&& (ret = address_of_int_loc_descriptor
(int_size_in_bytes (TREE_TYPE (loc)),
tree_low_cst (loc, 0))))
have_address = 1;
- else if (host_integerp (loc, 0))
+ else if (tree_fits_shwi_p (loc))
ret = int_loc_descriptor (tree_low_cst (loc, 0));
else
{
case POINTER_PLUS_EXPR:
case PLUS_EXPR:
- if (host_integerp (TREE_OPERAND (loc, 1), 0))
+ if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
{
list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0);
if (list_ret == 0)
*value = 0;
if (offset != NULL)
{
- if (!host_integerp (offset, 0))
+ if (!tree_fits_shwi_p (offset))
return NULL_TREE;
*value = tree_low_cst (offset, 0);
}
constructor_elt *ce;
if (TYPE_DOMAIN (type) == NULL_TREE
- || !host_integerp (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), 0))
+ || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
return false;
fieldsize = int_size_in_bytes (TREE_TYPE (type));
&& ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
return false;
else if (DECL_SIZE_UNIT (field) == NULL_TREE
- || !host_integerp (DECL_SIZE_UNIT (field), 0))
+ || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
return false;
fieldsize = tree_low_cst (DECL_SIZE_UNIT (field), 0);
pos = int_byte_position (field);
/* Use the default if possible. */
if (bound_attr == DW_AT_lower_bound
- && host_integerp (bound, 0)
+ && tree_fits_shwi_p (bound)
&& (dflt = lower_bound_default ()) != -1
&& tree_low_cst (bound, 0) == dflt)
;
/* We can't yet handle bit-fields whose offsets are variable, so if we
encounter such things, just return without generating any attribute
whatsoever. Likewise for variable or too large size. */
- if (! host_integerp (bit_position (decl), 0)
+ if (! tree_fits_shwi_p (bit_position (decl))
|| ! host_integerp (DECL_SIZE (decl), 1))
return;
{
add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
- if (host_integerp (DECL_VINDEX (func_decl), 0))
+ if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
add_AT_loc (die, DW_AT_vtable_elem_location,
new_loc_descr (DW_OP_constu,
tree_low_cst (DECL_VINDEX (func_decl), 0),
case VAR_DECL:
return loc_descriptor_from_tree (val, 0);
case INTEGER_CST:
- if (host_integerp (val, 0))
+ if (tree_fits_shwi_p (val))
return int_loc_descriptor (tree_low_cst (val, 0));
break;
case INDIRECT_REF:
{
dw_loc_descr_ref loc;
- if (host_integerp (val, 0))
+ if (tree_fits_shwi_p (val))
{
add_AT_unsigned (die, attr, tree_low_cst (val, 0));
return;
/* If it is the default value, omit it. */
int dflt;
- if (host_integerp (info->dimen[dim].lower_bound, 0)
+ if (tree_fits_shwi_p (info->dimen[dim].lower_bound)
&& (dflt = lower_bound_default ()) != -1
&& tree_low_cst (info->dimen[dim].lower_bound, 0) == dflt)
;
value = DECL_INITIAL (value);
if (simple_type_size_in_bits (TREE_TYPE (value))
- <= HOST_BITS_PER_WIDE_INT || host_integerp (value, 0))
+ <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
/* DWARF2 does not provide a way of indicating whether or
not enumeration constants are signed or unsigned. GDB
always assumes the values are signed, so we output all
we can add DW_OP_GNU_implicit_pointer. */
STRIP_NOPS (init);
if (TREE_CODE (init) == POINTER_PLUS_EXPR
- && host_integerp (TREE_OPERAND (init, 1), 0))
+ && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
{
offset = tree_low_cst (TREE_OPERAND (init, 1), 0);
init = TREE_OPERAND (init, 0);
HOST_WIDE_INT region_nr;
eh_region region;
- gcc_assert (host_integerp (region_nr_t, 0));
+ gcc_assert (tree_fits_shwi_p (region_nr_t));
region_nr = tree_low_cst (region_nr_t, 0);
region = (*cfun->eh->region_array)[region_nr];
gcc_assert (size);
}
- if (size == 0 || !host_integerp (size, 0))
+ if (size == 0 || !tree_fits_shwi_p (size))
return -1;
return tree_low_cst (size, 0);
mode = VOIDmode;
offset = DECL_FIELD_OFFSET (field);
- if (host_integerp (offset, 0)
- && host_integerp (bit_position (field), 0))
+ if (tree_fits_shwi_p (offset)
+ && tree_fits_shwi_p (bit_position (field)))
{
bitpos = int_bit_position (field);
offset = 0;
domain = TYPE_DOMAIN (type);
const_bounds_p = (TYPE_MIN_VALUE (domain)
&& TYPE_MAX_VALUE (domain)
- && host_integerp (TYPE_MIN_VALUE (domain), 0)
- && host_integerp (TYPE_MAX_VALUE (domain), 0));
+ && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
+ && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
/* If we have constant bounds for the range of the type, get them. */
if (const_bounds_p)
/* If the range is constant and "small", unroll the loop. */
if (const_bounds_p
- && host_integerp (lo_index, 0)
- && host_integerp (hi_index, 0)
+ && tree_fits_shwi_p (lo_index)
+ && tree_fits_shwi_p (hi_index)
&& (lo = tree_low_cst (lo_index, 0),
hi = tree_low_cst (hi_index, 0),
count = hi - lo + 1,
emit_label (loop_end);
}
}
- else if ((index != 0 && ! host_integerp (index, 0))
+ else if ((index != 0 && ! tree_fits_shwi_p (index))
|| ! host_integerp (TYPE_SIZE (elttype), 1))
{
tree position;
indexed address, for machines that support that. */
if (modifier == EXPAND_SUM && mode == ptr_mode
- && host_integerp (treeop1, 0))
+ && tree_fits_shwi_p (treeop1))
{
tree exp1 = treeop1;
#define ADD_PARM_SIZE(TO, INC) \
do { \
tree inc = (INC); \
- if (host_integerp (inc, 0)) \
+ if (tree_fits_shwi_p (inc)) \
(TO).constant += tree_low_cst (inc, 0); \
else if ((TO).var == 0) \
(TO).var = fold_convert (ssizetype, inc); \
#define SUB_PARM_SIZE(TO, DEC) \
do { \
tree dec = (DEC); \
- if (host_integerp (dec, 0)) \
+ if (tree_fits_shwi_p (dec)) \
(TO).constant -= tree_low_cst (dec, 0); \
else if ((TO).var == 0) \
(TO).var = size_binop (MINUS_EXPR, ssize_int (0), \
tree size = TYPE_SIZE (TREE_TYPE (inner));
if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
|| POINTER_TYPE_P (TREE_TYPE (inner)))
- && host_integerp (size, 0)
+ && tree_fits_shwi_p (size)
&& tree_low_cst (size, 0) == bitsize)
return fold_convert_loc (loc, type, inner);
}
/* No identical multiplicands; see if we can find a common
power-of-two factor in non-power-of-two multiplies. This
can help in multi-dimensional array access. */
- else if (host_integerp (arg01, 0)
- && host_integerp (arg11, 0))
+ else if (tree_fits_shwi_p (arg01)
+ && tree_fits_shwi_p (arg11))
{
HOST_WIDE_INT int01, int11, tmp;
bool swap = false;
if (TREE_CODE (type) != ARRAY_TYPE
|| TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
|| GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
- || !host_integerp (TYPE_SIZE_UNIT (type), 0))
+ || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
return 0;
total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
if (total_bytes > len)
indirect_base0 = true;
}
offset0 = TREE_OPERAND (arg0, 1);
- if (host_integerp (offset0, 0))
+ if (tree_fits_shwi_p (offset0))
{
HOST_WIDE_INT off = size_low_cst (offset0);
if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
indirect_base1 = true;
}
offset1 = TREE_OPERAND (arg1, 1);
- if (host_integerp (offset1, 0))
+ if (tree_fits_shwi_p (offset1))
{
HOST_WIDE_INT off = size_low_cst (offset1);
if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
|| (TYPE_UNSIGNED (type)
&& code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
- && host_integerp (arg1, false)
+ && tree_fits_shwi_p (arg1)
&& TREE_INT_CST_LOW (arg1) < prec
- && host_integerp (TREE_OPERAND (arg0, 1), false)
+ && tree_fits_shwi_p (TREE_OPERAND (arg0, 1))
&& TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
{
HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
{
if (!integer_zerop (TREE_OPERAND (base, 1)))
{
- if (!host_integerp (TREE_OPERAND (base, 1), 0))
+ if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
return NULL_TREE;
*bit_offset += (mem_ref_offset (base).low
* BITS_PER_UNIT);
case MULT_EXPR:
rhs2 = gimple_assign_rhs2 (gs);
- if (host_integerp (rhs2, 0))
+ if (tree_fits_shwi_p (rhs2))
return mult_by_coeff_cost (TREE_INT_CST_LOW (rhs2), lhs_mode, speed);
gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
/* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
should be, so handle differing ones specially by decomposing
the offset into a byte and bit offset manually. */
- if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
- && host_integerp (DECL_FIELD_OFFSET (f2), 0))
+ if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
+ && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
{
unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * gofrontend/expressions.cc: Replace host_integerp (..., 0) with
+ tree_fits_shwi_p throughout.
+
2013-11-14 Andrew MacLeod <amacleod@redhat.com>
* go-lang.c: Include only gimplify.h and gimple.h as needed.
tree int_type_tree = type_to_tree(int_type->get_backend(gogo));
expr_tree = fold_convert(int_type_tree, expr_tree);
- if (host_integerp(expr_tree, 0))
+ if (tree_fits_shwi_p (expr_tree))
{
HOST_WIDE_INT intval = tree_low_cst(expr_tree, 0);
std::string s;
&& tree_int_cst_sgn (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) == 0
&& TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE
&& TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == INTEGER_CST
- && host_integerp (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), 0))
+ && tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
{
char buf[100];
if (*slot != NULL)
macro_hash_del (*slot);
- if (host_integerp (TREE_VALUE (element), 0))
+ if (tree_fits_shwi_p (TREE_VALUE (element)))
snprintf (buf, sizeof buf, HOST_WIDE_INT_PRINT_DEC,
tree_low_cst (TREE_VALUE (element), 0));
else if (host_integerp (TREE_VALUE (element), 1))
case MULT_EXPR:
if (chrec_contains_symbols (TREE_OPERAND (e, 0)))
return graphite_can_represent_init (TREE_OPERAND (e, 0))
- && host_integerp (TREE_OPERAND (e, 1), 0);
+ && tree_fits_shwi_p (TREE_OPERAND (e, 1));
else
return graphite_can_represent_init (TREE_OPERAND (e, 1))
- && host_integerp (TREE_OPERAND (e, 0), 0);
+ && tree_fits_shwi_p (TREE_OPERAND (e, 0));
case PLUS_EXPR:
case POINTER_PLUS_EXPR:
subscript - low >= 0 and high - subscript >= 0 in case one of
the two bounds isn't known. Do the same here? */
- if (host_integerp (low, 0)
+ if (tree_fits_shwi_p (low)
&& high
- && host_integerp (high, 0)
+ && tree_fits_shwi_p (high)
/* 1-element arrays at end of structures may extend over
their declared size. */
&& !(array_at_struct_end_p (ref)
+2013-11-18 Richard Sandiford <rdsandiford@googlemail.com>
+
+ * class.c, expr.c: Replace host_integerp (..., 0) with
+ tree_fits_shwi_p throughout.
+
2013-11-14 Andrew MacLeod <amacleod@redhat.com>
* java-gimplify.c: Include only gimplify.h and gimple.h as needed.
{
tree method_index = get_method_index (method);
if (method_index != NULL_TREE
- && host_integerp (method_index, 0))
+ && tree_fits_shwi_p (method_index))
TREE_VEC_ELT (vtable, tree_low_cst (method_index, 0)) = method;
}
}
tree prim_type = decode_newarray_type (atype_value);
tree type
= build_java_array_type (prim_type,
- host_integerp (length, 0) == INTEGER_CST
+ tree_fits_shwi_p (length) == INTEGER_CST
? tree_low_cst (length, 0) : -1);
/* Pass a reference to the primitive type class and save the runtime
{
tree type
= build_java_array_type (class_type,
- host_integerp (length, 0)
+ tree_fits_shwi_p (length)
? tree_low_cst (length, 0) : -1);
return build_call_nary (promote_type (type),
: "#pragma omp cancellation point");
return false;
}
- switch (host_integerp (gimple_call_arg (stmt, 0), 0)
+ switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
? tree_low_cst (gimple_call_arg (stmt, 0), 0)
: 0)
{
/* When possible, use a strict equality expression. This can let VRP
type optimizations deduce the value and remove a copy. */
- if (host_integerp (fd->loop.step, 0))
+ if (tree_fits_shwi_p (fd->loop.step))
{
HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->loop.step);
if (step == 1 || step == -1)
/* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
vinit = fd->loop.n1;
if (cond_code == EQ_EXPR
- && host_integerp (fd->loop.n2, 0)
+ && tree_fits_shwi_p (fd->loop.n2)
&& ! integer_zerop (fd->loop.n2))
vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
else
return NULL;
else if (TREE_CODE (t1) == SSA_NAME)
ret = t1;
- else if (host_integerp (t1, 0))
+ else if (tree_fits_shwi_p (t1))
value = tree_low_cst (t1, 0);
else
return NULL;
if (!t2)
return ret;
- else if (host_integerp (t2, 0))
+ else if (tree_fits_shwi_p (t2))
value = tree_low_cst (t2, 0);
else if (TREE_CODE (t2) == SSA_NAME)
{
code = invert_tree_comparison (code, false);
bound = iv0.base;
base = iv1.base;
- if (host_integerp (iv1.step, 0))
+ if (tree_fits_shwi_p (iv1.step))
step = iv1.step;
else
return false;
{
bound = iv1.base;
base = iv0.base;
- if (host_integerp (iv0.step, 0))
+ if (tree_fits_shwi_p (iv0.step))
step = iv0.step;
else
return false;
/* If loop bound, base and compare bound are all constants, we can
calculate the probability directly. */
- if (host_integerp (loop_bound_var, 0)
- && host_integerp (compare_var, 0)
- && host_integerp (compare_base, 0))
+ if (tree_fits_shwi_p (loop_bound_var)
+ && tree_fits_shwi_p (compare_var)
+ && tree_fits_shwi_p (compare_base))
{
int probability;
bool of, overflow = false;
if (!decl)
decl_size = -1;
else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
- decl_size = (host_integerp (DECL_SIZE_UNIT (decl), 0)
+ decl_size = (tree_fits_shwi_p (DECL_SIZE_UNIT (decl))
? tree_low_cst (DECL_SIZE_UNIT (decl), 0)
: -1);
else if (TREE_CODE (decl) == STRING_CST)
= (TYPE_DOMAIN (type)
&& TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != 0
&& TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
- && host_integerp (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), 0)
- && host_integerp (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), 0)
+ && tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
+ && tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
? (tree_low_cst (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), 0)
- tree_low_cst (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), 0) + 1)
: 0);
&& DECL_NAME (tail)
&& DECL_SIZE (tail)
&& host_integerp (DECL_SIZE (tail), 1)
- && host_integerp (bit_position (tail), 0))
+ && tree_fits_shwi_p (bit_position (tail)))
{
if (POINTER_TYPE_P (TREE_TYPE (tail)))
sdbout_one_type (TREE_TYPE (TREE_TYPE (tail)));
if (TREE_CODE (value) == CONST_DECL)
value = DECL_INITIAL (value);
- if (host_integerp (value, 0))
+ if (tree_fits_shwi_p (value))
{
PUT_SDB_DEF (IDENTIFIER_POINTER (TREE_PURPOSE (tem)));
PUT_SDB_INT_VAL (tree_low_cst (value, 0));
&& DECL_NAME (tem)
&& DECL_SIZE (tem)
&& host_integerp (DECL_SIZE (tem), 1)
- && host_integerp (bit_position (tem), 0))
+ && tree_fits_shwi_p (bit_position (tem)))
{
const char *name;
&mode, &unsignedp, &volatilep, false);
if (bitsize != GET_MODE_BITSIZE (mode)
|| (bitpos % BITS_PER_UNIT)
- || (toffset && !host_integerp (toffset, 0)))
+ || (toffset && !tree_fits_shwi_p (toffset)))
decl = NULL;
else
{
if (DECL_BIT_FIELD_TYPE (field)
&& !integer_zerop (DECL_SIZE (field))
&& !integer_zerop (DECL_SIZE (rli->prev_field))
- && host_integerp (DECL_SIZE (rli->prev_field), 0)
- && host_integerp (TYPE_SIZE (type), 0)
+ && tree_fits_shwi_p (DECL_SIZE (rli->prev_field))
+ && tree_fits_shwi_p (TYPE_SIZE (type))
&& simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)))
{
/* We're in the middle of a run of equal type size fields; make
HOST_WIDE_INT cd = 0, val;
tree step;
- if (!host_integerp (cst, 0))
+ if (!tree_fits_shwi_p (cst))
return true;
val = tree_low_cst (cst, 0);
while (TREE_CODE (chrec) == POLYNOMIAL_CHREC)
{
step = CHREC_RIGHT (chrec);
- if (!host_integerp (step, 0))
+ if (!tree_fits_shwi_p (step))
return true;
cd = gcd (cd, tree_low_cst (step, 0));
chrec = CHREC_LEFT (chrec);
{
tree fsize = DECL_SIZE_UNIT (field);
tree ssize = TYPE_SIZE_UNIT (stype);
- if (host_integerp (fsize, 0)
- && host_integerp (ssize, 0)
+ if (tree_fits_shwi_p (fsize)
+ && tree_fits_shwi_p (ssize)
&& doffset.fits_shwi ())
maxsize += ((TREE_INT_CST_LOW (ssize)
- TREE_INT_CST_LOW (fsize))
if (min && max
&& integer_zerop (min)
- && host_integerp (max, 0))
+ && tree_fits_shwi_p (max))
pp_wide_integer (buffer, TREE_INT_CST_LOW (max) + 1);
else
{
*msg = "structure field size not fixed";
return true;
}
- if (!host_integerp (bit_position (fld), 0))
+ if (!tree_fits_shwi_p (bit_position (fld)))
{
*msg = "structure field size too big";
return true;
while (handled_component_p (expr))
{
if (TREE_CODE (expr) == ARRAY_REF
- && !host_integerp (array_ref_low_bound (expr), 0))
+ && !tree_fits_shwi_p (array_ref_low_bound (expr)))
return true;
expr = TREE_OPERAND (expr, 0);
}
}
ref->offset += extra_offset;
if (size
- && host_integerp (size, 0)
+ && tree_fits_shwi_p (size)
&& TREE_INT_CST_LOW (size) * BITS_PER_UNIT / BITS_PER_UNIT
== TREE_INT_CST_LOW (size))
ref->max_size = ref->size = TREE_INT_CST_LOW (size) * BITS_PER_UNIT;
{
tree dest = gimple_call_arg (stmt, 0);
tree len = gimple_call_arg (stmt, 2);
- if (!host_integerp (len, 0))
+ if (!tree_fits_shwi_p (len))
return false;
tree rbase = ref->base;
double_int roffset = double_int::from_shwi (ref->offset);
char *src_buf;
use_operand_p use_p;
- if (!host_integerp (val2, 0)
+ if (!tree_fits_shwi_p (val2)
|| !host_integerp (len2, 1))
break;
if (is_gimple_call (stmt1))
src1 = gimple_assign_rhs1 (stmt1);
if (TREE_CODE (ptr1) != MEM_REF
|| TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
- || !host_integerp (src1, 0))
+ || !tree_fits_shwi_p (src1))
break;
ptr1 = build_fold_addr_expr (ptr1);
callee1 = NULL_TREE;
/* Check for one shift count being Y and the other B - Y,
with optional casts. */
if (cdef_code[i] == MINUS_EXPR
- && host_integerp (cdef_arg1[i], 0)
+ && tree_fits_shwi_p (cdef_arg1[i])
&& tree_low_cst (cdef_arg1[i], 0) == TYPE_PRECISION (rtype)
&& TREE_CODE (cdef_arg2[i]) == SSA_NAME)
{
This alternative is safe even for rotation count of 0.
One shift count is Y and the other (-Y) & (B - 1). */
else if (cdef_code[i] == BIT_AND_EXPR
- && host_integerp (cdef_arg2[i], 0)
+ && tree_fits_shwi_p (cdef_arg2[i])
&& tree_low_cst (cdef_arg2[i], 0)
== TYPE_PRECISION (rtype) - 1
&& TREE_CODE (cdef_arg1[i]) == SSA_NAME
{
tree ind = TREE_OPERAND (usym, 1);
if (TREE_CODE (ind) == INTEGER_CST
- && host_integerp (ind, 0)
+ && tree_fits_shwi_p (ind)
&& TREE_INT_CST_LOW (ind) == 0)
usym = TREE_OPERAND (usym, 0);
}
{
tree ind = TREE_OPERAND (csym, 1);
if (TREE_CODE (ind) == INTEGER_CST
- && host_integerp (ind, 0)
+ && tree_fits_shwi_p (ind)
&& TREE_INT_CST_LOW (ind) == 0)
csym = TREE_OPERAND (csym, 0);
}
if ((unsigned) loop_depth (aloop) <= min_depth)
continue;
- if (host_integerp (step, 0))
+ if (tree_fits_shwi_p (step))
astep = tree_low_cst (step, 0);
else
astep = L1_CACHE_LINE_SIZE;
}
else
{
- if (!host_integerp (arg1, 0))
+ if (!tree_fits_shwi_p (arg1))
break;
n = TREE_INT_CST_LOW (arg1);
if (TREE_CODE (exp) == MEM_REF
&& TREE_CODE (TREE_OPERAND (exp, 0)) == SSA_NAME
- && host_integerp (TREE_OPERAND (exp, 1), 0)
+ && tree_fits_shwi_p (TREE_OPERAND (exp, 1))
&& (size = int_size_in_bytes (TREE_TYPE (exp))) > 0)
{
tree name = TREE_OPERAND (exp, 0);
*base = gimple_call_arg (stmt, 0);
arg1 = gimple_call_arg (stmt, 1);
- if (!host_integerp (arg1, 0))
+ if (!tree_fits_shwi_p (arg1))
return false;
*exponent = TREE_INT_CST_LOW (arg1);
case MEM_REF:
/* The base address gets its own vn_reference_op_s structure. */
temp.op0 = TREE_OPERAND (ref, 1);
- if (host_integerp (TREE_OPERAND (ref, 1), 0))
+ if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
break;
case BIT_FIELD_REF:
case ARRAY_RANGE_REF:
case ARRAY_REF:
/* We recorded the lower bound and the element size. */
- if (!host_integerp (op->op0, 0)
- || !host_integerp (op->op1, 0)
- || !host_integerp (op->op2, 0))
+ if (!tree_fits_shwi_p (op->op0)
+ || !tree_fits_shwi_p (op->op1)
+ || !tree_fits_shwi_p (op->op2))
max_size = -1;
else
{
off += double_int::from_shwi (addr_offset);
mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
op->op0 = build_fold_addr_expr (addr_base);
- if (host_integerp (mem_op->op0, 0))
+ if (tree_fits_shwi_p (mem_op->op0))
mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
else
mem_op->off = -1;
}
mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
- if (host_integerp (mem_op->op0, 0))
+ if (tree_fits_shwi_p (mem_op->op0))
mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
else
mem_op->off = -1;
s = string_constant (exp, &o);
if (s != NULL_TREE
- && (o == NULL_TREE || host_integerp (o, 0))
+ && (o == NULL_TREE || tree_fits_shwi_p (o))
&& TREE_STRING_LENGTH (s) > 0)
{
HOST_WIDE_INT offset = o ? tree_low_cst (o, 0) : 0;
static HOST_WIDE_INT
bitpos_of_field (const tree fdecl)
{
- if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
- || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
+ if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
+ || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
return -1;
return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
if (!host_integerp (sza, 1)
|| !host_integerp (szb, 1)
|| !tree_int_cst_equal (sza, szb)
- || !host_integerp (DR_STEP (dra), 0)
- || !host_integerp (DR_STEP (drb), 0)
+ || !tree_fits_shwi_p (DR_STEP (dra))
+ || !tree_fits_shwi_p (DR_STEP (drb))
|| !tree_int_cst_equal (DR_STEP (dra), DR_STEP (drb)))
break;
if (!operand_equal_p (DR_BASE_ADDRESS (dr_a1->dr),
DR_BASE_ADDRESS (dr_a2->dr),
0)
- || !host_integerp (dr_a1->offset, 0)
- || !host_integerp (dr_a2->offset, 0))
+ || !tree_fits_shwi_p (dr_a1->offset)
+ || !tree_fits_shwi_p (dr_a2->offset))
continue;
HOST_WIDE_INT diff = TREE_INT_CST_LOW (dr_a2->offset) -
}
break;
case MULT_EXPR:
- if (scale == 1 && host_integerp (op1, 0))
+ if (scale == 1 && tree_fits_shwi_p (op1))
{
scale = tree_low_cst (op1, 0);
off = op0;
*type_out = NULL_TREE;
/* Catch squaring. */
- if ((host_integerp (exp, 0)
+ if ((tree_fits_shwi_p (exp)
&& tree_low_cst (exp, 0) == 2)
|| (TREE_CODE (exp) == REAL_CST
&& REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst2)))
(L)->may_alias_ddrs.length () > 0
#define NITERS_KNOWN_P(n) \
-(host_integerp ((n),0) \
+(tree_fits_shwi_p ((n)) \
&& TREE_INT_CST_LOW ((n)) > 0)
#define LOOP_VINFO_NITERS_KNOWN_P(L) \
DECL_VINDEX referring to itself into a vtable slot number as it
should. Happens with functions that are copied and then forgotten
about. Just clear it, it won't matter anymore. */
- if (DECL_VINDEX (decl) && !host_integerp (DECL_VINDEX (decl), 0))
+ if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
DECL_VINDEX (decl) = NULL_TREE;
}
else if (TREE_CODE (decl) == VAR_DECL)
&& DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
{
initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
- if (host_integerp (initial, 0))
+ if (tree_fits_shwi_p (initial))
{
item = GEN_INT (tree_low_cst (initial, 0));
item = gen_rtx_CONCAT (indmode, mem, item);
while (1)
{
if (TREE_CODE (target) == COMPONENT_REF
- && host_integerp (byte_position (TREE_OPERAND (target, 1)), 0))
+ && tree_fits_shwi_p (byte_position (TREE_OPERAND (target, 1))))
{
offset += int_byte_position (TREE_OPERAND (target, 1));
target = TREE_OPERAND (target, 0);