/* Operations with affine combinations of trees.
- Copyright (C) 2005, 2007, 2008, 2010 Free Software Foundation, Inc.
+ Copyright (C) 2005-2015 Free Software Foundation, Inc.
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
+#include "alias.h"
+#include "backend.h"
#include "tree.h"
-#include "output.h"
-#include "tree-pretty-print.h"
-#include "tree-dump.h"
-#include "pointer-set.h"
-#include "tree-affine.h"
#include "gimple.h"
+#include "rtl.h"
+#include "options.h"
+#include "fold-const.h"
#include "flags.h"
+#include "insn-config.h"
+#include "expmed.h"
+#include "dojump.h"
+#include "explow.h"
+#include "calls.h"
+#include "emit-rtl.h"
+#include "varasm.h"
+#include "stmt.h"
+#include "expr.h"
+#include "tree-pretty-print.h"
+#include "tree-affine.h"
+#include "internal-fn.h"
+#include "gimplify.h"
+#include "dumpfile.h"
+#include "cfgexpand.h"
+#include "wide-int-print.h"
/* Extends CST as appropriate for the affine combinations COMB. */
-double_int
-double_int_ext_for_comb (double_int cst, aff_tree *comb)
+widest_int
+wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
- return double_int_sext (cst, TYPE_PRECISION (comb->type));
+ return wi::sext (cst, TYPE_PRECISION (comb->type));
}
/* Resets COMB to the affine combination representing the constant zero
   of type TYPE: no elements, no symbolic rest, zero offset.  */
static void
aff_combination_zero (aff_tree *comb, tree type)
{
  comb->type = type;
  comb->n = 0;
  comb->rest = NULL_TREE;
  comb->offset = 0;
  /* Clear every coefficient slot so that stale values never leak into
     later uses of COMB.  */
  for (int idx = 0; idx < MAX_AFF_ELTS; idx++)
    comb->elts[idx].coef = 0;
}
/* Sets COMB to CST. */
void
-aff_combination_const (aff_tree *comb, tree type, double_int cst)
+aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
aff_combination_zero (comb, type);
- comb->offset = double_int_ext_for_comb (cst, comb);
+ comb->offset = wide_int_ext_for_comb (cst, comb);;
}
/* Sets COMB to single element ELT. */
comb->n = 1;
comb->elts[0].val = elt;
- comb->elts[0].coef = double_int_one;
+ comb->elts[0].coef = 1;
}
/* Scales COMB by SCALE. */
void
-aff_combination_scale (aff_tree *comb, double_int scale)
+aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
unsigned i, j;
- scale = double_int_ext_for_comb (scale, comb);
- if (double_int_one_p (scale))
+ widest_int scale = wide_int_ext_for_comb (scale_in, comb);
+ if (scale == 1)
return;
- if (double_int_zero_p (scale))
+ if (scale == 0)
{
aff_combination_zero (comb, comb->type);
return;
}
- comb->offset
- = double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb);
+ comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
for (i = 0, j = 0; i < comb->n; i++)
{
- double_int new_coef;
-
- new_coef
- = double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef),
- comb);
+ widest_int new_coef
+ = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
/* A coefficient may become zero due to overflow. Remove the zero
elements. */
- if (double_int_zero_p (new_coef))
+ if (new_coef == 0)
continue;
comb->elts[j].coef = new_coef;
comb->elts[j].val = comb->elts[i].val;
}
else
comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
- double_int_to_tree (type, scale));
+ wide_int_to_tree (type, scale));
}
}
/* Adds ELT * SCALE to COMB. */
void
-aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
+aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
unsigned i;
tree type;
- scale = double_int_ext_for_comb (scale, comb);
- if (double_int_zero_p (scale))
+ widest_int scale = wide_int_ext_for_comb (scale_in, comb);
+ if (scale == 0)
return;
for (i = 0; i < comb->n; i++)
if (operand_equal_p (comb->elts[i].val, elt, 0))
{
- double_int new_coef;
-
- new_coef = double_int_add (comb->elts[i].coef, scale);
- new_coef = double_int_ext_for_comb (new_coef, comb);
- if (!double_int_zero_p (new_coef))
+ widest_int new_coef
+ = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
+ if (new_coef != 0)
{
comb->elts[i].coef = new_coef;
return;
if (comb->rest)
{
gcc_assert (comb->n == MAX_AFF_ELTS - 1);
- comb->elts[comb->n].coef = double_int_one;
+ comb->elts[comb->n].coef = 1;
comb->elts[comb->n].val = comb->rest;
comb->rest = NULL_TREE;
comb->n++;
if (POINTER_TYPE_P (type))
type = sizetype;
- if (double_int_one_p (scale))
+ if (scale == 1)
elt = fold_convert (type, elt);
else
elt = fold_build2 (MULT_EXPR, type,
fold_convert (type, elt),
- double_int_to_tree (type, scale));
+ wide_int_to_tree (type, scale));
if (comb->rest)
comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
/* Adds CST to C. */
static void
-aff_combination_add_cst (aff_tree *c, double_int cst)
+aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
- c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c);
+ c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}
/* Adds COMB2 to COMB1. */
for (i = 0; i < comb2->n; i++)
aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
if (comb2->rest)
- aff_combination_add_elt (comb1, comb2->rest, double_int_one);
+ aff_combination_add_elt (comb1, comb2->rest, 1);
}
/* Converts affine combination COMB to TYPE. */
if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
return;
- comb->offset = double_int_ext_for_comb (comb->offset, comb);
+ comb->offset = wide_int_ext_for_comb (comb->offset, comb);
for (i = j = 0; i < comb->n; i++)
{
- double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
- if (double_int_zero_p (new_coef))
+ if (comb->elts[i].coef == 0)
continue;
- comb->elts[j].coef = new_coef;
+ comb->elts[j].coef = comb->elts[i].coef;
comb->elts[j].val = fold_convert (type, comb->elts[i].val);
j++;
}
comb->n = j;
if (comb->n < MAX_AFF_ELTS && comb->rest)
{
- comb->elts[comb->n].coef = double_int_one;
+ comb->elts[comb->n].coef = 1;
comb->elts[comb->n].val = comb->rest;
comb->rest = NULL_TREE;
comb->n++;
enum tree_code code;
tree cst, core, toffset;
HOST_WIDE_INT bitpos, bitsize;
- enum machine_mode mode;
+ machine_mode mode;
int unsignedp, volatilep;
STRIP_NOPS (expr);
switch (code)
{
case INTEGER_CST:
- aff_combination_const (comb, type, tree_to_double_int (expr));
+ aff_combination_const (comb, type, wi::to_widest (expr));
return;
case POINTER_PLUS_EXPR:
tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
if (code == MINUS_EXPR)
- aff_combination_scale (&tmp, double_int_minus_one);
+ aff_combination_scale (&tmp, -1);
aff_combination_add (comb, &tmp);
return;
if (TREE_CODE (cst) != INTEGER_CST)
break;
tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- aff_combination_scale (comb, tree_to_double_int (cst));
+ aff_combination_scale (comb, wi::to_widest (cst));
return;
case NEGATE_EXPR:
tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- aff_combination_scale (comb, double_int_minus_one);
+ aff_combination_scale (comb, -1);
return;
case BIT_NOT_EXPR:
/* ~x = -x - 1 */
tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- aff_combination_scale (comb, double_int_minus_one);
- aff_combination_add_cst (comb, double_int_minus_one);
+ aff_combination_scale (comb, -1);
+ aff_combination_add_cst (comb, -1);
return;
case ADDR_EXPR:
false);
if (bitpos % BITS_PER_UNIT != 0)
break;
- aff_combination_const (comb, type,
- uhwi_to_double_int (bitpos / BITS_PER_UNIT));
- core = build_fold_addr_expr (core);
+ aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
+ if (TREE_CODE (core) == MEM_REF)
+ {
+ aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
+ core = TREE_OPERAND (core, 0);
+ }
+ else
+ core = build_fold_addr_expr (core);
+
if (TREE_CODE (core) == ADDR_EXPR)
- aff_combination_add_elt (comb, core, double_int_one);
+ aff_combination_add_elt (comb, core, 1);
else
{
tree_to_aff_combination (core, type, &tmp);
combination COMB. */
static tree
-add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
- aff_tree *comb)
+add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
+ aff_tree *comb ATTRIBUTE_UNUSED)
{
enum tree_code code;
tree type1 = type;
if (POINTER_TYPE_P (type))
type1 = sizetype;
- scale = double_int_ext_for_comb (scale, comb);
- elt = fold_convert (type1, elt);
+ widest_int scale = wide_int_ext_for_comb (scale_in, comb);
- if (double_int_one_p (scale))
+ if (scale == -1
+ && POINTER_TYPE_P (TREE_TYPE (elt)))
+ {
+ elt = convert_to_ptrofftype (elt);
+ elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
+ scale = 1;
+ }
+
+ if (scale == 1)
{
if (!expr)
- return fold_convert (type, elt);
+ {
+ if (POINTER_TYPE_P (TREE_TYPE (elt)))
+ return elt;
+ else
+ return fold_convert (type1, elt);
+ }
- if (POINTER_TYPE_P (type))
- return fold_build_pointer_plus (expr, elt);
- return fold_build2 (PLUS_EXPR, type, expr, elt);
+ if (POINTER_TYPE_P (TREE_TYPE (expr)))
+ return fold_build_pointer_plus (expr, elt);
+ if (POINTER_TYPE_P (TREE_TYPE (elt)))
+ return fold_build_pointer_plus (elt, expr);
+ return fold_build2 (PLUS_EXPR, type1,
+ expr, fold_convert (type1, elt));
}
- if (double_int_minus_one_p (scale))
+ if (scale == -1)
{
if (!expr)
- return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));
+ return fold_build1 (NEGATE_EXPR, type1,
+ fold_convert (type1, elt));
- if (POINTER_TYPE_P (type))
+ if (POINTER_TYPE_P (TREE_TYPE (expr)))
{
- elt = fold_build1 (NEGATE_EXPR, type1, elt);
+ elt = convert_to_ptrofftype (elt);
+ elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
return fold_build_pointer_plus (expr, elt);
}
- return fold_build2 (MINUS_EXPR, type, expr, elt);
+ return fold_build2 (MINUS_EXPR, type1,
+ expr, fold_convert (type1, elt));
}
+ elt = fold_convert (type1, elt);
if (!expr)
- return fold_convert (type,
- fold_build2 (MULT_EXPR, type1, elt,
- double_int_to_tree (type1, scale)));
+ return fold_build2 (MULT_EXPR, type1, elt,
+ wide_int_to_tree (type1, scale));
- if (double_int_negative_p (scale))
+ if (wi::neg_p (scale))
{
code = MINUS_EXPR;
- scale = double_int_neg (scale);
+ scale = -scale;
}
else
code = PLUS_EXPR;
elt = fold_build2 (MULT_EXPR, type1, elt,
- double_int_to_tree (type1, scale));
- if (POINTER_TYPE_P (type))
+ wide_int_to_tree (type1, scale));
+ if (POINTER_TYPE_P (TREE_TYPE (expr)))
{
if (code == MINUS_EXPR)
elt = fold_build1 (NEGATE_EXPR, type1, elt);
return fold_build_pointer_plus (expr, elt);
}
- return fold_build2 (code, type, expr, elt);
+ return fold_build2 (code, type1, expr, elt);
}
/* Makes tree from the affine combination COMB. */
tree type = comb->type;
tree expr = NULL_TREE;
unsigned i;
- double_int off, sgn;
+ widest_int off, sgn;
tree type1 = type;
if (POINTER_TYPE_P (type))
type1 = sizetype;
comb);
if (comb->rest)
- expr = add_elt_to_tree (expr, type, comb->rest, double_int_one, comb);
+ expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);
/* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
unsigned. */
- if (double_int_negative_p (comb->offset))
+ if (wi::neg_p (comb->offset))
{
- off = double_int_neg (comb->offset);
- sgn = double_int_minus_one;
+ off = -comb->offset;
+ sgn = -1;
}
else
{
off = comb->offset;
- sgn = double_int_one;
+ sgn = 1;
}
- return add_elt_to_tree (expr, type, double_int_to_tree (type1, off), sgn,
+ return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
comb);
}
comb->elts[m] = comb->elts[comb->n];
if (comb->rest)
{
- comb->elts[comb->n].coef = double_int_one;
+ comb->elts[comb->n].coef = 1;
comb->elts[comb->n].val = comb->rest;
comb->rest = NULL_TREE;
comb->n++;
static void
-aff_combination_add_product (aff_tree *c, double_int coef, tree val,
+aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
aff_tree *r)
{
unsigned i;
fold_convert (type, val));
}
- aff_combination_add_elt (r, aval,
- double_int_mul (coef, c->elts[i].coef));
+ aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
}
if (c->rest)
}
if (val)
- aff_combination_add_elt (r, val,
- double_int_mul (coef, c->offset));
+ aff_combination_add_elt (r, val, coef * c->offset);
else
- aff_combination_add_cst (r, double_int_mul (coef, c->offset));
+ aff_combination_add_cst (r, coef * c->offset);
}
/* Multiplies C1 by C2, storing the result to R */
for (i = 0; i < c2->n; i++)
aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
if (c2->rest)
- aff_combination_add_product (c1, double_int_one, c2->rest, r);
+ aff_combination_add_product (c1, 1, c2->rest, r);
aff_combination_add_product (c1, c2->offset, NULL, r);
}
void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
- struct pointer_map_t **cache ATTRIBUTE_UNUSED)
+ hash_map<tree, name_expansion *> **cache)
{
unsigned i;
aff_tree to_add, current, curre;
tree e, rhs;
gimple def;
- double_int scale;
- void **slot;
+ widest_int scale;
struct name_expansion *exp;
aff_combination_zero (&to_add, comb->type);
type = TREE_TYPE (e);
name = e;
/* Look through some conversions. */
- if (TREE_CODE (e) == NOP_EXPR
+ if (CONVERT_EXPR_P (e)
&& (TYPE_PRECISION (type)
>= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
name = TREE_OPERAND (e, 0);
continue;
if (!*cache)
- *cache = pointer_map_create ();
- slot = pointer_map_insert (*cache, e);
- exp = (struct name_expansion *) *slot;
+ *cache = new hash_map<tree, name_expansion *>;
+ name_expansion **slot = &(*cache)->get_or_insert (e);
+ exp = *slot;
if (!exp)
{
it from COMB. */
scale = comb->elts[i].coef;
aff_combination_zero (&curre, comb->type);
- aff_combination_add_elt (&curre, e, double_int_neg (scale));
+ aff_combination_add_elt (&curre, e, -scale);
aff_combination_scale (¤t, scale);
aff_combination_add (&to_add, ¤t);
aff_combination_add (&to_add, &curre);
void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
- struct pointer_map_t **cache)
+ hash_map<tree, name_expansion *> **cache)
{
tree_to_aff_combination (expr, type, comb);
aff_combination_expand (comb, cache);
}
/* Frees memory occupied by struct name_expansion in *VALUE. Callback for
- pointer_map_traverse. */
+ hash_map::traverse. */
-static bool
-free_name_expansion (const void *key ATTRIBUTE_UNUSED, void **value,
- void *data ATTRIBUTE_UNUSED)
+bool
+free_name_expansion (tree const &, name_expansion **value, void *)
{
- struct name_expansion *const exp = (struct name_expansion *) *value;
-
- free (exp);
+ free (*value);
return true;
}
tree_to_aff_combination_expand. */
void
-free_affine_expand_cache (struct pointer_map_t **cache)
+free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
if (!*cache)
return;
- pointer_map_traverse (*cache, free_name_expansion, NULL);
- pointer_map_destroy (*cache);
+ (*cache)->traverse<void *, free_name_expansion> (NULL);
+ delete (*cache);
*cache = NULL;
}
/* If VAL != CST * DIV for any constant CST, returns false.
- Otherwise, if VAL != 0 (and hence CST != 0), and *MULT_SET is true,
- additionally compares CST and MULT, and if they are different,
- returns false. Finally, if neither of these two cases occur,
- true is returned, and if CST != 0, CST is stored to MULT and
- MULT_SET is set to true. */
+ Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
+ and if they are different, returns false. Finally, if neither of these
+ two cases occur, true is returned, and CST is stored to MULT and MULT_SET
+ is set to true. */
static bool
-double_int_constant_multiple_p (double_int val, double_int div,
- bool *mult_set, double_int *mult)
+wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
+ bool *mult_set, widest_int *mult)
{
- double_int rem, cst;
+ widest_int rem, cst;
- if (double_int_zero_p (val))
- return true;
+ if (val == 0)
+ {
+ if (*mult_set && mult != 0)
+ return false;
+ *mult_set = true;
+ *mult = 0;
+ return true;
+ }
- if (double_int_zero_p (div))
+ if (div == 0)
return false;
- cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem);
- if (!double_int_zero_p (rem))
+ if (!wi::multiple_of_p (val, div, SIGNED, &cst))
return false;
- if (*mult_set && !double_int_equal_p (*mult, cst))
+ if (*mult_set && *mult != cst)
return false;
*mult_set = true;
bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
- double_int *mult)
+ widest_int *mult)
{
bool mult_set = false;
unsigned i;
- if (val->n == 0 && double_int_zero_p (val->offset))
+ if (val->n == 0 && val->offset == 0)
{
- *mult = double_int_zero;
+ *mult = 0;
return true;
}
if (val->n != div->n)
if (val->rest || div->rest)
return false;
- if (!double_int_constant_multiple_p (val->offset, div->offset,
- &mult_set, mult))
+ if (!wide_int_constant_multiple_p (val->offset, div->offset,
+ &mult_set, mult))
return false;
for (i = 0; i < div->n; i++)
= aff_combination_find_elt (val, div->elts[i].val, NULL);
if (!elt)
return false;
- if (!double_int_constant_multiple_p (elt->coef, div->elts[i].coef,
- &mult_set, mult))
+ if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
+ &mult_set, mult))
return false;
}
/* Prints the affine VAL to the FILE. */
-void
+static void
print_aff (FILE *file, aff_tree *val)
{
unsigned i;
- bool uns = TYPE_UNSIGNED (val->type);
+ signop sgn = TYPE_SIGN (val->type);
if (POINTER_TYPE_P (val->type))
- uns = false;
+ sgn = SIGNED;
fprintf (file, "{\n type = ");
print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
fprintf (file, "\n offset = ");
- dump_double_int (file, val->offset, uns);
+ print_dec (val->offset, file, sgn);
if (val->n > 0)
{
fprintf (file, "\n elements = {\n");
print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);
fprintf (file, " * ");
- dump_double_int (file, val->elts[i].coef, uns);
+ print_dec (val->elts[i].coef, file, sgn);
if (i != val->n - 1)
fprintf (file, ", \n");
}
fprintf (stderr, "\n");
}
-/* Returns address of the reference REF in ADDR. The size of the accessed
- location is stored to SIZE. */
+/* Computes address of the reference REF in ADDR. The size of the accessed
+ location is stored to SIZE. Returns the ultimate containing object to
+ which REF refers. */
-void
-get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
+tree
+get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
HOST_WIDE_INT bitsize, bitpos;
tree toff;
- enum machine_mode mode;
+ machine_mode mode;
int uns, vol;
aff_tree tmp;
tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
aff_combination_add (addr, &tmp);
}
- aff_combination_const (&tmp, sizetype,
- shwi_to_double_int (bitpos / BITS_PER_UNIT));
+ aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
aff_combination_add (addr, &tmp);
- *size = shwi_to_double_int ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
+ *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
+
+ return base;
}
/* Returns true if a region of size SIZE1 at position 0 and a region of
size SIZE2 at position DIFF cannot overlap. */
bool
-aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
+aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
+ const widest_int &size2)
{
- double_int d, bound;
-
/* Unless the difference is a constant, we fail. */
if (diff->n != 0)
return false;
- d = diff->offset;
- if (double_int_negative_p (d))
+ if (wi::neg_p (diff->offset))
{
/* The second object is before the first one, we succeed if the last
element of the second object is before the start of the first one. */
- bound = double_int_add (d, double_int_add (size2, double_int_minus_one));
- return double_int_negative_p (bound);
+ return wi::neg_p (diff->offset + size2 - 1);
}
else
{
/* We succeed if the second object starts after the first one ends. */
- return double_int_scmp (size1, d) <= 0;
+ return wi::les_p (size1, diff->offset);
}
}