GTFILES = $(CPP_ID_DATA_H) $(srcdir)/input.h $(srcdir)/coretypes.h \
$(host_xm_file_list) \
$(tm_file_list) $(HASHTAB_H) $(SPLAY_TREE_H) $(srcdir)/bitmap.h \
- $(srcdir)/alias.h $(srcdir)/coverage.c $(srcdir)/rtl.h \
+ $(srcdir)/wide-int.h $(srcdir)/alias.h $(srcdir)/coverage.c $(srcdir)/rtl.h \
- $(srcdir)/optabs.h $(srcdir)/tree.h $(srcdir)/libfuncs.h $(SYMTAB_H) \
+ $(srcdir)/optabs.h $(srcdir)/tree.h $(srcdir)/tree-core.h \
+ $(srcdir)/libfuncs.h $(SYMTAB_H) \
$(srcdir)/real.h $(srcdir)/function.h $(srcdir)/insn-addr.h $(srcdir)/hwint.h \
$(srcdir)/fixed-value.h \
$(srcdir)/output.h $(srcdir)/cfgloop.h \
$(srcdir)/cselib.h $(srcdir)/basic-block.h $(srcdir)/ipa-ref.h $(srcdir)/cgraph.h \
$(srcdir)/reload.h $(srcdir)/caller-save.c $(srcdir)/symtab.c \
$(srcdir)/alias.c $(srcdir)/bitmap.c $(srcdir)/cselib.c $(srcdir)/cgraph.c \
- $(srcdir)/ipa-prop.c $(srcdir)/ipa-cp.c \
+ $(srcdir)/ipa-prop.c $(srcdir)/ipa-cp.c $(srcdir)/ipa-utils.h \
$(srcdir)/dbxout.c \
+ $(srcdir)/signop.h \
$(srcdir)/dwarf2out.h \
$(srcdir)/dwarf2asm.c \
$(srcdir)/dwarf2cfi.c \
#include "tree-pretty-print.h"
#include "tree-iterator.h"
#include "diagnostic.h"
+#include "wide-int-print.h"
- /* Translate if being used for diagnostics, but not for dump files or
- __PRETTY_FUNCTION. */
- #define M_(msgid) (pp_translate_identifiers (pp) ? _(msgid) : (msgid))
-
/* The pretty-printer code is primarily designed to closely follow
(GNU) C and C++ grammars. That is to be contrasted with spaghetti
codes we used to have in the past. Following a structured
tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (t));
tree type = TREE_TYPE (maxval);
- if (host_integerp (maxval, 0))
- pp_wide_integer (this, tree_low_cst (maxval, 0) + 1);
+ if (tree_fits_shwi_p (maxval))
- pp_wide_integer (pp, tree_to_shwi (maxval) + 1);
++ pp_wide_integer (this, tree_to_shwi (maxval) + 1);
else
- pp_expression (pp, fold_build2 (PLUS_EXPR, type, maxval,
- build_int_cst (type, 1)));
+ expression (fold_build2 (PLUS_EXPR, type, maxval,
+ build_int_cst (type, 1)));
}
- pp_c_right_bracket (pp);
- pp_direct_abstract_declarator (pp, TREE_TYPE (t));
+ pp_c_right_bracket (this);
+ direct_abstract_declarator (TREE_TYPE (t));
break;
case IDENTIFIER_NODE:
if (type
&& tree_int_cst_equal (TYPE_SIZE (type), TREE_OPERAND (e, 1)))
{
- HOST_WIDE_INT bitpos = tree_low_cst (TREE_OPERAND (e, 2), 0);
- HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE (type), 0);
+ HOST_WIDE_INT bitpos = tree_to_shwi (TREE_OPERAND (e, 2));
+ HOST_WIDE_INT size = tree_to_shwi (TYPE_SIZE (type));
if ((bitpos % size) == 0)
{
- pp_c_left_paren (pp);
- pp_c_left_paren (pp);
- pp_type_id (pp, type);
- pp_c_star (pp);
- pp_c_right_paren (pp);
- pp_c_ampersand (pp);
- pp_expression (pp, TREE_OPERAND (e, 0));
- pp_c_right_paren (pp);
- pp_c_left_bracket (pp);
- pp_wide_integer (pp, bitpos / size);
- pp_c_right_bracket (pp);
+ pp_c_left_paren (this);
+ pp_c_left_paren (this);
+ type_id (type);
+ pp_c_star (this);
+ pp_c_right_paren (this);
+ pp_c_ampersand (this);
+ expression (TREE_OPERAND (e, 0));
+ pp_c_right_paren (this);
+ pp_c_left_bracket (this);
+ pp_wide_integer (this, bitpos / size);
+ pp_c_right_bracket (this);
break;
}
}
#include "gimple.h"
#include "c-family/c-objc.h"
#include "c-family/c-common.h"
+ #include "c-family/c-ubsan.h"
+#include "wide-int.h"
/* Possible cases of implicit bad conversions. Used to select
diagnostic messages in convert_for_assignment. */
edge->indirect_info = cgraph_allocate_init_indirect_info ();
edge->indirect_info->ecf_flags = ecf_flags;
- = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
+ /* Record polymorphic call info. */
+ if (call_stmt
+ && (target = gimple_call_fn (call_stmt))
+ && virtual_method_call_p (target))
+ {
+ tree type = obj_type_ref_class (target);
+
+
+ /* Only record types can have virtual calls. */
+ gcc_assert (TREE_CODE (type) == RECORD_TYPE);
+ edge->indirect_info->param_index = -1;
+ edge->indirect_info->otr_token
++ = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
+ edge->indirect_info->otr_type = type;
+ edge->indirect_info->polymorphic = 1;
+ }
+
edge->next_callee = caller->indirect_calls;
if (caller->indirect_calls)
caller->indirect_calls->prev_callee = edge;
tree dtype = TYPE_DOMAIN (t);
tree max = TYPE_MAX_VALUE (dtype);
if (integer_all_onesp (max))
- pp_character (cxx_pp, '0');
+ pp_character (pp, '0');
- else if (host_integerp (max, 0))
- pp_wide_integer (pp, tree_low_cst (max, 0) + 1);
+ else if (tree_fits_shwi_p (max))
- pp_wide_integer (cxx_pp, tree_to_shwi (max) + 1);
++ pp_wide_integer (pp, tree_to_shwi (max) + 1);
else
{
STRIP_NOPS (max);
if (integer_zerop (idx))
{
/* A NULL pointer-to-member constant. */
- pp_cxx_left_paren (cxx_pp);
- pp_cxx_left_paren (cxx_pp);
- dump_type (TREE_TYPE (t), flags);
- pp_cxx_right_paren (cxx_pp);
- pp_character (cxx_pp, '0');
- pp_cxx_right_paren (cxx_pp);
+ pp_cxx_left_paren (pp);
+ pp_cxx_left_paren (pp);
+ dump_type (pp, TREE_TYPE (t), flags);
+ pp_cxx_right_paren (pp);
+ pp_character (pp, '0');
+ pp_cxx_right_paren (pp);
break;
}
- else if (host_integerp (idx, 0))
+ else if (tree_fits_shwi_p (idx))
{
tree virtuals;
unsigned HOST_WIDE_INT n;
case dw_val_class_const_double:
CHECKSUM (at->dw_attr_val.v.val_double);
break;
+ case dw_val_class_wide_int:
+ CHECKSUM (*at->dw_attr_val.v.val_wide);
+ break;
case dw_val_class_vec:
- CHECKSUM (at->dw_attr_val.v.val_vec);
+ CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
+ (at->dw_attr_val.v.val_vec.length
+ * at->dw_attr_val.v.val_vec.elt_size));
break;
case dw_val_class_flag:
CHECKSUM (at->dw_attr_val.v.val_flag);
CHECKSUM (at->dw_attr_val.v.val_double);
break;
+ case dw_val_class_wide_int:
+ CHECKSUM_ULEB128 (DW_FORM_block);
+ CHECKSUM_ULEB128 (sizeof (*at->dw_attr_val.v.val_wide));
+ CHECKSUM (*at->dw_attr_val.v.val_wide);
+ break;
+
case dw_val_class_vec:
CHECKSUM_ULEB128 (DW_FORM_block);
- CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_vec));
- CHECKSUM (at->dw_attr_val.v.val_vec);
+ CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
+ * at->dw_attr_val.v.val_vec.elt_size);
+ CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
+ (at->dw_attr_val.v.val_vec.length
+ * at->dw_attr_val.v.val_vec.elt_size));
break;
case dw_val_class_flag:
not overflow, adjust BITNUM and INNER. */
if (TREE_CODE (inner) == RSHIFT_EXPR
&& TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
- && (wide_int (TREE_OPERAND (inner, 1) + bitnum).ltu_p (TYPE_PRECISION (type))))
- && host_integerp (TREE_OPERAND (inner, 1), 1)
- && bitnum < TYPE_PRECISION (type)
- && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
- < (unsigned) (TYPE_PRECISION (type) - bitnum)))
++ && (max_wide_int (TREE_OPERAND (inner, 1)) + bitnum).ltu_p (TYPE_PRECISION (type)))
{
- bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
+ bitnum += tree_to_hwi (TREE_OPERAND (inner, 1));
inner = TREE_OPERAND (inner, 0);
}
{
unsigned HOST_WIDE_INT cst;
- cst = tree_low_cst (and1, 1);
+ cst = tree_to_uhwi (and1);
- cst &= (HOST_WIDE_INT) -1
+ cst &= HOST_WIDE_INT_M1U
<< (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
change = (cst == 0);
#ifdef LOAD_EXTEND_OP
v = TREE_OPERAND (v, 0);
if (TREE_CODE (v) != VAR_DECL
- || !DECL_VIRTUAL_P (v)
- || !DECL_INITIAL (v)
- || DECL_INITIAL (v) == error_mark_node)
+ || !DECL_VIRTUAL_P (v))
return NULL_TREE;
+ init = ctor_for_folding (v);
+
+ /* The virtual tables should always be born with constructors,
+ and we should always assume that they are available for
+ folding. At the moment we do not stream them in all cases,
+ but it should never happen that the ctor seems unreachable. */
+ gcc_assert (init);
+ if (init == error_mark_node)
+ {
+ gcc_assert (in_lto_p);
+ return NULL_TREE;
+ }
gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
- size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
+ size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
offset += token * size;
- fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
+ fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
offset, size, vtable);
if (!fn || integer_zerop (fn))
return NULL_TREE;
--- /dev/null
- hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
+ /* Basic IPA utilities for type inheritance graph construction and
+ devirtualization.
+ Copyright (C) 2013 Free Software Foundation, Inc.
+ Contributed by Jan Hubicka
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free
+ Software Foundation; either version 3, or (at your option) any later
+ version.
+
+ GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3. If not see
+ <http://www.gnu.org/licenses/>. */
+
+ /* Brief vocabulary:
+ ODR = One Definition Rule
+ In short, the ODR states that:
+ 1 In any translation unit, a template, type, function, or object can
+ have no more than one definition. Some of these can have any number
+ of declarations. A definition provides an instance.
+ 2 In the entire program, an object or non-inline function cannot have
+ more than one definition; if an object or function is used, it must
+ have exactly one definition. You can declare an object or function
+ that is never used, in which case you don't have to provide
+ a definition. In no event can there be more than one definition.
+ 3 Some things, like types, templates, and extern inline functions, can
+ be defined in more than one translation unit. For a given entity,
+ each definition must be the same. Non-extern objects and functions
+ in different translation units are different entities, even if their
+ names and types are the same.
+
+ OTR = OBJ_TYPE_REF
+ This is the Gimple representation of type information of a polymorphic call.
+ It contains two parameters:
+ otr_type is a type of class whose method is called.
+ otr_token is the index into virtual table where address is taken.
+
+ BINFO
+ This is the type inheritance information attached to each tree
+ RECORD_TYPE by the C++ frontend. It provides information about base
+ types and virtual tables.
+
+ BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
+ BINFO also links to its type by BINFO_TYPE and to the virtual table by
+ BINFO_VTABLE.
+
+ Base types of a given type are enumerated by BINFO_BASE_BINFO
+ vector. Members of this vectors are not BINFOs associated
+ with a base type. Rather they are new copies of BINFOs
+ (base BINFOs). Their virtual tables may differ from
+ virtual table of the base type. Also BINFO_OFFSET specifies
+ offset of the base within the type.
+
+ In the case of single inheritance, the virtual table is shared
+ and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
+ inheritance the individual virtual tables are pointed to by
+ BINFO_VTABLE of base binfos (which differs from BINFO_VTABLE of
+ binfo associated to the base type).
+
+ BINFO lookup for a given base type and offset can be done by
+ get_binfo_at_offset. It returns proper BINFO whose virtual table
+ can be used for lookup of virtual methods associated with the
+ base type.
+
+ token
+ This is an index of virtual method in virtual table associated
+ to the type defining it. Token can be looked up from OBJ_TYPE_REF
+ or from DECL_VINDEX of a given virtual table.
+
+ polymorphic (indirect) call
+ This is the callgraph representation of a virtual method call. Every
+ polymorphic call contains otr_type and otr_token taken from
+ original OBJ_TYPE_REF at callgraph construction time.
+
+ What we do here:
+
+ build_type_inheritance_graph triggers a construction of the type inheritance
+ graph.
+
+ We reconstruct it based on types of methods we see in the unit.
+ This means that the graph is not complete. Types with no methods are not
+ inserted into the graph. Also types without virtual methods are not
+ represented at all, though it may be easy to add this.
+
+ The inheritance graph is represented as follows:
+
+ Vertices are structures odr_type. Every odr_type may correspond
+ to one or more tree type nodes that are equivalent by ODR rule.
+ (the multiple type nodes appear only with linktime optimization)
+
+ Edges are represented by odr_type->base and odr_type->derived_types.
+ At the moment we do not track offsets of types for multiple inheritance.
+ Adding this is easy.
+
+ possible_polymorphic_call_targets returns, given an parameters found in
+ indirect polymorphic edge all possible polymorphic call targets of the call.
+
+ pass_ipa_devirt performs simple speculative devirtualization.
+ */
+
+ #include "config.h"
+ #include "system.h"
+ #include "coretypes.h"
+ #include "tm.h"
+ #include "cgraph.h"
+ #include "tree-pass.h"
+ #include "ggc.h"
+ #include "pointer-set.h"
+ #include "target.h"
+ #include "hash-table.h"
+ #include "tree-pretty-print.h"
+ #include "ipa-utils.h"
+ #include "gimple.h"
+ #include "ipa-inline.h"
+ #include "diagnostic.h"
+
+ /* Pointer set of all call targets appearing in the cache. */
+ static pointer_set_t *cached_polymorphic_call_targets;
+
+ /* The node of type inheritance graph. For each type unique in
+ One Definition Rule (ODR) sense, we produce one node linking all
+ main variants of types equivalent to it, bases and derived types. */
+
+ struct GTY(()) odr_type_d
+ {
+ /* leader type. */
+ tree type;
+ /* All bases. */
+ vec<odr_type> GTY((skip)) bases;
+ /* All derived types with virtual methods seen in unit. */
+ vec<odr_type> GTY((skip)) derived_types;
+
+ /* All equivalent types, if more than one. */
+ vec<tree, va_gc> *types;
+ /* Set of all equivalent types, if NON-NULL. */
+ pointer_set_t * GTY((skip)) types_set;
+
+ /* Unique ID indexing the type in odr_types array. */
+ int id;
+ /* Is it in anonymous namespace? */
+ bool anonymous_namespace;
+ };
+
+
+ /* Return true if BINFO corresponds to a type with virtual methods.
+
+ Every type has several BINFOs. One is the BINFO associated by the type
+ while other represents bases of derived types. The BINFOs representing
+ bases do not have BINFO_VTABLE pointer set when this is the single
+ inheritance (because vtables are shared). Look up the BINFO of type
+ and check presence of its vtable. */
+
+ static inline bool
+ polymorphic_type_binfo_p (tree binfo)
+ {
+ /* See if BINFO's type has a virtual table associated with it. */
+ return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
+ }
+
+ /* One Definition Rule hashtable helpers. */
+
+ struct odr_hasher
+ {
+ typedef odr_type_d value_type;
+ typedef union tree_node compare_type;
+ static inline hashval_t hash (const value_type *);
+ static inline bool equal (const value_type *, const compare_type *);
+ static inline void remove (value_type *);
+ };
+
+ /* Produce hash based on type name. */
+
+ hashval_t
+ hash_type_name (tree t)
+ {
+ gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
+
+ /* If not in LTO, all main variants are unique, so we can do
+ pointer hash. */
+ if (!in_lto_p)
+ return htab_hash_pointer (t);
+
+ /* Anonymous types are unique. */
+ if (type_in_anonymous_namespace_p (t))
+ return htab_hash_pointer (t);
+
+ /* For polymorphic types, we can simply hash the virtual table. */
+ if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
+ {
+ tree v = BINFO_VTABLE (TYPE_BINFO (t));
+ hashval_t hash = 0;
+
+ if (TREE_CODE (v) == POINTER_PLUS_EXPR)
+ {
++ hash = TREE_INT_CST_ELT (TREE_OPERAND (v, 1), 0);
+ v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
+ }
+
+ v = DECL_ASSEMBLER_NAME (v);
+ #ifdef ENABLE_CHECKING
+ gcc_assert (!strchr (IDENTIFIER_POINTER (v), '.'));
+ #endif
+ hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
+ return hash;
+ }
+
+ /* Rest is not implemented yet. */
+ gcc_unreachable ();
+ }
+
+ /* Return the computed hashcode for ODR_TYPE. */
+
+ inline hashval_t
+ odr_hasher::hash (const value_type *odr_type)
+ {
+ return hash_type_name (odr_type->type);
+ }
+
+ /* Compare types T1 and T2 and return true if they are
+ equivalent. */
+
+ inline bool
+ odr_hasher::equal (const value_type *t1, const compare_type *ct2)
+ {
+ tree t2 = const_cast <tree> (ct2);
+
+ gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
+ if (t1->type == t2)
+ return true;
+ if (!in_lto_p)
+ return false;
+ return types_same_for_odr (t1->type, t2);
+ }
+
+ /* Free ODR type V. */
+
+ inline void
+ odr_hasher::remove (value_type *v)
+ {
+ v->bases.release ();
+ v->derived_types.release ();
+ if (v->types_set)
+ pointer_set_destroy (v->types_set);
+ ggc_free (v);
+ }
+
+ /* ODR type hash used to lookup ODR type based on tree type node. */
+
+ typedef hash_table <odr_hasher> odr_hash_type;
+ static odr_hash_type odr_hash;
+
+ /* ODR types are also stored into ODR_TYPE vector to allow consistent
+ walking. Bases appear before derived types. Vector is garbage collected
+ so we won't end up visiting empty types. */
+
+ static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
+ #define odr_types (*odr_types_ptr)
+
+ /* TYPE is equivalent to VAL by ODR, but its tree representation differs
+ from VAL->type. This may happen in LTO where tree merging did not merge
+ all variants of the same type. It may or may not mean the ODR violation.
+ Add it to the list of duplicates and warn on some violations. */
+
+ static void
+ add_type_duplicate (odr_type val, tree type)
+ {
+ if (!val->types_set)
+ val->types_set = pointer_set_create ();
+
+ /* See if this duplicate is new. */
+ if (!pointer_set_insert (val->types_set, type))
+ {
+ bool merge = true;
+ bool base_mismatch = false;
+ gcc_assert (in_lto_p);
+ vec_safe_push (val->types, type);
+ unsigned int i,j;
+
+ /* First we compare memory layout. */
+ if (!types_compatible_p (val->type, type))
+ {
+ merge = false;
+ if (BINFO_VTABLE (TYPE_BINFO (val->type))
+ && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
+ "type %qD violates one definition rule ",
+ type))
+ inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
+ "a type with the same name but different layout is "
+ "defined in another translation unit");
+ debug_tree (BINFO_VTABLE (TYPE_BINFO (type)));
+ debug_tree (BINFO_VTABLE (TYPE_BINFO (val->type)));
+ if (cgraph_dump_file)
+ {
+ fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
+
+ print_node (cgraph_dump_file, "", val->type, 0);
+ putc ('\n',cgraph_dump_file);
+ print_node (cgraph_dump_file, "", type, 0);
+ putc ('\n',cgraph_dump_file);
+ }
+ }
+
+ /* Next sanity check that bases are the same. If not, we will end
+ up producing wrong answers. */
+ for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
+ if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
+ {
+ odr_type base = get_odr_type
+ (BINFO_TYPE
+ (BINFO_BASE_BINFO (TYPE_BINFO (type),
+ i)),
+ true);
+ if (val->bases.length () <= j || val->bases[j] != base)
+ base_mismatch = true;
+ j++;
+ }
+ if (base_mismatch)
+ {
+ merge = false;
+
+ if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
+ "type %qD violates one definition rule ",
+ type))
+ inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
+ "a type with the same name but different bases is "
+ "defined in another translation unit");
+ if (cgraph_dump_file)
+ {
+ fprintf (cgraph_dump_file, "ODR bse violation or merging bug?\n");
+
+ print_node (cgraph_dump_file, "", val->type, 0);
+ putc ('\n',cgraph_dump_file);
+ print_node (cgraph_dump_file, "", type, 0);
+ putc ('\n',cgraph_dump_file);
+ }
+ }
+
+ /* Regularize things a little. During LTO same types may come with
+ different BINFOs. Either because their virtual table was
+ not merged by tree merging and only later at decl merging or
+ because one type comes with external vtable, while other
+ with internal. We want to merge equivalent binfos to conserve
+ memory and streaming overhead.
+
+ The external vtables are more harmful: they contain references
+ to external declarations of methods that may be defined in the
+ merged LTO unit. For this reason we absolutely need to remove
+ them and replace by internal variants. Not doing so will lead
+ to incomplete answers from possible_polymorphic_call_targets. */
+ if (!flag_ltrans && merge)
+ {
+ tree master_binfo = TYPE_BINFO (val->type);
+ tree v1 = BINFO_VTABLE (master_binfo);
+ tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
+
+ if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
+ {
+ gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
+ && operand_equal_p (TREE_OPERAND (v1, 1),
+ TREE_OPERAND (v2, 1), 0));
+ v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
+ v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
+ }
+ gcc_assert (DECL_ASSEMBLER_NAME (v1)
+ == DECL_ASSEMBLER_NAME (v2));
+
+ if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
+ {
+ unsigned int i;
+
+ TYPE_BINFO (val->type) = TYPE_BINFO (type);
+ for (i = 0; i < val->types->length(); i++)
+ {
+ if (TYPE_BINFO ((*val->types)[i])
+ == master_binfo)
+ TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
+ }
+ }
+ else
+ TYPE_BINFO (type) = master_binfo;
+ }
+ }
+ }
+
+ /* Get ODR type hash entry for TYPE. If INSERT is true, create
+ possibly new entry. */
+
+ odr_type
+ get_odr_type (tree type, bool insert)
+ {
+ odr_type_d **slot;
+ odr_type val;
+ hashval_t hash;
+
+ type = TYPE_MAIN_VARIANT (type);
+ gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
+ hash = hash_type_name (type);
+ slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
+ if (!slot)
+ return NULL;
+
+ /* See if we already have entry for type. */
+ if (*slot)
+ {
+ val = *slot;
+
+ /* With LTO we need to support multiple tree representation of
+ the same ODR type. */
+ if (val->type != type)
+ add_type_duplicate (val, type);
+ }
+ else
+ {
+ tree binfo = TYPE_BINFO (type);
+ unsigned int i;
+
+ val = ggc_alloc_cleared_odr_type_d ();
+ val->type = type;
+ val->bases = vNULL;
+ val->derived_types = vNULL;
+ val->anonymous_namespace = type_in_anonymous_namespace_p (type);
+ *slot = val;
+ for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
+ /* For now record only polymorphic types. Others are
+ pointless for devirtualization and we cannot precisely
+ determine ODR equivalency of these during LTO. */
+ if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
+ {
+ odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
+ i)),
+ true);
+ base->derived_types.safe_push (val);
+ val->bases.safe_push (base);
+ }
+ /* First record bases, then add into array so ids are increasing. */
+ if (odr_types_ptr)
+ val->id = odr_types.length();
+ vec_safe_push (odr_types_ptr, val);
+ }
+ return val;
+ }
+
+ /* Dump ODR type T and all its derived types. INDENT specifies indentation
+ for recursive printing. */
+
+ static void
+ dump_odr_type (FILE *f, odr_type t, int indent=0)
+ {
+ unsigned int i;
+ fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
+ print_generic_expr (f, t->type, TDF_SLIM);
+ fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)":"");
+ if (TYPE_NAME (t->type))
+ {
+ fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
+ DECL_SOURCE_FILE (TYPE_NAME (t->type)),
+ DECL_SOURCE_LINE (TYPE_NAME (t->type)));
+ }
+ if (t->bases.length())
+ {
+ fprintf (f, "%*s base odr type ids: ", indent * 2, "");
+ for (i = 0; i < t->bases.length(); i++)
+ fprintf (f, " %i", t->bases[i]->id);
+ fprintf (f, "\n");
+ }
+ if (t->derived_types.length())
+ {
+ fprintf (f, "%*s derived types:\n", indent * 2, "");
+ for (i = 0; i < t->derived_types.length(); i++)
+ dump_odr_type (f, t->derived_types[i], indent + 1);
+ }
+ fprintf (f, "\n");
+ }
+
+ /* Dump the type inheritance graph. */
+
+ static void
+ dump_type_inheritance_graph (FILE *f)
+ {
+ unsigned int i;
+ if (!odr_types_ptr)
+ return;
+ fprintf (f, "\n\nType inheritance graph:\n");
+ for (i = 0; i < odr_types.length(); i++)
+ {
+ if (odr_types[i]->bases.length() == 0)
+ dump_odr_type (f, odr_types[i]);
+ }
+ for (i = 0; i < odr_types.length(); i++)
+ {
+ if (odr_types[i]->types && odr_types[i]->types->length())
+ {
+ unsigned int j;
+ fprintf (f, "Duplicate tree types for odr type %i\n", i);
+ print_node (f, "", odr_types[i]->type, 0);
+ for (j = 0; j < odr_types[i]->types->length(); j++)
+ {
+ tree t;
+ fprintf (f, "duplicate #%i\n", j);
+ print_node (f, "", (*odr_types[i]->types)[j], 0);
+ t = (*odr_types[i]->types)[j];
+ while (TYPE_P (t) && TYPE_CONTEXT (t))
+ {
+ t = TYPE_CONTEXT (t);
+ print_node (f, "", t, 0);
+ }
+ putc ('\n',f);
+ }
+ }
+ }
+ }
+
+ /* Given method type T, return type of class it belongs to.
+ Lookup this pointer and get its type. */
+
+ tree
+ method_class_type (tree t)
+ {
+ tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
+
+ return TREE_TYPE (first_parm_type);
+ }
+
+ /* Initialize IPA devirt and build inheritance tree graph. */
+
+ void
+ build_type_inheritance_graph (void)
+ {
+ struct cgraph_node *n;
+ FILE *inheritance_dump_file;
+ int flags;
+
+ if (odr_hash.is_created ())
+ return;
+ timevar_push (TV_IPA_INHERITANCE);
+ inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
+ odr_hash.create (23);
+
+ /* We reconstruct the graph starting from the types of all methods seen
+ in the unit. */
+ FOR_EACH_FUNCTION (n)
+ if (DECL_VIRTUAL_P (n->symbol.decl)
+ && symtab_real_symbol_p ((symtab_node)n))
+ get_odr_type (method_class_type (TREE_TYPE (n->symbol.decl)), true);
+ if (inheritance_dump_file)
+ {
+ dump_type_inheritance_graph (inheritance_dump_file);
+ dump_end (TDI_inheritance, inheritance_dump_file);
+ }
+ timevar_pop (TV_IPA_INHERITANCE);
+ }
+
+ /* If TARGET has associated node, record it in the NODES array. */
+
+ static void
+ maybe_record_node (vec <cgraph_node *> &nodes,
+ tree target, pointer_set_t *inserted)
+ {
+ struct cgraph_node *target_node;
+ enum built_in_function fcode;
+
+ if (target
+ /* Those are used to mark impossible scenarios. */
+ && (fcode = DECL_FUNCTION_CODE (target))
+ != BUILT_IN_UNREACHABLE
+ && fcode != BUILT_IN_TRAP
+ && !pointer_set_insert (inserted, target)
+ && (target_node = cgraph_get_node (target)) != NULL
+ && symtab_real_symbol_p ((symtab_node)target_node))
+ {
+ pointer_set_insert (cached_polymorphic_call_targets,
+ target_node);
+ nodes.safe_push (target_node);
+ }
+ }
+
+ /* See if BINFO's type matches OTR_TYPE. If so, look up the method
+ in vtable of TYPE_BINFO and insert method to NODES array.
+ Otherwise recurse to base BINFOs.
+ This matches what get_binfo_at_offset does, but with the offset
+ being unknown.
+
+ TYPE_BINFO is the binfo holding a virtual table matching
+ BINFO's type. In the case of single inheritance, this
+ is binfo of BINFO's type ancestor (vtable is shared),
+ otherwise it is binfo of BINFO's type.
+
+ MATCHED_VTABLES tracks virtual tables we already did lookup
+ for virtual function in.
+ */
+
+ static void
+ record_binfo (vec <cgraph_node *> &nodes,
+ tree binfo,
+ tree otr_type,
+ tree type_binfo,
+ HOST_WIDE_INT otr_token,
+ pointer_set_t *inserted,
+ pointer_set_t *matched_vtables)
+ {
+ tree type = BINFO_TYPE (binfo);
+ int i;
+ tree base_binfo;
+
+ gcc_checking_assert (BINFO_VTABLE (type_binfo));
+
+ if (types_same_for_odr (type, otr_type)
+ && !pointer_set_insert (matched_vtables, BINFO_VTABLE (type_binfo)))
+ {
+ tree target = gimple_get_virt_method_for_binfo (otr_token, type_binfo);
+ if (target)
+ maybe_record_node (nodes, target, inserted);
+ return;
+ }
+
+ /* Walk bases. */
+ for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
+ /* Walking bases that have no virtual method is a pointless exercise. */
+ if (polymorphic_type_binfo_p (base_binfo))
+ record_binfo (nodes, base_binfo, otr_type,
+ /* In the case of single inheritance, the virtual table
+ is shared with the outer type. */
+ BINFO_VTABLE (base_binfo) ? base_binfo : type_binfo,
+ otr_token, inserted,
+ matched_vtables);
+ }
+
+ /* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
+ of TYPE, insert them to NODES, recurse into derived nodes.
+ INSERTED is used to avoid duplicate insertions of methods into NODES.
+ MATCHED_VTABLES are used to avoid duplicate walking vtables. */
+
+ static void
+ possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
+ pointer_set_t *inserted,
+ pointer_set_t *matched_vtables,
+ tree otr_type,
+ odr_type type,
+ HOST_WIDE_INT otr_token)
+ {
+ tree binfo = TYPE_BINFO (type->type);
+ unsigned int i;
+
+ record_binfo (nodes, binfo, otr_type, binfo, otr_token, inserted,
+ matched_vtables);
+ for (i = 0; i < type->derived_types.length(); i++)
+ possible_polymorphic_call_targets_1 (nodes, inserted,
+ matched_vtables,
+ otr_type,
+ type->derived_types[i],
+ otr_token);
+ }
+
+ /* Cache of queries for polymorphic call targets.
+
+ Enumerating all call targets may get expensive when there are many
+ polymorphic calls in the program, so we memoize all the previous
+ queries and avoid duplicated work. */
+
+ struct polymorphic_call_target_d
+ {
+ odr_type type;
+ HOST_WIDE_INT otr_token;
+ vec <cgraph_node *> targets;
+ };
+
+ /* Polymorphic call target cache helpers. */
+
+ struct polymorphic_call_target_hasher
+ {
+ typedef polymorphic_call_target_d value_type;
+ typedef polymorphic_call_target_d compare_type;
+ static inline hashval_t hash (const value_type *);
+ static inline bool equal (const value_type *, const compare_type *);
+ static inline void remove (value_type *);
+ };
+
+ /* Return the computed hashcode for ODR_QUERY. */
+
+ inline hashval_t
+ polymorphic_call_target_hasher::hash (const value_type *odr_query)
+ {
+ return iterative_hash_hashval_t (odr_query->type->id,
+ odr_query->otr_token);
+ }
+
+ /* Compare cache entries T1 and T2. */
+
+ inline bool
+ polymorphic_call_target_hasher::equal (const value_type *t1,
+ const compare_type *t2)
+ {
+ return t1->type == t2->type && t1->otr_token == t2->otr_token;
+ }
+
+ /* Remove entry in polymorphic call target cache hash. */
+
+ inline void
+ polymorphic_call_target_hasher::remove (value_type *v)
+ {
+ v->targets.release ();
+ free (v);
+ }
+
+ /* Polymorphic call target query cache. */
+
+ typedef hash_table <polymorphic_call_target_hasher>
+ polymorphic_call_target_hash_type;
+ static polymorphic_call_target_hash_type polymorphic_call_target_hash;
+
+ /* Destroy polymorphic call target query cache. */
+
+ static void
+ free_polymorphic_call_targets_hash ()
+ {
+ if (cached_polymorphic_call_targets)
+ {
+ polymorphic_call_target_hash.dispose ();
+ pointer_set_destroy (cached_polymorphic_call_targets);
+ cached_polymorphic_call_targets = NULL;
+ }
+ }
+
+ /* When virtual function is removed, we may need to flush the cache. */
+
+ static void
+ devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
+ {
+ if (cached_polymorphic_call_targets
+ && pointer_set_contains (cached_polymorphic_call_targets, n))
+ free_polymorphic_call_targets_hash ();
+ }
+
+ /* Return vector containing possible targets of polymorphic call of type
+ OTR_TYPE calling method OTR_TOKEN with OFFSET. If FINALp is non-NULL,
+ store true if the list is complete.
+ CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
+ in the target cache. If user needs to visit every target list
+ just once, it can memoize them.
+
+ Returned vector is placed into cache. It is NOT caller's responsibility
+ to free it. The vector can be freed on cgraph_remove_node call if
+ the particular node is a virtual function present in the cache. */
+
+ vec <cgraph_node *>
+ possible_polymorphic_call_targets (tree otr_type,
+ HOST_WIDE_INT otr_token,
+ bool *finalp,
+ void **cache_token)
+ {
+ static struct cgraph_node_hook_list *node_removal_hook_holder;
+ pointer_set_t *inserted;
+ pointer_set_t *matched_vtables;
+ vec <cgraph_node *> nodes=vNULL;
+ odr_type type;
+ polymorphic_call_target_d key;
+ polymorphic_call_target_d **slot;
+ unsigned int i;
+ tree binfo, target;
+
+ if (finalp)
+ *finalp = false;
+
+ type = get_odr_type (otr_type, false);
+ /* If we do not have the type in our hash it means we have never seen any method
+ in it. */
+ if (!type)
+ return nodes;
+
+ /* For anonymous namespace types we can attempt to build full type.
+ All derivations must be in this unit. */
+ if (type->anonymous_namespace && finalp && !flag_ltrans)
+ *finalp = true;
+
+ /* Initialize query cache. */
+ if (!cached_polymorphic_call_targets)
+ {
+ cached_polymorphic_call_targets = pointer_set_create ();
+ polymorphic_call_target_hash.create (23);
+ if (!node_removal_hook_holder)
+ node_removal_hook_holder =
+ cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
+ }
+
+ /* Lookup cached answer. */
+ key.type = type;
+ key.otr_token = otr_token;
+ slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
+ if (cache_token)
+ *cache_token = (void *)*slot;
+ if (*slot)
+ return (*slot)->targets;
+
+ /* Do actual search. */
+ timevar_push (TV_IPA_VIRTUAL_CALL);
+ *slot = XCNEW (polymorphic_call_target_d);
+ if (cache_token)
+ *cache_token = (void *)*slot;
+ (*slot)->type = type;
+ (*slot)->otr_token = otr_token;
+
+ inserted = pointer_set_create ();
+ matched_vtables = pointer_set_create ();
+
+ /* First see virtual method of type itself. */
+
+ binfo = TYPE_BINFO (type->type);
+ target = gimple_get_virt_method_for_binfo (otr_token, binfo);
+ if (target)
+ maybe_record_node (nodes, target, inserted);
+ pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
+
+ /* TODO: If the method is final, we can stop here and signalize that
+ the list is final. We need the C++ FE to pass us info about final
+ methods and classes. */
+
+ /* Walk recursively all derived types. Here we need to look up the proper
+ basetype via their BINFO walk; that is done by record_binfo. */
+ for (i = 0; i < type->derived_types.length(); i++)
+ possible_polymorphic_call_targets_1 (nodes, inserted,
+ matched_vtables,
+ otr_type, type->derived_types[i],
+ otr_token);
+ (*slot)->targets = nodes;
+
+ pointer_set_destroy (inserted);
+ pointer_set_destroy (matched_vtables);
+ timevar_pop (TV_IPA_VIRTUAL_CALL);
+ return nodes;
+ }
+
+ /* Dump all possible targets of a polymorphic call. */
+
+ void
+ dump_possible_polymorphic_call_targets (FILE *f,
+ tree otr_type,
+ HOST_WIDE_INT otr_token)
+ {
+ vec <cgraph_node *> targets;
+ bool final;
+ odr_type type = get_odr_type (otr_type, false);
+ unsigned int i;
+
+ if (!type)
+ return;
+ targets = possible_polymorphic_call_targets (otr_type, otr_token,
+ &final);
+ fprintf (f, "Targets of polymorphic call of type %i ", type->id);
+ print_generic_expr (f, type->type, TDF_SLIM);
+ fprintf (f, " token %i%s:",
+ (int)otr_token,
+ final ? " (full list)" : " (partial list, may call to other unit)");
+ for (i = 0; i < targets.length (); i++)
+ fprintf (f, " %s/%i", cgraph_node_name (targets[i]),
+ targets[i]->symbol.order);
+ fprintf (f, "\n");
+ }
+
+
+ /* Return true if N can possibly be a target of a polymorphic call of
+ OTR_TYPE/OTR_TOKEN. */
+
+ bool
+ possible_polymorphic_call_target_p (tree otr_type,
+ HOST_WIDE_INT otr_token,
+ struct cgraph_node *n)
+ {
+ vec <cgraph_node *> targets;
+ unsigned int i;
+
+ if (!odr_hash.is_created ())
+ return true;
+ targets = possible_polymorphic_call_targets (otr_type, otr_token);
+ for (i = 0; i < targets.length (); i++)
+ if (n == targets[i])
+ return true;
+ return false;
+ }
+
+
+ /* After callgraph construction new external nodes may appear.
+ Add them into the graph. */
+
+ void
+ update_type_inheritance_graph (void)
+ {
+ struct cgraph_node *n;
+
+ if (!odr_hash.is_created ())
+ return;
+ free_polymorphic_call_targets_hash ();
+ timevar_push (TV_IPA_INHERITANCE);
+ /* We reconstruct the graph starting from the types of all methods seen
+ in the unit. */
+ FOR_EACH_FUNCTION (n)
+ if (DECL_VIRTUAL_P (n->symbol.decl)
+ && !n->symbol.definition
+ && symtab_real_symbol_p ((symtab_node)n))
+ get_odr_type (method_class_type (TREE_TYPE (n->symbol.decl)), true);
+ timevar_pop (TV_IPA_INHERITANCE);
+ }
+
+
+ /* Return true if N looks like a likely target of a polymorphic call.
+ Rule out cxa_pure_virtual, noreturns, functions declared cold and
+ other obvious cases. */
+
+ bool
+ likely_target_p (struct cgraph_node *n)
+ {
+ int flags;
+ /* cxa_pure_virtual and similar things are not likely. */
+ if (TREE_CODE (TREE_TYPE (n->symbol.decl)) != METHOD_TYPE)
+ return false;
+ flags = flags_from_decl_or_type (n->symbol.decl);
+ if (flags & ECF_NORETURN)
+ return false;
+ if (lookup_attribute ("cold",
+ DECL_ATTRIBUTES (n->symbol.decl)))
+ return false;
+ if (n->frequency < NODE_FREQUENCY_NORMAL)
+ return false;
+ return true;
+ }
+
+ /* The ipa-devirt pass.
+ This performs very trivial devirtualization:
+ 1) when polymorphic call is known to have precisely one target,
+ turn it into direct call
+ 2) when polymorphic call has only one likely target in the unit,
+ turn it into speculative call. */
+
+ static unsigned int
+ ipa_devirt (void)
+ {
+ struct cgraph_node *n;
+ struct pointer_set_t *bad_call_targets = pointer_set_create ();
+ struct cgraph_edge *e;
+
+ int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
+ int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
+ int nwrong = 0, nok = 0, nexternal = 0;;
+
+ FOR_EACH_DEFINED_FUNCTION (n)
+ {
+ bool update = false;
+ if (dump_file && n->indirect_calls)
+ fprintf (dump_file, "\n\nProcesing function %s/%i\n",
+ cgraph_node_name (n), n->symbol.order);
+ for (e = n->indirect_calls; e; e = e->next_callee)
+ if (e->indirect_info->polymorphic)
+ {
+ struct cgraph_node *likely_target = NULL;
+ void *cache_token;
+ bool final;
+ vec <cgraph_node *>targets
+ = possible_polymorphic_call_targets
+ (e, &final, &cache_token);
+ unsigned int i;
+
+ if (dump_file)
+ dump_possible_polymorphic_call_targets
+ (dump_file, e);
+ npolymorphic++;
+
+ if (final)
+ {
+ gcc_assert (targets.length());
+ if (targets.length() == 1)
+ {
+ if (dump_file)
+ fprintf (dump_file,
+ "Devirtualizing call in %s/%i to %s/%i\n",
+ cgraph_node_name (n), n->symbol.order,
+ cgraph_node_name (targets[0]), targets[0]->symbol.order);
+ cgraph_make_edge_direct (e, targets[0]);
+ ndevirtualized++;
+ update = true;
+ continue;
+ }
+ }
+ if (!flag_devirtualize_speculatively)
+ continue;
+ if (!cgraph_maybe_hot_edge_p (e))
+ {
+ if (dump_file)
+ fprintf (dump_file, "Call is cold\n");
+ ncold++;
+ continue;
+ }
+ if (e->speculative)
+ {
+ if (dump_file)
+ fprintf (dump_file, "Call is aready speculated\n");
+ nspeculated++;
+
+ /* When dumping see if we agree with speculation. */
+ if (!dump_file)
+ continue;
+ }
+ if (pointer_set_contains (bad_call_targets,
+ cache_token))
+ {
+ if (dump_file)
+ fprintf (dump_file, "Target list is known to be useless\n");
+ nmultiple++;
+ continue;
+ }
+ for (i = 0; i < targets.length(); i++)
+ if (likely_target_p (targets[i]))
+ {
+ if (likely_target)
+ {
+ likely_target = NULL;
+ if (dump_file)
+ fprintf (dump_file, "More than one likely target\n");
+ nmultiple++;
+ break;
+ }
+ likely_target = targets[i];
+ }
+ if (!likely_target)
+ {
+ pointer_set_insert (bad_call_targets, cache_token);
+ continue;
+ }
+ /* This is reached only when dumping; check if we agree or disagree
+ with the speculation. */
+ if (e->speculative)
+ {
+ struct cgraph_edge *e2;
+ struct ipa_ref *ref;
+ cgraph_speculative_call_info (e, e2, e, ref);
+ if (cgraph_function_or_thunk_node (e2->callee, NULL)
+ == cgraph_function_or_thunk_node (likely_target, NULL))
+ {
+ fprintf (dump_file, "We agree with speculation\n");
+ nok++;
+ }
+ else
+ {
+ fprintf (dump_file, "We disagree with speculation\n");
+ nwrong++;
+ }
+ continue;
+ }
+ if (!likely_target->symbol.definition)
+ {
+ if (dump_file)
+ fprintf (dump_file, "Target is not an definition\n");
+ nnotdefined++;
+ continue;
+ }
+ /* Do not introduce new references to external symbols. While we
+ can handle these just well, it is common for programs to be
+ linked incorrectly with headers defining methods they are not
+ actually linked with. */
+ if (DECL_EXTERNAL (likely_target->symbol.decl))
+ {
+ if (dump_file)
+ fprintf (dump_file, "Target is external\n");
+ nexternal++;
+ continue;
+ }
+ if (cgraph_function_body_availability (likely_target)
+ <= AVAIL_OVERWRITABLE
+ && symtab_can_be_discarded ((symtab_node) likely_target))
+ {
+ if (dump_file)
+ fprintf (dump_file, "Target is overwritable\n");
+ noverwritable++;
+ continue;
+ }
+ else
+ {
+ if (dump_file)
+ fprintf (dump_file,
+ "Speculatively devirtualizing call in %s/%i to %s/%i\n",
+ cgraph_node_name (n), n->symbol.order,
+ cgraph_node_name (likely_target),
+ likely_target->symbol.order);
+ if (!symtab_can_be_discarded ((symtab_node) likely_target))
+ likely_target = cgraph (symtab_nonoverwritable_alias ((symtab_node)likely_target));
+ nconverted++;
+ update = true;
+ cgraph_turn_edge_to_speculative
+ (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
+ }
+ }
+ if (update)
+ inline_update_overall_summary (n);
+ }
+ pointer_set_destroy (bad_call_targets);
+
+ if (dump_file)
+ fprintf (dump_file,
+ "%i polymorphic calls, %i devirtualized,"
+ " %i speculatively devirtualized, %i cold\n"
+ "%i have multiple targets, %i overwritable,"
+ " %i already speculated (%i agree, %i disagree),"
+ " %i external, %i not defined\n",
+ npolymorphic, ndevirtualized, nconverted, ncold,
+ nmultiple, noverwritable, nspeculated, nok, nwrong,
+ nexternal, nnotdefined);
+ return ndevirtualized ? TODO_remove_functions : 0;
+ }
+
+ /* Gate for the IPA devirtualization optimization. */
+
+ static bool
+ gate_ipa_devirt (void)
+ {
+ /* FIXME: We should remove the optimize check after we ensure we never run
+ IPA passes when not optimizing. */
+ return flag_devirtualize && !in_lto_p;
+ }
+
+ namespace {
+
+ const pass_data pass_data_ipa_devirt =
+ {
+ IPA_PASS, /* type */
+ "devirt", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ true, /* has_gate */
+ true, /* has_execute */
+ TV_IPA_DEVIRT, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ ( TODO_dump_symtab ), /* todo_flags_finish */
+ };
+
+ class pass_ipa_devirt : public ipa_opt_pass_d
+ {
+ public:
+ pass_ipa_devirt(gcc::context *ctxt)
+ : ipa_opt_pass_d(pass_data_ipa_devirt, ctxt,
+ NULL, /* generate_summary */
+ NULL, /* write_summary */
+ NULL, /* read_summary */
+ NULL, /* write_optimization_summary */
+ NULL, /* read_optimization_summary */
+ NULL, /* stmt_fixup */
+ 0, /* function_transform_todo_flags_start */
+ NULL, /* function_transform */
+ NULL) /* variable_transform */
+ {}
+
+ /* opt_pass methods: */
+ bool gate () { return gate_ipa_devirt (); }
+ unsigned int execute () { return ipa_devirt (); }
+
+ }; // class pass_ipa_devirt
+
+ } // anon namespace
+
+ ipa_opt_pass_d *
+ make_pass_ipa_devirt (gcc::context *ctxt)
+ {
+ return new pass_ipa_devirt (ctxt);
+ }
+
+ #include "gt-ipa-devirt.h"
cs = ipa_note_param_call (node, index, call);
ii = cs->indirect_info;
ii->offset = anc_offset;
- ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
+ ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
- ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
+ ii->otr_type = obj_type_ref_class (target);
ii->polymorphic = 1;
}
}
}
- max_vf = tree_low_cst (OMP_CLAUSE_SAFELEN_EXPR (c), 0);
+ /* Return maximum possible vectorization factor for the target. */
+
+ static int
+ omp_max_vf (void)
+ {
+ if (!optimize
+ || optimize_debug
+ || (!flag_tree_vectorize
+ && global_options_set.x_flag_tree_vectorize))
+ return 1;
+
+ int vs = targetm.vectorize.autovectorize_vector_sizes ();
+ if (vs)
+ {
+ vs = 1 << floor_log2 (vs);
+ return vs;
+ }
+ enum machine_mode vqimode = targetm.vectorize.preferred_simd_mode (QImode);
+ if (GET_MODE_CLASS (vqimode) == MODE_VECTOR_INT)
+ return GET_MODE_NUNITS (vqimode);
+ return 1;
+ }
+
+ /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
+ privatization. */
+
+ static bool
+ lower_rec_simd_input_clauses (tree new_var, omp_context *ctx, int &max_vf,
+ tree &idx, tree &lane, tree &ivar, tree &lvar)
+ {
+ if (max_vf == 0)
+ {
+ max_vf = omp_max_vf ();
+ if (max_vf > 1)
+ {
+ tree c = find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
+ OMP_CLAUSE_SAFELEN);
+ if (c
+ && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c), max_vf) == -1)
++ max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
+ }
+ if (max_vf > 1)
+ {
+ idx = create_tmp_var (unsigned_type_node, NULL);
+ lane = create_tmp_var (unsigned_type_node, NULL);
+ }
+ }
+ if (max_vf == 1)
+ return false;
+
+ tree atype = build_array_type_nelts (TREE_TYPE (new_var), max_vf);
+ tree avar = create_tmp_var_raw (atype, NULL);
+ if (TREE_ADDRESSABLE (new_var))
+ TREE_ADDRESSABLE (avar) = 1;
+ DECL_ATTRIBUTES (avar)
+ = tree_cons (get_identifier ("omp simd array"), NULL,
+ DECL_ATTRIBUTES (avar));
+ gimple_add_tmp_var (avar);
+ ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, idx,
+ NULL_TREE, NULL_TREE);
+ lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, lane,
+ NULL_TREE, NULL_TREE);
+ SET_DECL_VALUE_EXPR (new_var, lvar);
+ DECL_HAS_VALUE_EXPR_P (new_var) = 1;
+ return true;
+ }
+
/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
from the receiver (aka child) side and initializers for REFERENCE_TYPE
private variables. Initialization statements go in ILIST, while calls
add_loop (loop, trip_loop);
}
- if (!host_integerp (safelen, 1)
- || (unsigned HOST_WIDE_INT) tree_low_cst (safelen, 1)
- > INT_MAX)
+ /* A subroutine of expand_omp_for. Generate code for a simd non-worksharing
+ loop. Given parameters:
+
+ for (V = N1; V cond N2; V += STEP) BODY;
+
+ where COND is "<" or ">", we generate pseudocode
+
+ V = N1;
+ goto L1;
+ L0:
+ BODY;
+ V += STEP;
+ L1:
+ if (V cond N2) goto L0; else goto L2;
+ L2:
+
+ For collapsed loops, given parameters:
+ collapse(3)
+ for (V1 = N11; V1 cond1 N12; V1 += STEP1)
+ for (V2 = N21; V2 cond2 N22; V2 += STEP2)
+ for (V3 = N31; V3 cond3 N32; V3 += STEP3)
+ BODY;
+
+ we generate pseudocode
+
+ if (cond3 is <)
+ adj = STEP3 - 1;
+ else
+ adj = STEP3 + 1;
+ count3 = (adj + N32 - N31) / STEP3;
+ if (cond2 is <)
+ adj = STEP2 - 1;
+ else
+ adj = STEP2 + 1;
+ count2 = (adj + N22 - N21) / STEP2;
+ if (cond1 is <)
+ adj = STEP1 - 1;
+ else
+ adj = STEP1 + 1;
+ count1 = (adj + N12 - N11) / STEP1;
+ count = count1 * count2 * count3;
+ V = 0;
+ V1 = N11;
+ V2 = N21;
+ V3 = N31;
+ goto L1;
+ L0:
+ BODY;
+ V += 1;
+ V3 += STEP3;
+ V2 += (V3 cond3 N32) ? 0 : STEP2;
+ V3 = (V3 cond3 N32) ? V3 : N31;
+ V1 += (V2 cond2 N22) ? 0 : STEP1;
+ V2 = (V2 cond2 N22) ? V2 : N21;
+ L1:
+ if (V < count) goto L0; else goto L2;
+ L2:
+
+ */
+
+ static void
+ expand_omp_simd (struct omp_region *region, struct omp_for_data *fd)
+ {
+ tree type, t;
+ basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, l2_bb, l2_dom_bb;
+ gimple_stmt_iterator gsi;
+ gimple stmt;
+ bool broken_loop = region->cont == NULL;
+ edge e, ne;
+ tree *counts = NULL;
+ int i;
+ tree safelen = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
+ OMP_CLAUSE_SAFELEN);
+ tree simduid = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
+ OMP_CLAUSE__SIMDUID_);
+ tree n2;
+
+ type = TREE_TYPE (fd->loop.v);
+ entry_bb = region->entry;
+ cont_bb = region->cont;
+ gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
+ gcc_assert (broken_loop
+ || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
+ l0_bb = FALLTHRU_EDGE (entry_bb)->dest;
+ if (!broken_loop)
+ {
+ gcc_assert (BRANCH_EDGE (cont_bb)->dest == l0_bb);
+ gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
+ l1_bb = split_block (cont_bb, last_stmt (cont_bb))->dest;
+ l2_bb = BRANCH_EDGE (entry_bb)->dest;
+ }
+ else
+ {
+ BRANCH_EDGE (entry_bb)->flags &= ~EDGE_ABNORMAL;
+ l1_bb = split_edge (BRANCH_EDGE (entry_bb));
+ l2_bb = single_succ (l1_bb);
+ }
+ exit_bb = region->exit;
+ l2_dom_bb = NULL;
+
+ gsi = gsi_last_bb (entry_bb);
+
+ gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
+ /* Not needed in SSA form right now. */
+ gcc_assert (!gimple_in_ssa_p (cfun));
+ if (fd->collapse > 1)
+ {
+ int first_zero_iter = -1;
+ basic_block zero_iter_bb = l2_bb;
+
+ counts = XALLOCAVEC (tree, fd->collapse);
+ expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
+ zero_iter_bb, first_zero_iter,
+ l2_dom_bb);
+ }
+ if (l2_dom_bb == NULL)
+ l2_dom_bb = l1_bb;
+
+ n2 = fd->loop.n2;
+ if (0)
+ /* Place holder for gimple_omp_for_combined_into_p() in
+ the upcoming gomp-4_0-branch merge. */;
+ else
+ {
+ expand_omp_build_assign (&gsi, fd->loop.v,
+ fold_convert (type, fd->loop.n1));
+ if (fd->collapse > 1)
+ for (i = 0; i < fd->collapse; i++)
+ {
+ tree itype = TREE_TYPE (fd->loops[i].v);
+ if (POINTER_TYPE_P (itype))
+ itype = signed_type_for (itype);
+ t = fold_convert (TREE_TYPE (fd->loops[i].v), fd->loops[i].n1);
+ expand_omp_build_assign (&gsi, fd->loops[i].v, t);
+ }
+ }
+
+ /* Remove the GIMPLE_OMP_FOR statement. */
+ gsi_remove (&gsi, true);
+
+ if (!broken_loop)
+ {
+ /* Code to control the increment goes in the CONT_BB. */
+ gsi = gsi_last_bb (cont_bb);
+ stmt = gsi_stmt (gsi);
+ gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
+
+ if (POINTER_TYPE_P (type))
+ t = fold_build_pointer_plus (fd->loop.v, fd->loop.step);
+ else
+ t = fold_build2 (PLUS_EXPR, type, fd->loop.v, fd->loop.step);
+ expand_omp_build_assign (&gsi, fd->loop.v, t);
+
+ if (fd->collapse > 1)
+ {
+ i = fd->collapse - 1;
+ if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i].v)))
+ {
+ t = fold_convert (sizetype, fd->loops[i].step);
+ t = fold_build_pointer_plus (fd->loops[i].v, t);
+ }
+ else
+ {
+ t = fold_convert (TREE_TYPE (fd->loops[i].v),
+ fd->loops[i].step);
+ t = fold_build2 (PLUS_EXPR, TREE_TYPE (fd->loops[i].v),
+ fd->loops[i].v, t);
+ }
+ expand_omp_build_assign (&gsi, fd->loops[i].v, t);
+
+ for (i = fd->collapse - 1; i > 0; i--)
+ {
+ tree itype = TREE_TYPE (fd->loops[i].v);
+ tree itype2 = TREE_TYPE (fd->loops[i - 1].v);
+ if (POINTER_TYPE_P (itype2))
+ itype2 = signed_type_for (itype2);
+ t = build3 (COND_EXPR, itype2,
+ build2 (fd->loops[i].cond_code, boolean_type_node,
+ fd->loops[i].v,
+ fold_convert (itype, fd->loops[i].n2)),
+ build_int_cst (itype2, 0),
+ fold_convert (itype2, fd->loops[i - 1].step));
+ if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i - 1].v)))
+ t = fold_build_pointer_plus (fd->loops[i - 1].v, t);
+ else
+ t = fold_build2 (PLUS_EXPR, itype2, fd->loops[i - 1].v, t);
+ expand_omp_build_assign (&gsi, fd->loops[i - 1].v, t);
+
+ t = build3 (COND_EXPR, itype,
+ build2 (fd->loops[i].cond_code, boolean_type_node,
+ fd->loops[i].v,
+ fold_convert (itype, fd->loops[i].n2)),
+ fd->loops[i].v,
+ fold_convert (itype, fd->loops[i].n1));
+ expand_omp_build_assign (&gsi, fd->loops[i].v, t);
+ }
+ }
+
+ /* Remove GIMPLE_OMP_CONTINUE. */
+ gsi_remove (&gsi, true);
+ }
+
+ /* Emit the condition in L1_BB. */
+ gsi = gsi_start_bb (l1_bb);
+
+ t = fold_convert (type, n2);
+ t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+ false, GSI_CONTINUE_LINKING);
+ t = build2 (fd->loop.cond_code, boolean_type_node, fd->loop.v, t);
+ stmt = gimple_build_cond_empty (t);
+ gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
+ if (walk_tree (gimple_cond_lhs_ptr (stmt), expand_omp_regimplify_p,
+ NULL, NULL)
+ || walk_tree (gimple_cond_rhs_ptr (stmt), expand_omp_regimplify_p,
+ NULL, NULL))
+ {
+ gsi = gsi_for_stmt (stmt);
+ gimple_regimplify_operands (stmt, &gsi);
+ }
+
+ /* Remove GIMPLE_OMP_RETURN. */
+ gsi = gsi_last_bb (exit_bb);
+ gsi_remove (&gsi, true);
+
+ /* Connect the new blocks. */
+ remove_edge (FALLTHRU_EDGE (entry_bb));
+
+ if (!broken_loop)
+ {
+ remove_edge (BRANCH_EDGE (entry_bb));
+ make_edge (entry_bb, l1_bb, EDGE_FALLTHRU);
+
+ e = BRANCH_EDGE (l1_bb);
+ ne = FALLTHRU_EDGE (l1_bb);
+ e->flags = EDGE_TRUE_VALUE;
+ }
+ else
+ {
+ single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
+
+ ne = single_succ_edge (l1_bb);
+ e = make_edge (l1_bb, l0_bb, EDGE_TRUE_VALUE);
+
+ }
+ ne->flags = EDGE_FALSE_VALUE;
+ e->probability = REG_BR_PROB_BASE * 7 / 8;
+ ne->probability = REG_BR_PROB_BASE / 8;
+
+ set_immediate_dominator (CDI_DOMINATORS, l1_bb, entry_bb);
+ set_immediate_dominator (CDI_DOMINATORS, l2_bb, l2_dom_bb);
+ set_immediate_dominator (CDI_DOMINATORS, l0_bb, l1_bb);
+
+ if (!broken_loop)
+ {
+ struct loop *loop = alloc_loop ();
+ loop->header = l1_bb;
+ loop->latch = e->dest;
+ add_loop (loop, l1_bb->loop_father);
+ if (safelen == NULL_TREE)
+ loop->safelen = INT_MAX;
+ else
+ {
+ safelen = OMP_CLAUSE_SAFELEN_EXPR (safelen);
- loop->safelen = tree_low_cst (safelen, 1);
++ if (!tree_fits_uhwi_p (safelen)
++ || tree_to_uhwi (safelen) > INT_MAX)
+ loop->safelen = INT_MAX;
+ else
++ loop->safelen = tree_to_uhwi (safelen);
+ if (loop->safelen == 1)
+ loop->safelen = 0;
+ }
+ if (simduid)
+ {
+ loop->simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
+ cfun->has_simduid_loops = true;
+ }
+ /* If not -fno-tree-vectorize, hint that we want to vectorize
+ the loop. */
+ if ((flag_tree_vectorize
+ || !global_options_set.x_flag_tree_vectorize)
+ && loop->safelen > 1)
+ {
+ loop->force_vect = true;
+ cfun->has_force_vect_loops = true;
+ }
+ }
+ }
+
/* Expand the OpenMP loop defined by REGION. */
/* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
vinit = fd->loop.n1;
if (cond_code == EQ_EXPR
- && host_integerp (fd->loop.n2, 0)
+ && tree_fits_shwi_p (fd->loop.n2)
&& ! integer_zerop (fd->loop.n2))
vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
+ else
+ vinit = unshare_expr (vinit);
/* Initialize the iterator variable, so that threads that don't execute
any iterations don't execute the lastprivate clauses by accident. */
if (POINTER_TYPE_P (type))
type1 = sizetype;
- scale = double_int_ext_for_comb (scale, comb);
+ scale = wide_int_ext_for_comb (scale, comb);
- elt = fold_convert (type1, elt);
+
- if (scale.is_minus_one ()
++ if (scale.minus_one_p ()
+ && POINTER_TYPE_P (TREE_TYPE (elt)))
+ {
+ elt = convert_to_ptrofftype (elt);
+ elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
- scale = double_int_one;
++ scale = max_wide_int (1);
+ }
- if (scale.is_one ())
+ if (scale.one_p ())
{
if (!expr)
- return fold_convert (type, elt);
+ {
+ if (POINTER_TYPE_P (TREE_TYPE (elt)))
+ return elt;
+ else
+ return fold_convert (type1, elt);
+ }
- if (POINTER_TYPE_P (type))
- return fold_build_pointer_plus (expr, elt);
- return fold_build2 (PLUS_EXPR, type, expr, elt);
+ if (POINTER_TYPE_P (TREE_TYPE (expr)))
+ return fold_build_pointer_plus (expr, elt);
+ if (POINTER_TYPE_P (TREE_TYPE (elt)))
+ return fold_build_pointer_plus (elt, expr);
+ return fold_build2 (PLUS_EXPR, type1,
+ expr, fold_convert (type1, elt));
}
- if (scale.is_minus_one ())
+ if (scale.minus_one_p ())
{
if (!expr)
- return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));
+ return fold_build1 (NEGATE_EXPR, type1,
+ fold_convert (type1, elt));
- if (POINTER_TYPE_P (type))
+ if (POINTER_TYPE_P (TREE_TYPE (expr)))
{
- elt = fold_build1 (NEGATE_EXPR, type1, elt);
+ elt = convert_to_ptrofftype (elt);
+ elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
return fold_build_pointer_plus (expr, elt);
}
- return fold_build2 (MINUS_EXPR, type, expr, elt);
+ return fold_build2 (MINUS_EXPR, type1,
+ expr, fold_convert (type1, elt));
}
+ elt = fold_convert (type1, elt);
if (!expr)
- return fold_convert (type,
- fold_build2 (MULT_EXPR, type1, elt,
- wide_int_to_tree (type1, scale)));
+ return fold_build2 (MULT_EXPR, type1, elt,
- double_int_to_tree (type1, scale));
++ wide_int_to_tree (type1, scale));
- if (scale.is_negative ())
+ if (scale.neg_p ())
{
code = MINUS_EXPR;
scale = -scale;
code = PLUS_EXPR;
elt = fold_build2 (MULT_EXPR, type1, elt,
- double_int_to_tree (type1, scale));
+ wide_int_to_tree (type1, scale));
- if (POINTER_TYPE_P (type))
+ if (POINTER_TYPE_P (TREE_TYPE (expr)))
{
if (code == MINUS_EXPR)
elt = fold_build1 (NEGATE_EXPR, type1, elt);
--- /dev/null
- /* VEC length. This field is only used with TREE_VEC. */
+ /* Core data structures for the 'tree' type.
+ Copyright (C) 1989-2013 Free Software Foundation, Inc.
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free
+ Software Foundation; either version 3, or (at your option) any later
+ version.
+
+ GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3. If not see
+ <http://www.gnu.org/licenses/>. */
+
+ #ifndef GCC_TREE_CORE_H
+ #define GCC_TREE_CORE_H
+
+ #include "hashtab.h"
+ #include "machmode.h"
+ #include "input.h"
+ #include "statistics.h"
+ #include "vec.h"
+ #include "double-int.h"
+ #include "real.h"
+ #include "fixed-value.h"
+ #include "alias.h"
+ #include "flags.h"
+ #include "symtab.h"
+
+ /* This file contains all the data structures that define the 'tree' type.
+ There are no accessor macros nor functions in this file. Only the
+ basic data structures, extern declarations and type definitions. */
+
+ /*---------------------------------------------------------------------------
+ Forward type declarations. Mostly to avoid including unnecessary headers
+ ---------------------------------------------------------------------------*/
+ struct function;
+ struct real_value;
+ struct fixed_value;
+ struct ptr_info_def;
+ struct die_struct;
+ struct pointer_set_t;
+
+
+ /*---------------------------------------------------------------------------
+ #defined constants
+ ---------------------------------------------------------------------------*/
+ /* Nonzero if this is a call to a function whose return value depends
+ solely on its arguments, has no side effects, and does not read
+ global memory. This corresponds to TREE_READONLY for function
+ decls. */
+ #define ECF_CONST (1 << 0)
+
+ /* Nonzero if this is a call to a "pure" function (like a const function,
+ but it may read memory). This corresponds to DECL_PURE_P for function
+ decls. */
+ #define ECF_PURE (1 << 1)
+
+ /* Nonzero if this is ECF_CONST or ECF_PURE but cannot be proven not
+ to loop infinitely. This corresponds to DECL_LOOPING_CONST_OR_PURE_P
+ for function decls. */
+ #define ECF_LOOPING_CONST_OR_PURE (1 << 2)
+
+ /* Nonzero if this call will never return. */
+ #define ECF_NORETURN (1 << 3)
+
+ /* Nonzero if this is a call to malloc or a related function. */
+ #define ECF_MALLOC (1 << 4)
+
+ /* Nonzero if it is plausible that this is a call to alloca. */
+ #define ECF_MAY_BE_ALLOCA (1 << 5)
+
+ /* Nonzero if this is a call to a function that won't throw an exception. */
+ #define ECF_NOTHROW (1 << 6)
+
+ /* Nonzero if this is a call to setjmp or a related function. */
+ #define ECF_RETURNS_TWICE (1 << 7)
+
+ /* Nonzero if this call replaces the current stack frame. */
+ #define ECF_SIBCALL (1 << 8)
+
+ /* Function does not read or write memory (but may have side effects, so
+ it does not necessarily fit ECF_CONST). */
+ #define ECF_NOVOPS (1 << 9)
+
+ /* The function does not lead to calls within current function unit. */
+ #define ECF_LEAF (1 << 10)
+
+ /* Nonzero if this call does not affect transactions. */
+ #define ECF_TM_PURE (1 << 11)
+
+ /* Nonzero if this call is into the transaction runtime library. */
+ #define ECF_TM_BUILTIN (1 << 12)
+
+ /* Call argument flags. */
+ /* Nonzero if the argument is not dereferenced recursively, thus only
+ directly reachable memory is read or written. */
+ #define EAF_DIRECT (1 << 0)
+
+ /* Nonzero if memory reached by the argument is not clobbered. */
+ #define EAF_NOCLOBBER (1 << 1)
+
+ /* Nonzero if the argument does not escape. */
+ #define EAF_NOESCAPE (1 << 2)
+
+ /* Nonzero if the argument is not used by the function. */
+ #define EAF_UNUSED (1 << 3)
+
+ /* Call return flags. */
+ /* Mask for the argument number that is returned. Lower two bits of
+ the return flags, encodes argument slots zero to three. */
+ #define ERF_RETURN_ARG_MASK (3)
+
+ /* Nonzero if the return value is equal to the argument number
+ flags & ERF_RETURN_ARG_MASK. */
+ #define ERF_RETURNS_ARG (1 << 2)
+
+ /* Nonzero if the return value does not alias with anything. Functions
+ with the malloc attribute have this set on their return value. */
+ #define ERF_NOALIAS (1 << 3)
+
+
+ /*---------------------------------------------------------------------------
+ Enumerations
+ ---------------------------------------------------------------------------*/
+ /* Codes of tree nodes. */
+ #define DEFTREECODE(SYM, STRING, TYPE, NARGS) SYM,
+ #define END_OF_BASE_TREE_CODES LAST_AND_UNUSED_TREE_CODE,
+
+ enum tree_code {
+ #include "all-tree.def"
+ MAX_TREE_CODES
+ };
+
+ #undef DEFTREECODE
+ #undef END_OF_BASE_TREE_CODES
+
+ /* Number of language-independent tree codes. */
+ #define NUM_TREE_CODES \
+ ((int) LAST_AND_UNUSED_TREE_CODE)
+
+ #define CODE_CONTAINS_STRUCT(CODE, STRUCT) \
+ (tree_contains_struct[(CODE)][(STRUCT)])
+
+
+ /* Classify which part of the compiler has defined a given builtin function.
+ Note that we assume below that this is no more than two bits. */
+ enum built_in_class {
+ NOT_BUILT_IN = 0,
+ BUILT_IN_FRONTEND,
+ BUILT_IN_MD,
+ BUILT_IN_NORMAL
+ };
+
+ /* Last marker used for LTO streaming of built_in_class. We cannot add it
+ to the enum since we need the enum to fit in 2 bits. */
+ #define BUILT_IN_LAST (BUILT_IN_NORMAL + 1)
+
+ /* Codes that identify the various built in functions
+ so that expand_call can identify them quickly. */
+ #define DEF_BUILTIN(ENUM, N, C, T, LT, B, F, NA, AT, IM, COND) ENUM,
+ enum built_in_function {
+ #include "builtins.def"
+
+ /* Complex division routines in libgcc. These are done via builtins
+ because emit_library_call_value can't handle complex values. */
+ BUILT_IN_COMPLEX_MUL_MIN,
+ BUILT_IN_COMPLEX_MUL_MAX
+ = BUILT_IN_COMPLEX_MUL_MIN
+ + MAX_MODE_COMPLEX_FLOAT
+ - MIN_MODE_COMPLEX_FLOAT,
+
+ BUILT_IN_COMPLEX_DIV_MIN,
+ BUILT_IN_COMPLEX_DIV_MAX
+ = BUILT_IN_COMPLEX_DIV_MIN
+ + MAX_MODE_COMPLEX_FLOAT
+ - MIN_MODE_COMPLEX_FLOAT,
+
+ /* Upper bound on non-language-specific builtins. */
+ END_BUILTINS
+ };
+ #undef DEF_BUILTIN
+
+ /* Tree code classes. Each tree_code has an associated code class
+ represented by a TREE_CODE_CLASS. */
+ enum tree_code_class {
+ tcc_exceptional, /* An exceptional code (fits no category). */
+ tcc_constant, /* A constant. */
+ /* Order of tcc_type and tcc_declaration is important. */
+ tcc_type, /* A type object code. */
+ tcc_declaration, /* A declaration (also serving as variable refs). */
+ tcc_reference, /* A reference to storage. */
+ tcc_comparison, /* A comparison expression. */
+ tcc_unary, /* A unary arithmetic expression. */
+ tcc_binary, /* A binary arithmetic expression. */
+ tcc_statement, /* A statement expression, which have side effects
+ but usually no interesting value. */
+ tcc_vl_exp, /* A function call or other expression with a
+ variable-length operand vector. */
+ tcc_expression /* Any other expression. */
+ };
+
+ /* OMP_CLAUSE codes. Do not reorder, as this is used to index into
+ the tables omp_clause_num_ops and omp_clause_code_name. */
+ enum omp_clause_code {
+ /* Clause zero is special-cased inside the parser
+ (c_parser_omp_variable_list). */
+ OMP_CLAUSE_ERROR = 0,
+
+ /* OpenMP clause: private (variable_list). */
+ OMP_CLAUSE_PRIVATE,
+
+ /* OpenMP clause: shared (variable_list). */
+ OMP_CLAUSE_SHARED,
+
+ /* OpenMP clause: firstprivate (variable_list). */
+ OMP_CLAUSE_FIRSTPRIVATE,
+
+ /* OpenMP clause: lastprivate (variable_list). */
+ OMP_CLAUSE_LASTPRIVATE,
+
+ /* OpenMP clause: reduction (operator:variable_list).
+ OMP_CLAUSE_REDUCTION_CODE: The tree_code of the operator.
+ Operand 1: OMP_CLAUSE_REDUCTION_INIT: Stmt-list to initialize the var.
+ Operand 2: OMP_CLAUSE_REDUCTION_MERGE: Stmt-list to merge private var
+ into the shared one.
+ Operand 3: OMP_CLAUSE_REDUCTION_PLACEHOLDER: A dummy VAR_DECL
+ placeholder used in OMP_CLAUSE_REDUCTION_{INIT,MERGE}. */
+ OMP_CLAUSE_REDUCTION,
+
+ /* OpenMP clause: copyin (variable_list). */
+ OMP_CLAUSE_COPYIN,
+
+ /* OpenMP clause: copyprivate (variable_list). */
+ OMP_CLAUSE_COPYPRIVATE,
+
+ /* OpenMP clause: linear (variable-list[:linear-step]). */
+ OMP_CLAUSE_LINEAR,
+
+ /* OpenMP clause: uniform (argument-list). */
+ OMP_CLAUSE_UNIFORM,
+
+ /* OpenMP clause: if (scalar-expression). */
+ OMP_CLAUSE_IF,
+
+ /* OpenMP clause: num_threads (integer-expression). */
+ OMP_CLAUSE_NUM_THREADS,
+
+ /* OpenMP clause: schedule. */
+ OMP_CLAUSE_SCHEDULE,
+
+ /* OpenMP clause: nowait. */
+ OMP_CLAUSE_NOWAIT,
+
+ /* OpenMP clause: ordered. */
+ OMP_CLAUSE_ORDERED,
+
+ /* OpenMP clause: default. */
+ OMP_CLAUSE_DEFAULT,
+
+ /* OpenMP clause: collapse (constant-integer-expression). */
+ OMP_CLAUSE_COLLAPSE,
+
+ /* OpenMP clause: untied. */
+ OMP_CLAUSE_UNTIED,
+
+ /* OpenMP clause: final (scalar-expression). */
+ OMP_CLAUSE_FINAL,
+
+ /* OpenMP clause: mergeable. */
+ OMP_CLAUSE_MERGEABLE,
+
+ /* OpenMP clause: safelen (constant-integer-expression). */
+ OMP_CLAUSE_SAFELEN,
+
+ /* Internally used only clause, holding SIMD uid. */
+ OMP_CLAUSE__SIMDUID_
+ };
+
+ #undef DEFTREESTRUCT
+ #define DEFTREESTRUCT(ENUM, NAME) ENUM,
+ enum tree_node_structure_enum {
+ #include "treestruct.def"
+ LAST_TS_ENUM
+ };
+ #undef DEFTREESTRUCT
+
+ enum omp_clause_schedule_kind {
+ OMP_CLAUSE_SCHEDULE_STATIC,
+ OMP_CLAUSE_SCHEDULE_DYNAMIC,
+ OMP_CLAUSE_SCHEDULE_GUIDED,
+ OMP_CLAUSE_SCHEDULE_AUTO,
+ OMP_CLAUSE_SCHEDULE_RUNTIME
+ };
+
+ enum omp_clause_default_kind {
+ OMP_CLAUSE_DEFAULT_UNSPECIFIED,
+ OMP_CLAUSE_DEFAULT_SHARED,
+ OMP_CLAUSE_DEFAULT_NONE,
+ OMP_CLAUSE_DEFAULT_PRIVATE,
+ OMP_CLAUSE_DEFAULT_FIRSTPRIVATE
+ };
+
+ /* There is a TYPE_QUAL value for each type qualifier. They can be
+ combined by bitwise-or to form the complete set of qualifiers for a
+ type. */
+ enum cv_qualifier {
+ TYPE_UNQUALIFIED = 0x0,
+ TYPE_QUAL_CONST = 0x1,
+ TYPE_QUAL_VOLATILE = 0x2,
+ TYPE_QUAL_RESTRICT = 0x4
+ };
+
+ /* Enumerate visibility settings. */
+ #ifndef SYMBOL_VISIBILITY_DEFINED
+ #define SYMBOL_VISIBILITY_DEFINED
+ enum symbol_visibility {
+ VISIBILITY_DEFAULT,
+ VISIBILITY_PROTECTED,
+ VISIBILITY_HIDDEN,
+ VISIBILITY_INTERNAL
+ };
+ #endif /* SYMBOL_VISIBILITY_DEFINED */
+
+ /* Standard named or nameless data types of the C compiler. */
+ enum tree_index {
+ TI_ERROR_MARK,
+ TI_INTQI_TYPE,
+ TI_INTHI_TYPE,
+ TI_INTSI_TYPE,
+ TI_INTDI_TYPE,
+ TI_INTTI_TYPE,
+
+ TI_UINTQI_TYPE,
+ TI_UINTHI_TYPE,
+ TI_UINTSI_TYPE,
+ TI_UINTDI_TYPE,
+ TI_UINTTI_TYPE,
+
+ TI_UINT16_TYPE,
+ TI_UINT32_TYPE,
+ TI_UINT64_TYPE,
+
+ TI_INTEGER_ZERO,
+ TI_INTEGER_ONE,
+ TI_INTEGER_THREE,
+ TI_INTEGER_MINUS_ONE,
+ TI_NULL_POINTER,
+
+ TI_SIZE_ZERO,
+ TI_SIZE_ONE,
+
+ TI_BITSIZE_ZERO,
+ TI_BITSIZE_ONE,
+ TI_BITSIZE_UNIT,
+
+ TI_PUBLIC,
+ TI_PROTECTED,
+ TI_PRIVATE,
+
+ TI_BOOLEAN_FALSE,
+ TI_BOOLEAN_TRUE,
+
+ TI_COMPLEX_INTEGER_TYPE,
+ TI_COMPLEX_FLOAT_TYPE,
+ TI_COMPLEX_DOUBLE_TYPE,
+ TI_COMPLEX_LONG_DOUBLE_TYPE,
+
+ TI_FLOAT_TYPE,
+ TI_DOUBLE_TYPE,
+ TI_LONG_DOUBLE_TYPE,
+
+ TI_FLOAT_PTR_TYPE,
+ TI_DOUBLE_PTR_TYPE,
+ TI_LONG_DOUBLE_PTR_TYPE,
+ TI_INTEGER_PTR_TYPE,
+
+ TI_VOID_TYPE,
+ TI_PTR_TYPE,
+ TI_CONST_PTR_TYPE,
+ TI_SIZE_TYPE,
+ TI_PID_TYPE,
+ TI_PTRDIFF_TYPE,
+ TI_VA_LIST_TYPE,
+ TI_VA_LIST_GPR_COUNTER_FIELD,
+ TI_VA_LIST_FPR_COUNTER_FIELD,
+ TI_BOOLEAN_TYPE,
+ TI_FILEPTR_TYPE,
+ TI_POINTER_SIZED_TYPE,
+
+ TI_DFLOAT32_TYPE,
+ TI_DFLOAT64_TYPE,
+ TI_DFLOAT128_TYPE,
+ TI_DFLOAT32_PTR_TYPE,
+ TI_DFLOAT64_PTR_TYPE,
+ TI_DFLOAT128_PTR_TYPE,
+
+ TI_VOID_LIST_NODE,
+
+ TI_MAIN_IDENTIFIER,
+
+ TI_SAT_SFRACT_TYPE,
+ TI_SAT_FRACT_TYPE,
+ TI_SAT_LFRACT_TYPE,
+ TI_SAT_LLFRACT_TYPE,
+ TI_SAT_USFRACT_TYPE,
+ TI_SAT_UFRACT_TYPE,
+ TI_SAT_ULFRACT_TYPE,
+ TI_SAT_ULLFRACT_TYPE,
+ TI_SFRACT_TYPE,
+ TI_FRACT_TYPE,
+ TI_LFRACT_TYPE,
+ TI_LLFRACT_TYPE,
+ TI_USFRACT_TYPE,
+ TI_UFRACT_TYPE,
+ TI_ULFRACT_TYPE,
+ TI_ULLFRACT_TYPE,
+ TI_SAT_SACCUM_TYPE,
+ TI_SAT_ACCUM_TYPE,
+ TI_SAT_LACCUM_TYPE,
+ TI_SAT_LLACCUM_TYPE,
+ TI_SAT_USACCUM_TYPE,
+ TI_SAT_UACCUM_TYPE,
+ TI_SAT_ULACCUM_TYPE,
+ TI_SAT_ULLACCUM_TYPE,
+ TI_SACCUM_TYPE,
+ TI_ACCUM_TYPE,
+ TI_LACCUM_TYPE,
+ TI_LLACCUM_TYPE,
+ TI_USACCUM_TYPE,
+ TI_UACCUM_TYPE,
+ TI_ULACCUM_TYPE,
+ TI_ULLACCUM_TYPE,
+ TI_QQ_TYPE,
+ TI_HQ_TYPE,
+ TI_SQ_TYPE,
+ TI_DQ_TYPE,
+ TI_TQ_TYPE,
+ TI_UQQ_TYPE,
+ TI_UHQ_TYPE,
+ TI_USQ_TYPE,
+ TI_UDQ_TYPE,
+ TI_UTQ_TYPE,
+ TI_SAT_QQ_TYPE,
+ TI_SAT_HQ_TYPE,
+ TI_SAT_SQ_TYPE,
+ TI_SAT_DQ_TYPE,
+ TI_SAT_TQ_TYPE,
+ TI_SAT_UQQ_TYPE,
+ TI_SAT_UHQ_TYPE,
+ TI_SAT_USQ_TYPE,
+ TI_SAT_UDQ_TYPE,
+ TI_SAT_UTQ_TYPE,
+ TI_HA_TYPE,
+ TI_SA_TYPE,
+ TI_DA_TYPE,
+ TI_TA_TYPE,
+ TI_UHA_TYPE,
+ TI_USA_TYPE,
+ TI_UDA_TYPE,
+ TI_UTA_TYPE,
+ TI_SAT_HA_TYPE,
+ TI_SAT_SA_TYPE,
+ TI_SAT_DA_TYPE,
+ TI_SAT_TA_TYPE,
+ TI_SAT_UHA_TYPE,
+ TI_SAT_USA_TYPE,
+ TI_SAT_UDA_TYPE,
+ TI_SAT_UTA_TYPE,
+
+ TI_OPTIMIZATION_DEFAULT,
+ TI_OPTIMIZATION_CURRENT,
+ TI_TARGET_OPTION_DEFAULT,
+ TI_TARGET_OPTION_CURRENT,
+ TI_CURRENT_TARGET_PRAGMA,
+ TI_CURRENT_OPTIMIZE_PRAGMA,
+
+ TI_MAX
+ };
+
+ /* An enumeration of the standard C integer types. These must be
+ ordered so that shorter types appear before longer ones, and so
+ that signed types appear before unsigned ones, for the correct
+ functioning of interpret_integer() in c-lex.c. */
+ enum integer_type_kind {
+ itk_char,
+ itk_signed_char,
+ itk_unsigned_char,
+ itk_short,
+ itk_unsigned_short,
+ itk_int,
+ itk_unsigned_int,
+ itk_long,
+ itk_unsigned_long,
+ itk_long_long,
+ itk_unsigned_long_long,
+ itk_int128,
+ itk_unsigned_int128,
+ itk_none
+ };
+
+ /* A pointer-to-function member type looks like:
+
+ struct {
+ __P __pfn;
+ ptrdiff_t __delta;
+ };
+
+ If __pfn is NULL, it is a NULL pointer-to-member-function.
+
+ (Because the vtable is always the first thing in the object, we
+ don't need its offset.) If the function is virtual, then PFN is
+ one plus twice the index into the vtable; otherwise, it is just a
+ pointer to the function.
+
+ Unfortunately, using the lowest bit of PFN doesn't work in
+ architectures that don't impose alignment requirements on function
+ addresses, or that use the lowest bit to tell one ISA from another,
+ for example. For such architectures, we use the lowest bit of
+ DELTA instead of the lowest bit of the PFN, and DELTA will be
+ multiplied by 2. */
+ enum ptrmemfunc_vbit_where_t {
+ ptrmemfunc_vbit_in_pfn,
+ ptrmemfunc_vbit_in_delta
+ };
+
+ /* Flags that may be passed in the third argument of decl_attributes, and
+ to handler functions for attributes. */
+ enum attribute_flags {
+ /* The type passed in is the type of a DECL, and any attributes that
+ should be passed in again to be applied to the DECL rather than the
+ type should be returned. */
+ ATTR_FLAG_DECL_NEXT = 1,
+ /* The type passed in is a function return type, and any attributes that
+ should be passed in again to be applied to the function type rather
+ than the return type should be returned. */
+ ATTR_FLAG_FUNCTION_NEXT = 2,
+ /* The type passed in is an array element type, and any attributes that
+ should be passed in again to be applied to the array type rather
+ than the element type should be returned. */
+ ATTR_FLAG_ARRAY_NEXT = 4,
+ /* The type passed in is a structure, union or enumeration type being
+ created, and should be modified in place. */
+ ATTR_FLAG_TYPE_IN_PLACE = 8,
+ /* The attributes are being applied by default to a library function whose
+ name indicates known behavior, and should be silently ignored if they
+ are not in fact compatible with the function type. */
+ ATTR_FLAG_BUILT_IN = 16,
+ /* A given attribute has been parsed as a C++11 attribute. */
+ ATTR_FLAG_CXX11 = 32
+ };
+
+ /* Types used to represent sizes. */
+ enum size_type_kind {
+ stk_sizetype, /* Normal representation of sizes in bytes. */
+ stk_ssizetype, /* Signed representation of sizes in bytes. */
+ stk_bitsizetype, /* Normal representation of sizes in bits. */
+ stk_sbitsizetype, /* Signed representation of sizes in bits. */
+ stk_type_kind_last
+ };
+
+ enum operand_equal_flag {
+ OEP_ONLY_CONST = 1,
+ OEP_PURE_SAME = 2,
+ OEP_CONSTANT_ADDRESS_OF = 4
+ };
+
+ /* Enum and arrays used for tree allocation stats.
+ Keep in sync with tree.c:tree_node_kind_names. */
+ enum tree_node_kind {
+ d_kind,
+ t_kind,
+ b_kind,
+ s_kind,
+ r_kind,
+ e_kind,
+ c_kind,
+ id_kind,
+ vec_kind,
+ binfo_kind,
+ ssa_name_kind,
+ constr_kind,
+ x_kind,
+ lang_decl,
+ lang_type,
+ omp_clause_kind,
+ all_kinds
+ };
+
+
+ /*---------------------------------------------------------------------------
+ Type definitions
+ ---------------------------------------------------------------------------*/
+ /* When processing aliases at the symbol table level, we need the
+ declaration of the target. For this reason we need to queue aliases and
+ process them after all declarations have been produced. */
+ typedef struct GTY(()) alias_pair {
+ tree decl;
+ tree target;
+ } alias_pair;
+
+ /* An initialization priority. */
+ typedef unsigned short priority_type;
+
+ /* The type of a callback function for walking over tree structure. */
+ typedef tree (*walk_tree_fn) (tree *, int *, void *);
+
+ /* The type of a callback function that represents a custom walk_tree. */
+ typedef tree (*walk_tree_lh) (tree *, int *, tree (*) (tree *, int *, void *),
+ void *, struct pointer_set_t*);
+
+
+ /*---------------------------------------------------------------------------
+ Main data structures
+ ---------------------------------------------------------------------------*/
+ /* A tree node can represent a data type, a variable, an expression
+ or a statement. Each node has a TREE_CODE which says what kind of
+ thing it represents. Some common codes are:
+ INTEGER_TYPE -- represents a type of integers.
+ ARRAY_TYPE -- represents an array type.
+ VAR_DECL -- represents a declared variable.
+ INTEGER_CST -- represents a constant integer value.
+ PLUS_EXPR -- represents a sum (an expression).
+
+ As for the contents of a tree node: there are some fields
+ that all nodes share. Each TREE_CODE has various special-purpose
+ fields as well. The fields of a node are never accessed directly,
+ always through accessor macros. */
+
+ /* Every kind of tree node starts with this structure,
+ so all nodes have these fields.
+
+ See the accessor macros, defined below, for documentation of the
+ fields, and the table below which connects the fields and the
+ accessor macros. */
+
+ struct GTY(()) tree_base {
+ ENUM_BITFIELD(tree_code) code : 16;
+
+ unsigned side_effects_flag : 1;
+ unsigned constant_flag : 1;
+ unsigned addressable_flag : 1;
+ unsigned volatile_flag : 1;
+ unsigned readonly_flag : 1;
+ unsigned asm_written_flag: 1;
+ unsigned nowarning_flag : 1;
+ unsigned visited : 1;
+
+ unsigned used_flag : 1;
+ unsigned nothrow_flag : 1;
+ unsigned static_flag : 1;
+ unsigned public_flag : 1;
+ unsigned private_flag : 1;
+ unsigned protected_flag : 1;
+ unsigned deprecated_flag : 1;
+ unsigned default_def_flag : 1;
+
+ union {
+ /* The bits in the following structure should only be used with
+ accessor macros that constrain inputs with tree checking. */
+ struct {
+ unsigned lang_flag_0 : 1;
+ unsigned lang_flag_1 : 1;
+ unsigned lang_flag_2 : 1;
+ unsigned lang_flag_3 : 1;
+ unsigned lang_flag_4 : 1;
+ unsigned lang_flag_5 : 1;
+ unsigned lang_flag_6 : 1;
+ unsigned saturating_flag : 1;
+
+ unsigned unsigned_flag : 1;
+ unsigned packed_flag : 1;
+ unsigned user_align : 1;
+ unsigned nameless_flag : 1;
+ unsigned spare0 : 4;
+
+ unsigned spare1 : 8;
+
+ /* This field is only used with TREE_TYPE nodes; the only reason it is
+ present in tree_base instead of tree_type is to save space. The size
+ of the field must be large enough to hold addr_space_t values. */
+ unsigned address_space : 8;
+ } bits;
+ /* The following fields are present in tree_base to save space. The
+ nodes using them do not require any of the flags above and so can
+ make better use of the 4-byte sized word. */
- double_int int_cst;
++ /* VEC length. This field is only used with TREE_VEC and
++ TREE_INT_CST. */
+ int length;
+ /* SSA version number. This field is only used with SSA_NAME. */
+ unsigned int version;
+ } GTY((skip(""))) u;
+ };
+
+ /* The following table lists the uses of each of the above flags and
+ for which types of nodes they are defined.
+
+ addressable_flag:
+
+ TREE_ADDRESSABLE in
+ VAR_DECL, PARM_DECL, RESULT_DECL, FUNCTION_DECL, LABEL_DECL
+ SSA_NAME
+ all types
+ CONSTRUCTOR, IDENTIFIER_NODE
+ STMT_EXPR
+
+ CALL_EXPR_TAILCALL in
+ CALL_EXPR
+
+ CASE_LOW_SEEN in
+ CASE_LABEL_EXPR
+
+ PREDICT_EXPR_OUTCOME in
+ PREDICT_EXPR
+
+ static_flag:
+
+ TREE_STATIC in
+ VAR_DECL, FUNCTION_DECL
+ CONSTRUCTOR
+
+ TREE_NO_TRAMPOLINE in
+ ADDR_EXPR
+
+ BINFO_VIRTUAL_P in
+ TREE_BINFO
+
+ TREE_SYMBOL_REFERENCED in
+ IDENTIFIER_NODE
+
+ CLEANUP_EH_ONLY in
+ TARGET_EXPR, WITH_CLEANUP_EXPR
+
+ TRY_CATCH_IS_CLEANUP in
+ TRY_CATCH_EXPR
+
+ ASM_INPUT_P in
+ ASM_EXPR
+
+ TYPE_REF_CAN_ALIAS_ALL in
+ POINTER_TYPE, REFERENCE_TYPE
+
+ CASE_HIGH_SEEN in
+ CASE_LABEL_EXPR
+
+ ENUM_IS_SCOPED in
+ ENUMERAL_TYPE
+
+ TRANSACTION_EXPR_OUTER in
+ TRANSACTION_EXPR
+
+ public_flag:
+
+ TREE_OVERFLOW in
+ INTEGER_CST, REAL_CST, COMPLEX_CST, VECTOR_CST
+
+ TREE_PUBLIC in
+ VAR_DECL, FUNCTION_DECL
+ IDENTIFIER_NODE
+
+ ASM_VOLATILE_P in
+ ASM_EXPR
+
+ CALL_EXPR_VA_ARG_PACK in
+ CALL_EXPR
+
+ TYPE_CACHED_VALUES_P in
+ all types
+
+ SAVE_EXPR_RESOLVED_P in
+ SAVE_EXPR
+
+ OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE in
+ OMP_CLAUSE_LASTPRIVATE
+
+ OMP_CLAUSE_PRIVATE_DEBUG in
+ OMP_CLAUSE_PRIVATE
+
+ TRANSACTION_EXPR_RELAXED in
+ TRANSACTION_EXPR
+
+ private_flag:
+
+ TREE_PRIVATE in
+ all decls
+
+ CALL_EXPR_RETURN_SLOT_OPT in
+ CALL_EXPR
+
+ OMP_SECTION_LAST in
+ OMP_SECTION
+
+ OMP_PARALLEL_COMBINED in
+ OMP_PARALLEL
+
+ OMP_CLAUSE_PRIVATE_OUTER_REF in
+ OMP_CLAUSE_PRIVATE
+
+ TYPE_REF_IS_RVALUE in
+ REFERENCE_TYPE
+
+ ENUM_IS_OPAQUE in
+ ENUMERAL_TYPE
+
+ protected_flag:
+
+ TREE_PROTECTED in
+ BLOCK
+ all decls
+
+ CALL_FROM_THUNK_P and
+ CALL_ALLOCA_FOR_VAR_P in
+ CALL_EXPR
+
+ side_effects_flag:
+
+ TREE_SIDE_EFFECTS in
+ all expressions
+ all decls
+ all constants
+
+ FORCED_LABEL in
+ LABEL_DECL
+
+ volatile_flag:
+
+ TREE_THIS_VOLATILE in
+ all expressions
+ all decls
+
+ TYPE_VOLATILE in
+ all types
+
+ readonly_flag:
+
+ TREE_READONLY in
+ all expressions
+ all decls
+
+ TYPE_READONLY in
+ all types
+
+ constant_flag:
+
+ TREE_CONSTANT in
+ all expressions
+ all decls
+ all constants
+
+ TYPE_SIZES_GIMPLIFIED in
+ all types
+
+ unsigned_flag:
+
+ TYPE_UNSIGNED in
+ all types
+
+ DECL_UNSIGNED in
+ all decls
+
+ asm_written_flag:
+
+ TREE_ASM_WRITTEN in
+ VAR_DECL, FUNCTION_DECL, TYPE_DECL
+ RECORD_TYPE, UNION_TYPE, QUAL_UNION_TYPE
+ BLOCK, STRING_CST
+
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI in
+ SSA_NAME
+
+ used_flag:
+
+ TREE_USED in
+ all expressions
+ all decls
+ IDENTIFIER_NODE
+
+ nothrow_flag:
+
+ TREE_NOTHROW in
+ CALL_EXPR
+ FUNCTION_DECL
+
+ TYPE_ALIGN_OK in
+ all types
+
+ TREE_THIS_NOTRAP in
+ INDIRECT_REF, MEM_REF, TARGET_MEM_REF, ARRAY_REF, ARRAY_RANGE_REF
+
+ SSA_NAME_IN_FREELIST in
+ SSA_NAME
+
+ deprecated_flag:
+
+ TREE_DEPRECATED in
+ all decls
+ all types
+
+ IDENTIFIER_TRANSPARENT_ALIAS in
+ IDENTIFIER_NODE
+
+ visited:
+
+ TREE_VISITED in
+ all trees (used liberally by many passes)
+
+ saturating_flag:
+
+ TYPE_SATURATING in
+ all types
+
+ VAR_DECL_IS_VIRTUAL_OPERAND in
+ VAR_DECL
+
+ nowarning_flag:
+
+ TREE_NO_WARNING in
+ all expressions
+ all decls
+
+ TYPE_ARTIFICIAL in
+ all types
+
+ default_def_flag:
+
+ TYPE_VECTOR_OPAQUE in
+ VECTOR_TYPE
+
+ SSA_NAME_IS_DEFAULT_DEF in
+ SSA_NAME
+
+ DECL_NONLOCAL_FRAME in
+ VAR_DECL
+ */
+
+ struct GTY(()) tree_typed {
+ struct tree_base base;
+ tree type;
+ };
+
+ struct GTY(()) tree_common {
+ struct tree_typed typed;
+ tree chain;
+ };
+
+ struct GTY(()) tree_int_cst {
+ struct tree_typed typed;
++ HOST_WIDE_INT val[1];
+ };
+
+
+ struct GTY(()) tree_real_cst {
+ struct tree_typed typed;
+ struct real_value * real_cst_ptr;
+ };
+
+ struct GTY(()) tree_fixed_cst {
+ struct tree_typed typed;
+ struct fixed_value * fixed_cst_ptr;
+ };
+
+ struct GTY(()) tree_string {
+ struct tree_typed typed;
+ int length;
+ char str[1];
+ };
+
+ struct GTY(()) tree_complex {
+ struct tree_typed typed;
+ tree real;
+ tree imag;
+ };
+
+ struct GTY(()) tree_vector {
+ struct tree_typed typed;
+ tree GTY ((length ("TYPE_VECTOR_SUBPARTS (TREE_TYPE ((tree)&%h))"))) elts[1];
+ };
+
+ struct GTY(()) tree_identifier {
+ struct tree_common common;
+ struct ht_identifier id;
+ };
+
+ struct GTY(()) tree_list {
+ struct tree_common common;
+ tree purpose;
+ tree value;
+ };
+
+ struct GTY(()) tree_vec {
+ struct tree_common common;
+ tree GTY ((length ("TREE_VEC_LENGTH ((tree)&%h)"))) a[1];
+ };
+
+ /* A single element of a CONSTRUCTOR. VALUE holds the actual value of the
+ element. INDEX can optionally design the position of VALUE: in arrays,
+ it is the index where VALUE has to be placed; in structures, it is the
+ FIELD_DECL of the member. */
+ typedef struct GTY(()) constructor_elt_d {
+ tree index;
+ tree value;
+ } constructor_elt;
+
+ struct GTY(()) tree_constructor {
+ struct tree_typed typed;
+ vec<constructor_elt, va_gc> *elts;
+ };
+
+ struct GTY(()) tree_exp {
+ struct tree_typed typed;
+ location_t locus;
+ tree GTY ((special ("tree_exp"),
+ desc ("TREE_CODE ((tree) &%0)")))
+ operands[1];
+ };
+
+ /* Immediate use linking structure. This structure is used for maintaining
+ a doubly linked list of uses of an SSA_NAME. */
+ typedef struct GTY(()) ssa_use_operand_d {
+ struct ssa_use_operand_d* GTY((skip(""))) prev;
+ struct ssa_use_operand_d* GTY((skip(""))) next;
+ /* Immediate uses for a given SSA name are maintained as a cyclic
+ list. To recognize the root of this list, the location field
+ needs to point to the original SSA name. Since statements and
+ SSA names are of different data types, we need this union. See
+ the explanation in struct immediate_use_iterator_d. */
+ union { gimple stmt; tree ssa_name; } GTY((skip(""))) loc;
+ tree *GTY((skip(""))) use;
+ } ssa_use_operand_t;
+
+ struct GTY(()) tree_ssa_name {
+ struct tree_typed typed;
+
+ /* _DECL wrapped by this SSA name. */
+ tree var;
+
+ /* Statement that defines this SSA name. */
+ gimple def_stmt;
+
+ /* Pointer attributes used for alias analysis. */
+ struct ptr_info_def *ptr_info;
+
+ /* Immediate uses list for this SSA_NAME. */
+ struct ssa_use_operand_d imm_uses;
+ };
+
+ struct GTY(()) phi_arg_d {
+ /* imm_use MUST be the first element in struct because we do some
+ pointer arithmetic with it. See phi_arg_index_from_use. */
+ struct ssa_use_operand_d imm_use;
+ tree def;
+ location_t locus;
+ };
+
+ struct GTY(()) tree_omp_clause {
+ struct tree_common common;
+ location_t locus;
+ enum omp_clause_code code;
+ union omp_clause_subcode {
+ enum omp_clause_default_kind default_kind;
+ enum omp_clause_schedule_kind schedule_kind;
+ enum tree_code reduction_code;
+ } GTY ((skip)) subcode;
+
+ /* The gimplification of OMP_CLAUSE_REDUCTION_{INIT,MERGE} for omp-low's
+ usage. */
+ gimple_seq gimple_reduction_init;
+ gimple_seq gimple_reduction_merge;
+
+ tree GTY ((length ("omp_clause_num_ops[OMP_CLAUSE_CODE ((tree)&%h)]")))
+ ops[1];
+ };
+
+ struct GTY(()) tree_block {
+ struct tree_base base;
+ tree chain;
+
+ unsigned abstract_flag : 1;
+ unsigned block_num : 31;
+
+ location_t locus;
+
+ tree vars;
+ vec<tree, va_gc> *nonlocalized_vars;
+
+ tree subblocks;
+ tree supercontext;
+ tree abstract_origin;
+ tree fragment_origin;
+ tree fragment_chain;
+ };
+
+ struct GTY(()) tree_type_common {
+ struct tree_common common;
+ tree size;
+ tree size_unit;
+ tree attributes;
+ unsigned int uid;
+
+ unsigned int precision : 10;
+ unsigned no_force_blk_flag : 1;
+ unsigned needs_constructing_flag : 1;
+ unsigned transparent_aggr_flag : 1;
+ unsigned restrict_flag : 1;
+ unsigned contains_placeholder_bits : 2;
+
+ ENUM_BITFIELD(machine_mode) mode : 8;
+
+ unsigned string_flag : 1;
+ unsigned lang_flag_0 : 1;
+ unsigned lang_flag_1 : 1;
+ unsigned lang_flag_2 : 1;
+ unsigned lang_flag_3 : 1;
+ unsigned lang_flag_4 : 1;
+ unsigned lang_flag_5 : 1;
+ unsigned lang_flag_6 : 1;
+
+ unsigned int align;
+ alias_set_type alias_set;
+ tree pointer_to;
+ tree reference_to;
+ union tree_type_symtab {
+ int GTY ((tag ("TYPE_SYMTAB_IS_ADDRESS"))) address;
+ const char * GTY ((tag ("TYPE_SYMTAB_IS_POINTER"))) pointer;
+ struct die_struct * GTY ((tag ("TYPE_SYMTAB_IS_DIE"))) die;
+ } GTY ((desc ("debug_hooks->tree_type_symtab_field"))) symtab;
+ tree name;
+ tree next_variant;
+ tree main_variant;
+ tree context;
+ tree canonical;
+ };
+
+ struct GTY(()) tree_type_with_lang_specific {
+ struct tree_type_common common;
+ /* Points to a structure whose details depend on the language in use. */
+ struct lang_type *lang_specific;
+ };
+
+ struct GTY(()) tree_type_non_common {
+ struct tree_type_with_lang_specific with_lang_specific;
+ tree values;
+ tree minval;
+ tree maxval;
+ tree binfo;
+ };
+
+ struct GTY (()) tree_binfo {
+ struct tree_common common;
+
+ tree offset;
+ tree vtable;
+ tree virtuals;
+ tree vptr_field;
+ vec<tree, va_gc> *base_accesses;
+ tree inheritance;
+
+ tree vtt_subvtt;
+ tree vtt_vptr;
+
+ vec<tree, va_gc> base_binfos;
+ };
+
+ struct GTY(()) tree_decl_minimal {
+ struct tree_common common;
+ location_t locus;
+ unsigned int uid;
+ tree name;
+ tree context;
+ };
+
+ struct GTY(()) tree_decl_common {
+ struct tree_decl_minimal common;
+ tree size;
+
+ ENUM_BITFIELD(machine_mode) mode : 8;
+
+ unsigned nonlocal_flag : 1;
+ unsigned virtual_flag : 1;
+ unsigned ignored_flag : 1;
+ unsigned abstract_flag : 1;
+ unsigned artificial_flag : 1;
+ unsigned preserve_flag: 1;
+ unsigned debug_expr_is_from : 1;
+
+ unsigned lang_flag_0 : 1;
+ unsigned lang_flag_1 : 1;
+ unsigned lang_flag_2 : 1;
+ unsigned lang_flag_3 : 1;
+ unsigned lang_flag_4 : 1;
+ unsigned lang_flag_5 : 1;
+ unsigned lang_flag_6 : 1;
+ unsigned lang_flag_7 : 1;
+ unsigned lang_flag_8 : 1;
+
+ /* In LABEL_DECL, this is DECL_ERROR_ISSUED.
+ In VAR_DECL and PARM_DECL, this is DECL_REGISTER. */
+ unsigned decl_flag_0 : 1;
+ /* In FIELD_DECL, this is DECL_BIT_FIELD
+ In VAR_DECL and FUNCTION_DECL, this is DECL_EXTERNAL.
+ In TYPE_DECL, this is TYPE_DECL_SUPPRESS_DEBUG. */
+ unsigned decl_flag_1 : 1;
+ /* In FIELD_DECL, this is DECL_NONADDRESSABLE_P
+ In VAR_DECL, PARM_DECL and RESULT_DECL, this is
+ DECL_HAS_VALUE_EXPR_P. */
+ unsigned decl_flag_2 : 1;
+ /* 1 bit unused. */
+ unsigned decl_flag_3 : 1;
+ /* Logically, these two would go in a theoretical base shared by var and
+ parm decl. */
+ unsigned gimple_reg_flag : 1;
+ /* In VAR_DECL, PARM_DECL and RESULT_DECL, this is DECL_BY_REFERENCE. */
+ unsigned decl_by_reference_flag : 1;
+ /* In a VAR_DECL and PARM_DECL, this is DECL_READ_P. */
+ unsigned decl_read_flag : 1;
+ /* In a VAR_DECL or RESULT_DECL, this is DECL_NONSHAREABLE. */
+ unsigned decl_nonshareable_flag : 1;
+
+ /* DECL_OFFSET_ALIGN, used only for FIELD_DECLs. */
+ unsigned int off_align : 8;
+
+ /* 24 bits unused. */
+
+ /* DECL_ALIGN. It should have the same size as TYPE_ALIGN. */
+ unsigned int align;
+
+ /* UID for points-to sets, stable over copying from inlining. */
+ unsigned int pt_uid;
+
+ tree size_unit;
+ tree initial;
+ tree attributes;
+ tree abstract_origin;
+
+ /* Points to a structure whose details depend on the language in use. */
+ struct lang_decl *lang_specific;
+ };
+
+ struct GTY(()) tree_decl_with_rtl {
+ struct tree_decl_common common;
+ rtx rtl;
+ };
+
+ struct GTY(()) tree_field_decl {
+ struct tree_decl_common common;
+
+ tree offset;
+ tree bit_field_type;
+ tree qualifier;
+ tree bit_offset;
+ tree fcontext;
+ };
+
+ struct GTY(()) tree_label_decl {
+ struct tree_decl_with_rtl common;
+ int label_decl_uid;
+ int eh_landing_pad_nr;
+ };
+
+ struct GTY(()) tree_result_decl {
+ struct tree_decl_with_rtl common;
+ };
+
+ struct GTY(()) tree_const_decl {
+ struct tree_decl_common common;
+ };
+
+ struct GTY(()) tree_parm_decl {
+ struct tree_decl_with_rtl common;
+ rtx incoming_rtl;
+ };
+
+ struct GTY(()) tree_decl_with_vis {
+ struct tree_decl_with_rtl common;
+ tree assembler_name;
+ tree section_name;
+ tree comdat_group;
+
+ /* Belong to VAR_DECL exclusively. */
+ unsigned defer_output : 1;
+ unsigned hard_register : 1;
+ unsigned common_flag : 1;
+ unsigned in_text_section : 1;
+ unsigned in_constant_pool : 1;
+ unsigned dllimport_flag : 1;
+ /* Don't belong to VAR_DECL exclusively. */
+ unsigned weak_flag : 1;
+ /* When SECTION_NAME is implied by -ffunction-section. */
+ unsigned implicit_section_name_p : 1;
+
+ unsigned seen_in_bind_expr : 1;
+ unsigned comdat_flag : 1;
+ ENUM_BITFIELD(symbol_visibility) visibility : 2;
+ unsigned visibility_specified : 1;
+ /* Belongs to VAR_DECL exclusively. */
+ ENUM_BITFIELD(tls_model) tls_model : 3;
+
+ /* Belong to FUNCTION_DECL exclusively. */
+ unsigned init_priority_p : 1;
+ /* Used by C++ only. Might become a generic decl flag. */
+ unsigned shadowed_for_var_p : 1;
+ /* Belong to FUNCTION_DECL exclusively. */
+ unsigned cxx_constructor : 1;
+ /* Belong to FUNCTION_DECL exclusively. */
+ unsigned cxx_destructor : 1;
+ /* Belong to FUNCTION_DECL exclusively. */
+ unsigned final : 1;
+ /* 11 unused bits. */
+ };
+
+ struct GTY(()) tree_var_decl {
+ struct tree_decl_with_vis common;
+ };
+
+ struct GTY(()) tree_decl_non_common {
+ struct tree_decl_with_vis common;
+ /* C++ uses this in namespaces. */
+ tree saved_tree;
+ /* C++ uses this in templates. */
+ tree arguments;
+ /* Almost all FE's use this. */
+ tree result;
+ /* C++ uses this in namespaces and function_decls. */
+ tree vindex;
+ };
+
+ struct GTY(()) tree_function_decl {
+ struct tree_decl_non_common common;
+
+ struct function *f;
+
+ /* The personality function. Used for stack unwinding. */
+ tree personality;
+
+ /* Function specific options that are used by this function. */
+ tree function_specific_target; /* target options */
+ tree function_specific_optimization; /* optimization options */
+
+ /* In a FUNCTION_DECL for which DECL_BUILT_IN holds, this is
+ DECL_FUNCTION_CODE. Otherwise unused.
+ ??? The bitfield needs to be able to hold all target function
+ codes as well. */
+ ENUM_BITFIELD(built_in_function) function_code : 11;
+ ENUM_BITFIELD(built_in_class) built_in_class : 2;
+
+ unsigned static_ctor_flag : 1;
+ unsigned static_dtor_flag : 1;
+ unsigned uninlinable : 1;
+
+ unsigned possibly_inlined : 1;
+ unsigned novops_flag : 1;
+ unsigned returns_twice_flag : 1;
+ unsigned malloc_flag : 1;
+ unsigned operator_new_flag : 1;
+ unsigned declared_inline_flag : 1;
+ unsigned regdecl_flag : 1;
+ unsigned no_inline_warning_flag : 1;
+
+ unsigned no_instrument_function_entry_exit : 1;
+ unsigned no_limit_stack : 1;
+ unsigned disregard_inline_limits : 1;
+ unsigned pure_flag : 1;
+ unsigned looping_const_or_pure_flag : 1;
+ unsigned has_debug_args_flag : 1;
+ unsigned tm_clone_flag : 1;
+ unsigned versioned_function : 1;
+ /* No bits left. */
+ };
+
+ struct GTY(()) tree_translation_unit_decl {
+ struct tree_decl_common common;
+ /* Source language of this translation unit. Used for DWARF output. */
+ const char * GTY((skip(""))) language;
+ /* TODO: Non-optimization used to build this translation unit. */
+ /* TODO: Root of a partial DWARF tree for global types and decls. */
+ };
+
+ struct GTY(()) tree_type_decl {
+ struct tree_decl_non_common common;
+
+ };
+
+ struct GTY ((chain_next ("%h.next"), chain_prev ("%h.prev"))) tree_statement_list_node
+ {
+ struct tree_statement_list_node *prev;
+ struct tree_statement_list_node *next;
+ tree stmt;
+ };
+
+ struct GTY(()) tree_statement_list
+ {
+ struct tree_typed typed;
+ struct tree_statement_list_node *head;
+ struct tree_statement_list_node *tail;
+ };
+
+ struct GTY(()) tree_optimization_option {
+ struct tree_common common;
+
+ /* The optimization options used by the user. */
+ struct cl_optimization opts;
+
+ /* Target optabs for this set of optimization options. This is of
+ type `struct target_optabs *'. */
+ unsigned char *GTY ((atomic)) optabs;
+
+ /* The value of this_target_optabs against which the optabs above were
+ generated. */
+ struct target_optabs *GTY ((skip)) base_optabs;
+ };
+
+ struct GTY(()) tree_target_option {
+ struct tree_common common;
+
+ /* The optimization options used by the user. */
+ struct cl_target_option opts;
+ };
+
+ /* Define the overall contents of a tree node.
+ It may be any of the structures declared above
+ for various types of node. */
+ union GTY ((ptr_alias (union lang_tree_node),
+ desc ("tree_node_structure (&%h)"), variable_size)) tree_node {
+ struct tree_base GTY ((tag ("TS_BASE"))) base;
+ struct tree_typed GTY ((tag ("TS_TYPED"))) typed;
+ struct tree_common GTY ((tag ("TS_COMMON"))) common;
+ struct tree_int_cst GTY ((tag ("TS_INT_CST"))) int_cst;
+ struct tree_real_cst GTY ((tag ("TS_REAL_CST"))) real_cst;
+ struct tree_fixed_cst GTY ((tag ("TS_FIXED_CST"))) fixed_cst;
+ struct tree_vector GTY ((tag ("TS_VECTOR"))) vector;
+ struct tree_string GTY ((tag ("TS_STRING"))) string;
+ struct tree_complex GTY ((tag ("TS_COMPLEX"))) complex;
+ struct tree_identifier GTY ((tag ("TS_IDENTIFIER"))) identifier;
+ struct tree_decl_minimal GTY((tag ("TS_DECL_MINIMAL"))) decl_minimal;
+ struct tree_decl_common GTY ((tag ("TS_DECL_COMMON"))) decl_common;
+ struct tree_decl_with_rtl GTY ((tag ("TS_DECL_WRTL"))) decl_with_rtl;
+ struct tree_decl_non_common GTY ((tag ("TS_DECL_NON_COMMON")))
+ decl_non_common;
+ struct tree_parm_decl GTY ((tag ("TS_PARM_DECL"))) parm_decl;
+ struct tree_decl_with_vis GTY ((tag ("TS_DECL_WITH_VIS"))) decl_with_vis;
+ struct tree_var_decl GTY ((tag ("TS_VAR_DECL"))) var_decl;
+ struct tree_field_decl GTY ((tag ("TS_FIELD_DECL"))) field_decl;
+ struct tree_label_decl GTY ((tag ("TS_LABEL_DECL"))) label_decl;
+ struct tree_result_decl GTY ((tag ("TS_RESULT_DECL"))) result_decl;
+ struct tree_const_decl GTY ((tag ("TS_CONST_DECL"))) const_decl;
+ struct tree_type_decl GTY ((tag ("TS_TYPE_DECL"))) type_decl;
+ struct tree_function_decl GTY ((tag ("TS_FUNCTION_DECL"))) function_decl;
+ struct tree_translation_unit_decl GTY ((tag ("TS_TRANSLATION_UNIT_DECL")))
+ translation_unit_decl;
+ struct tree_type_common GTY ((tag ("TS_TYPE_COMMON"))) type_common;
+ struct tree_type_with_lang_specific GTY ((tag ("TS_TYPE_WITH_LANG_SPECIFIC")))
+ type_with_lang_specific;
+ struct tree_type_non_common GTY ((tag ("TS_TYPE_NON_COMMON")))
+ type_non_common;
+ struct tree_list GTY ((tag ("TS_LIST"))) list;
+ struct tree_vec GTY ((tag ("TS_VEC"))) vec;
+ struct tree_exp GTY ((tag ("TS_EXP"))) exp;
+ struct tree_ssa_name GTY ((tag ("TS_SSA_NAME"))) ssa_name;
+ struct tree_block GTY ((tag ("TS_BLOCK"))) block;
+ struct tree_binfo GTY ((tag ("TS_BINFO"))) binfo;
+ struct tree_statement_list GTY ((tag ("TS_STATEMENT_LIST"))) stmt_list;
+ struct tree_constructor GTY ((tag ("TS_CONSTRUCTOR"))) constructor;
+ struct tree_omp_clause GTY ((tag ("TS_OMP_CLAUSE"))) omp_clause;
+ struct tree_optimization_option GTY ((tag ("TS_OPTIMIZATION"))) optimization;
+ struct tree_target_option GTY ((tag ("TS_TARGET_OPTION"))) target_option;
+ };
+
+ /* Structure describing an attribute and a function to handle it. */
+ struct attribute_spec {
+ /* The name of the attribute (without any leading or trailing __),
+ or NULL to mark the end of a table of attributes. */
+ const char *name;
+ /* The minimum length of the list of arguments of the attribute. */
+ int min_length;
+ /* The maximum length of the list of arguments of the attribute
+ (-1 for no maximum). */
+ int max_length;
+ /* Whether this attribute requires a DECL. If it does, it will be passed
+ from types of DECLs, function return types and array element types to
+ the DECLs, function types and array types respectively; but when
+ applied to a type in any other circumstances, it will be ignored with
+ a warning. (If greater control is desired for a given attribute,
+ this should be false, and the flags argument to the handler may be
+ used to gain greater control in that case.) */
+ bool decl_required;
+ /* Whether this attribute requires a type. If it does, it will be passed
+ from a DECL to the type of that DECL. */
+ bool type_required;
+ /* Whether this attribute requires a function (or method) type. If it does,
+ it will be passed from a function pointer type to the target type,
+ and from a function return type (which is not itself a function
+ pointer type) to the function type. */
+ bool function_type_required;
+ /* Function to handle this attribute. NODE points to the node to which
+ the attribute is to be applied. If a DECL, it should be modified in
+ place; if a TYPE, a copy should be created. NAME is the name of the
+ attribute (possibly with leading or trailing __). ARGS is the TREE_LIST
+ of the arguments (which may be NULL). FLAGS gives further information
+ about the context of the attribute. Afterwards, the attributes will
+ be added to the DECL_ATTRIBUTES or TYPE_ATTRIBUTES, as appropriate,
+ unless *NO_ADD_ATTRS is set to true (which should be done on error,
+ as well as in any other cases when the attributes should not be added
+ to the DECL or TYPE). Depending on FLAGS, any attributes to be
+ applied to another type or DECL later may be returned;
+ otherwise the return value should be NULL_TREE. This pointer may be
+ NULL if no special handling is required beyond the checks implied
+ by the rest of this structure. */
+ tree (*handler) (tree *node, tree name, tree args,
+ int flags, bool *no_add_attrs);
+ /* Specifies if attribute affects type's identity. */
+ bool affects_type_identity;
+ };
+
+ /* These functions allow a front-end to perform a manual layout of a
+ RECORD_TYPE. (For instance, if the placement of subsequent fields
+ depends on the placement of fields so far.) Begin by calling
+ start_record_layout. Then, call place_field for each of the
+ fields. Then, call finish_record_layout. See layout_type for the
+ default way in which these functions are used. */
+ typedef struct record_layout_info_s {
+ /* The RECORD_TYPE that we are laying out. */
+ tree t;
+ /* The offset into the record so far, in bytes, not including bits in
+ BITPOS. */
+ tree offset;
+ /* The last known alignment of SIZE. */
+ unsigned int offset_align;
+ /* The bit position within the last OFFSET_ALIGN bits, in bits. */
+ tree bitpos;
+ /* The alignment of the record so far, in bits. */
+ unsigned int record_align;
+ /* The alignment of the record so far, ignoring #pragma pack and
+ __attribute__ ((packed)), in bits. */
+ unsigned int unpacked_align;
+ /* The previous field laid out. */
+ tree prev_field;
+ /* The static variables (i.e., class variables, as opposed to
+ instance variables) encountered in T. */
+ vec<tree, va_gc> *pending_statics;
+ /* Bits remaining in the current alignment group */
+ int remaining_in_alignment;
+ /* True if we've seen a packed field that didn't have normal
+ alignment anyway. */
+ int packed_maybe_necessary;
+ } *record_layout_info;
+
+ /* Iterator for going through the function arguments. */
+ struct function_args_iterator {
+ tree next; /* TREE_LIST pointing to the next argument */
+ };
+
+ /* Structures to map from a tree to another tree. */
+ struct GTY(()) tree_map_base {
+ tree from;
+ };
+
+ struct GTY(()) tree_map {
+ struct tree_map_base base;
+ unsigned int hash;
+ tree to;
+ };
+
+ /* Map from a decl tree to another tree. */
+ struct GTY(()) tree_decl_map {
+ struct tree_map_base base;
+ tree to;
+ };
+
+ /* Map from a tree to an int. */
+ struct GTY(()) tree_int_map {
+ struct tree_map_base base;
+ unsigned int to;
+ };
+
+ /* Map from a tree to initialization/finalization priorities. */
+ struct GTY(()) tree_priority_map {
+ struct tree_map_base base;
+ priority_type init;
+ priority_type fini;
+ };
+
+ /* Map from a decl tree to a tree vector. */
+ struct GTY(()) tree_vec_map {
+ struct tree_map_base base;
+ vec<tree, va_gc> *to;
+ };
+
+ /* Abstract iterators for CALL_EXPRs. These static inline definitions
+ have to go towards the end of tree.h so that union tree_node is fully
+ defined by this point. */
+
+ /* Structure containing iterator state. */
+ struct call_expr_arg_iterator {
+ tree t; /* the call_expr */
+ int n; /* argument count */
+ int i; /* next argument index */
+ };
+
+ struct const_call_expr_arg_iterator {
+ const_tree t; /* the call_expr */
+ int n; /* argument count */
+ int i; /* next argument index */
+ };
+
+ /* The builtin_info structure holds the FUNCTION_DECL of the standard builtin
+ function, and a flag that says if the function is available implicitly, or
+ whether the user has to code explicit calls to __builtin_<xxx>. */
+ struct GTY(()) builtin_info_type {
+ tree decl[(int)END_BUILTINS];
+ bool implicit_p[(int)END_BUILTINS];
+ };
+
+
+ /*---------------------------------------------------------------------------
+ Global variables
+ ---------------------------------------------------------------------------*/
+ /* Matrix describing the structures contained in a given tree code. */
+ extern unsigned char tree_contains_struct[MAX_TREE_CODES][64];
+
+ /* Class of tree given its code. */
+ extern const enum tree_code_class tree_code_type[];
+
+ /* Each tree code class has an associated string representation.
+ These must correspond to the tree_code_class entries. */
+ extern const char *const tree_code_class_strings[];
+
+ /* Number of argument-words in each kind of tree-node. */
+ extern const unsigned char tree_code_length[];
+
+ /* Names of tree components. */
+ extern const char *const tree_code_name[];
+
+ /* Vector of all alias pairs for global symbols. */
+ extern GTY(()) vec<alias_pair, va_gc> *alias_pairs;
+
+ /* Names of all the built_in classes. */
+ extern const char *const built_in_class_names[BUILT_IN_LAST];
+
+ /* Names of all the built_in functions. */
+ extern const char * built_in_names[(int) END_BUILTINS];
+
+ /* Number of operands and names for each OMP_CLAUSE node. */
+ extern unsigned const char omp_clause_num_ops[];
+ extern const char * const omp_clause_code_name[];
+
+ /* A vector of all translation-units. */
+ extern GTY (()) vec<tree, va_gc> *all_translation_units;
+
+ /* Vector of standard trees used by the C compiler. */
+ extern GTY(()) tree global_trees[TI_MAX];
+
+ /* The standard C integer types. Use integer_type_kind to index into
+ this array. */
+ extern GTY(()) tree integer_types[itk_none];
+
+ /* Types used to represent sizes. */
+ extern GTY(()) tree sizetype_tab[(int) stk_type_kind_last];
+
+ /* Arrays for keeping track of tree node statistics. */
+ extern int tree_node_counts[];
+ extern int tree_node_sizes[];
+
+ /* True if we are in gimple form and the actions of the folders need to
+ be restricted. False if we are not in gimple form and folding is not
+ restricted to creating gimple expressions. */
+ extern bool in_gimple_form;
+
+ /* Functional interface to the builtin functions. */
+ extern GTY(()) builtin_info_type builtin_info;
+
+ /* If nonzero, an upper limit on alignment of structure fields, in bits, */
+ extern unsigned int maximum_field_alignment;
+
+ /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
+ Zero means allow extended lvalues. */
+ extern int pedantic_lvalues;
+
+ /* Points to the FUNCTION_DECL of the function whose body we are reading. */
+ extern GTY(()) tree current_function_decl;
+
+ /* Nonzero means a FUNC_BEGIN label was emitted. */
+ extern GTY(()) const char * current_function_func_begin_label;
+
+ #endif // GCC_TREE_CORE_H
#include "dumpfile.h"
#include "value-prof.h"
#include "predict.h"
+#include "wide-int-print.h"
+ #include <new> // For placement-new.
+
/* Local functions, macros and variables. */
static const char *op_symbol (const_tree);
static void pretty_print_string (pretty_printer *, const char*);
else
{
val.lattice_val = VARYING;
- val.mask = double_int_minus_one;
+ val.mask = -1;
}
}
- else if (is_gimple_assign (stmt)
- /* Value-returning GIMPLE_CALL statements assign to
- a variable, and are treated similarly to GIMPLE_ASSIGN. */
- || (is_gimple_call (stmt)
- && gimple_call_lhs (stmt) != NULL_TREE)
- || gimple_code (stmt) == GIMPLE_PHI)
+ else if (is_gimple_assign (stmt))
{
tree cst;
if (gimple_assign_single_p (stmt)
&& DR_STEP (newdr)
&& integer_zerop (DR_STEP (newdr)))
{
- dr = newdr;
- gather = true;
+ if (maybe_simd_lane_access)
+ {
+ tree off = DR_OFFSET (newdr);
+ STRIP_NOPS (off);
+ if (TREE_CODE (DR_INIT (newdr)) == INTEGER_CST
+ && TREE_CODE (off) == MULT_EXPR
- && host_integerp (TREE_OPERAND (off, 1), 1))
++ && tree_fits_uhwi_p (TREE_OPERAND (off, 1)))
+ {
+ tree step = TREE_OPERAND (off, 1);
+ off = TREE_OPERAND (off, 0);
+ STRIP_NOPS (off);
+ if (CONVERT_EXPR_P (off)
+ && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (off,
+ 0)))
+ < TYPE_PRECISION (TREE_TYPE (off)))
+ off = TREE_OPERAND (off, 0);
+ if (TREE_CODE (off) == SSA_NAME)
+ {
+ gimple def = SSA_NAME_DEF_STMT (off);
+ tree reft = TREE_TYPE (DR_REF (newdr));
+ if (gimple_call_internal_p (def)
+ && gimple_call_internal_fn (def)
+ == IFN_GOMP_SIMD_LANE)
+ {
+ tree arg = gimple_call_arg (def, 0);
+ gcc_assert (TREE_CODE (arg) == SSA_NAME);
+ arg = SSA_NAME_VAR (arg);
+ if (arg == loop->simduid
+ /* For now. */
+ && tree_int_cst_equal
+ (TYPE_SIZE_UNIT (reft),
+ step))
+ {
+ DR_OFFSET (newdr) = ssize_int (0);
+ DR_STEP (newdr) = step;
+ dr = newdr;
+ simd_lane_access = true;
+ }
+ }
+ }
+ }
+ }
+ if (!simd_lane_access && maybe_gather)
+ {
+ dr = newdr;
+ gather = true;
+ }
}
- else
+ if (!gather && !simd_lane_access)
free_data_ref (newdr);
}
#define CASE_FLT_FN(FN) case FN: case FN##F: case FN##L
#define CASE_FLT_FN_REENT(FN) case FN##_R: case FN##F_R: case FN##L_R
#define CASE_INT_FN(FN) case FN: case FN##L: case FN##LL: case FN##IMAX
- \f
- /* In an OMP_CLAUSE node. */
-
- /* Number of operands and names for each clause. */
- extern unsigned const char omp_clause_num_ops[];
- extern const char * const omp_clause_code_name[];
-
- /* Clause codes. Do not reorder, as this is used to index into the tables
- omp_clause_num_ops and omp_clause_code_name. */
- enum omp_clause_code
- {
- /* Clause zero is special-cased inside the parser
- (c_parser_omp_variable_list). */
- OMP_CLAUSE_ERROR = 0,
-
- /* OpenMP clause: private (variable_list). */
- OMP_CLAUSE_PRIVATE,
-
- /* OpenMP clause: shared (variable_list). */
- OMP_CLAUSE_SHARED,
-
- /* OpenMP clause: firstprivate (variable_list). */
- OMP_CLAUSE_FIRSTPRIVATE,
-
- /* OpenMP clause: lastprivate (variable_list). */
- OMP_CLAUSE_LASTPRIVATE,
-
- /* OpenMP clause: reduction (operator:variable_list).
- OMP_CLAUSE_REDUCTION_CODE: The tree_code of the operator.
- Operand 1: OMP_CLAUSE_REDUCTION_INIT: Stmt-list to initialize the var.
- Operand 2: OMP_CLAUSE_REDUCTION_MERGE: Stmt-list to merge private var
- into the shared one.
- Operand 3: OMP_CLAUSE_REDUCTION_PLACEHOLDER: A dummy VAR_DECL
- placeholder used in OMP_CLAUSE_REDUCTION_{INIT,MERGE}. */
- OMP_CLAUSE_REDUCTION,
-
- /* OpenMP clause: copyin (variable_list). */
- OMP_CLAUSE_COPYIN,
-
- /* OpenMP clause: copyprivate (variable_list). */
- OMP_CLAUSE_COPYPRIVATE,
-
- /* OpenMP clause: if (scalar-expression). */
- OMP_CLAUSE_IF,
-
- /* OpenMP clause: num_threads (integer-expression). */
- OMP_CLAUSE_NUM_THREADS,
-
- /* OpenMP clause: schedule. */
- OMP_CLAUSE_SCHEDULE,
-
- /* OpenMP clause: nowait. */
- OMP_CLAUSE_NOWAIT,
-
- /* OpenMP clause: ordered. */
- OMP_CLAUSE_ORDERED,
-
- /* OpenMP clause: default. */
- OMP_CLAUSE_DEFAULT,
-
- /* OpenMP clause: collapse (constant-integer-expression). */
- OMP_CLAUSE_COLLAPSE,
-
- /* OpenMP clause: untied. */
- OMP_CLAUSE_UNTIED,
-
- /* OpenMP clause: final (scalar-expression). */
- OMP_CLAUSE_FINAL,
-
- /* OpenMP clause: mergeable. */
- OMP_CLAUSE_MERGEABLE
- };
- \f
- /* The definition of tree nodes fills the next several pages. */
-
- /* A tree node can represent a data type, a variable, an expression
- or a statement. Each node has a TREE_CODE which says what kind of
- thing it represents. Some common codes are:
- INTEGER_TYPE -- represents a type of integers.
- ARRAY_TYPE -- represents a type of pointer.
- VAR_DECL -- represents a declared variable.
- INTEGER_CST -- represents a constant integer value.
- PLUS_EXPR -- represents a sum (an expression).
-
- As for the contents of a tree node: there are some fields
- that all nodes share. Each TREE_CODE has various special-purpose
- fields as well. The fields of a node are never accessed directly,
- always through accessor macros. */
-
- /* Every kind of tree node starts with this structure,
- so all nodes have these fields.
-
- See the accessor macros, defined below, for documentation of the
- fields, and the table below which connects the fields and the
- accessor macros. */
-
- struct GTY(()) tree_base {
- ENUM_BITFIELD(tree_code) code : 16;
-
- unsigned side_effects_flag : 1;
- unsigned constant_flag : 1;
- unsigned addressable_flag : 1;
- unsigned volatile_flag : 1;
- unsigned readonly_flag : 1;
- unsigned asm_written_flag: 1;
- unsigned nowarning_flag : 1;
- unsigned visited : 1;
-
- unsigned used_flag : 1;
- unsigned nothrow_flag : 1;
- unsigned static_flag : 1;
- unsigned public_flag : 1;
- unsigned private_flag : 1;
- unsigned protected_flag : 1;
- unsigned deprecated_flag : 1;
- unsigned default_def_flag : 1;
-
- union {
- /* The bits in the following structure should only be used with
- accessor macros that constrain inputs with tree checking. */
- struct {
- unsigned lang_flag_0 : 1;
- unsigned lang_flag_1 : 1;
- unsigned lang_flag_2 : 1;
- unsigned lang_flag_3 : 1;
- unsigned lang_flag_4 : 1;
- unsigned lang_flag_5 : 1;
- unsigned lang_flag_6 : 1;
- unsigned saturating_flag : 1;
-
- unsigned unsigned_flag : 1;
- unsigned packed_flag : 1;
- unsigned user_align : 1;
- unsigned nameless_flag : 1;
- unsigned spare0 : 4;
-
- unsigned spare1 : 8;
-
- /* This field is only used with TREE_TYPE nodes; the only reason it is
- present in tree_base instead of tree_type is to save space. The size
- of the field must be large enough to hold addr_space_t values. */
- unsigned address_space : 8;
- } bits;
- /* The following fields are present in tree_base to save space. The
- nodes using them do not require any of the flags above and so can
- make better use of the 4-byte sized word. */
- /* VEC length. This field is only used with TREE_VEC and TREE_INT_CST. */
- int length;
- /* SSA version number. This field is only used with SSA_NAME. */
- unsigned int version;
- } GTY((skip(""))) u;
- };
-
- /* The following table lists the uses of each of the above flags and
- for which types of nodes they are defined.
-
- addressable_flag:
-
- TREE_ADDRESSABLE in
- VAR_DECL, PARM_DECL, RESULT_DECL, FUNCTION_DECL, LABEL_DECL
- SSA_NAME
- all types
- CONSTRUCTOR, IDENTIFIER_NODE
- STMT_EXPR
-
- CALL_EXPR_TAILCALL in
- CALL_EXPR
-
- CASE_LOW_SEEN in
- CASE_LABEL_EXPR
-
- PREDICT_EXPR_OUTCOME in
- PREDICT_EXPR
-
- static_flag:
-
- TREE_STATIC in
- VAR_DECL, FUNCTION_DECL
- CONSTRUCTOR
-
- TREE_NO_TRAMPOLINE in
- ADDR_EXPR
-
- BINFO_VIRTUAL_P in
- TREE_BINFO
-
- TREE_SYMBOL_REFERENCED in
- IDENTIFIER_NODE
-
- CLEANUP_EH_ONLY in
- TARGET_EXPR, WITH_CLEANUP_EXPR
-
- TRY_CATCH_IS_CLEANUP in
- TRY_CATCH_EXPR
-
- ASM_INPUT_P in
- ASM_EXPR
-
- TYPE_REF_CAN_ALIAS_ALL in
- POINTER_TYPE, REFERENCE_TYPE
-
- CASE_HIGH_SEEN in
- CASE_LABEL_EXPR
-
- ENUM_IS_SCOPED in
- ENUMERAL_TYPE
-
- TRANSACTION_EXPR_OUTER in
- TRANSACTION_EXPR
-
- public_flag:
-
- TREE_OVERFLOW in
- INTEGER_CST, REAL_CST, COMPLEX_CST, VECTOR_CST
-
- TREE_PUBLIC in
- VAR_DECL, FUNCTION_DECL
- IDENTIFIER_NODE
-
- ASM_VOLATILE_P in
- ASM_EXPR
-
- CALL_EXPR_VA_ARG_PACK in
- CALL_EXPR
-
- TYPE_CACHED_VALUES_P in
- all types
-
- SAVE_EXPR_RESOLVED_P in
- SAVE_EXPR
-
- OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE in
- OMP_CLAUSE_LASTPRIVATE
-
- OMP_CLAUSE_PRIVATE_DEBUG in
- OMP_CLAUSE_PRIVATE
-
- TRANSACTION_EXPR_RELAXED in
- TRANSACTION_EXPR
-
- private_flag:
-
- TREE_PRIVATE in
- all decls
-
- CALL_EXPR_RETURN_SLOT_OPT in
- CALL_EXPR
-
- OMP_SECTION_LAST in
- OMP_SECTION
-
- OMP_PARALLEL_COMBINED in
- OMP_PARALLEL
-
- OMP_CLAUSE_PRIVATE_OUTER_REF in
- OMP_CLAUSE_PRIVATE
-
- TYPE_REF_IS_RVALUE in
- REFERENCE_TYPE
-
- ENUM_IS_OPAQUE in
- ENUMERAL_TYPE
-
- protected_flag:
-
- TREE_PROTECTED in
- BLOCK
- all decls
-
- CALL_FROM_THUNK_P and
- CALL_ALLOCA_FOR_VAR_P in
- CALL_EXPR
-
- side_effects_flag:
-
- TREE_SIDE_EFFECTS in
- all expressions
- all decls
- all constants
-
- FORCED_LABEL in
- LABEL_DECL
-
- volatile_flag:
-
- TREE_THIS_VOLATILE in
- all expressions
- all decls
-
- TYPE_VOLATILE in
- all types
-
- readonly_flag:
-
- TREE_READONLY in
- all expressions
- all decls
-
- TYPE_READONLY in
- all types
-
- constant_flag:
-
- TREE_CONSTANT in
- all expressions
- all decls
- all constants
-
- TYPE_SIZES_GIMPLIFIED in
- all types
-
- unsigned_flag:
-
- TYPE_UNSIGNED in
- all types
-
- DECL_UNSIGNED in
- all decls
-
- asm_written_flag:
-
- TREE_ASM_WRITTEN in
- VAR_DECL, FUNCTION_DECL, TYPE_DECL
- RECORD_TYPE, UNION_TYPE, QUAL_UNION_TYPE
- BLOCK, STRING_CST
-
- SSA_NAME_OCCURS_IN_ABNORMAL_PHI in
- SSA_NAME
-
- used_flag:
-
- TREE_USED in
- all expressions
- all decls
- IDENTIFIER_NODE
-
- nothrow_flag:
-
- TREE_NOTHROW in
- CALL_EXPR
- FUNCTION_DECL
-
- TYPE_ALIGN_OK in
- all types
-
- TREE_THIS_NOTRAP in
- INDIRECT_REF, MEM_REF, TARGET_MEM_REF, ARRAY_REF, ARRAY_RANGE_REF
-
- SSA_NAME_IN_FREELIST in
- SSA_NAME
-
- deprecated_flag:
-
- TREE_DEPRECATED in
- all decls
- all types
-
- IDENTIFIER_TRANSPARENT_ALIAS in
- IDENTIFIER_NODE
-
- visited:
-
- TREE_VISITED in
- all trees (used liberally by many passes)
-
- saturating_flag:
-
- TYPE_SATURATING in
- all types
-
- VAR_DECL_IS_VIRTUAL_OPERAND in
- VAR_DECL
-
- nowarning_flag:
-
- TREE_NO_WARNING in
- all expressions
- all decls
-
- TYPE_ARTIFICIAL in
- all types
-
- default_def_flag:
-
- TYPE_VECTOR_OPAQUE in
- VECTOR_TYPE
-
- SSA_NAME_IS_DEFAULT_DEF in
- SSA_NAME
-
- DECL_NONLOCAL_FRAME in
- VAR_DECL
- */
-
- struct GTY(()) tree_typed {
- struct tree_base base;
- tree type;
- };
-
- struct GTY(()) tree_common {
- struct tree_typed typed;
- tree chain;
- };
-
- #undef DEFTREESTRUCT
- #define DEFTREESTRUCT(ENUM, NAME) ENUM,
- enum tree_node_structure_enum {
- #include "treestruct.def"
- LAST_TS_ENUM
- };
- #undef DEFTREESTRUCT
+#define NULL_TREE (tree) NULL
+
/* Define accessors for the fields that all tree nodes have
(though some fields are not used for all kinds of nodes). */
\f
/* Define additional fields and accessors for nodes representing constants. */
-/* In an INTEGER_CST node. These two together make a 2-word integer.
- If the data type is signed, the value is sign-extended to 2 words
- even though not all of them may really be in use.
- In an unsigned constant shorter than 2 words, the extra bits are 0. */
-#define TREE_INT_CST(NODE) (INTEGER_CST_CHECK (NODE)->int_cst.int_cst)
-#define TREE_INT_CST_LOW(NODE) (TREE_INT_CST (NODE).low)
-#define TREE_INT_CST_HIGH(NODE) (TREE_INT_CST (NODE).high)
-
#define INT_CST_LT(A, B) \
- (TREE_INT_CST_HIGH (A) < TREE_INT_CST_HIGH (B) \
- || (TREE_INT_CST_HIGH (A) == TREE_INT_CST_HIGH (B) \
- && TREE_INT_CST_LOW (A) < TREE_INT_CST_LOW (B)))
+ (wide_int::lts_p (A, B))
-#define INT_CST_LT_UNSIGNED(A, B) \
- (((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (A) \
- < (unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (B)) \
- || (((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (A) \
- == (unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (B)) \
- && TREE_INT_CST_LOW (A) < TREE_INT_CST_LOW (B)))
+#define INT_CST_LT_UNSIGNED(A, B) \
+ (wide_int::ltu_p (A, B))
+
+#define TREE_INT_CST_NUNITS(NODE) (INTEGER_CST_CHECK (NODE)->base.u.length)
+#define TREE_INT_CST_ELT(NODE, I) TREE_INT_CST_ELT_CHECK (NODE, I)
- struct GTY(()) tree_int_cst {
- struct tree_typed typed;
- HOST_WIDE_INT val[1];
- };
-
- /* In a REAL_CST node. struct real_value is an opaque entity, with
- manipulators defined in real.h. We don't want tree.h depending on
- real.h and transitively on tm.h. */
- struct real_value;
-
#define TREE_REAL_CST_PTR(NODE) (REAL_CST_CHECK (NODE)->real_cst.real_cst_ptr)
#define TREE_REAL_CST(NODE) (*TREE_REAL_CST_PTR (NODE))
#define long_long_unsigned_type_node integer_types[itk_unsigned_long_long]
#define int128_integer_type_node integer_types[itk_int128]
#define int128_unsigned_type_node integer_types[itk_unsigned_int128]
- \f
- /* A pointer-to-function member type looks like:
-
- struct {
- __P __pfn;
- ptrdiff_t __delta;
- };
- If __pfn is NULL, it is a NULL pointer-to-member-function.
-
- (Because the vtable is always the first thing in the object, we
- don't need its offset.) If the function is virtual, then PFN is
- one plus twice the index into the vtable; otherwise, it is just a
- pointer to the function.
-
- Unfortunately, using the lowest bit of PFN doesn't work in
- architectures that don't impose alignment requirements on function
- addresses, or that use the lowest bit to tell one ISA from another,
- for example. For such architectures, we use the lowest bit of
- DELTA instead of the lowest bit of the PFN, and DELTA will be
- multiplied by 2. */
-#define NULL_TREE (tree) NULL
--
- enum ptrmemfunc_vbit_where_t
- {
- ptrmemfunc_vbit_in_pfn,
- ptrmemfunc_vbit_in_delta
- };
- \f
/* True if NODE is an erroneous expression. */
#define error_operand_p(NODE) \
{
unsigned HOST_WIDE_INT size, rounded;
- size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
rounded = size;
- if (flag_asan && asan_protect_global (decl))
+ if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl))
size += asan_red_zone_size (size);
/* Don't allocate zero bytes of common,
{
decl = SYMBOL_REF_DECL (symbol);
alignment = get_variable_align (decl);
- size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
- if (flag_asan && asan_protect_global (decl))
+ if ((flag_sanitize & SANITIZE_ADDRESS)
+ && asan_protect_global (decl))
{
size += asan_red_zone_size (size);
alignment = MAX (alignment,
HOST_WIDE_INT size;
decl = SYMBOL_REF_DECL (symbol);
assemble_variable_contents (decl, XSTR (symbol, 0), false);
- size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
+ size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
offset += size;
- if (flag_asan && asan_protect_global (decl))
+ if ((flag_sanitize & SANITIZE_ADDRESS)
+ && asan_protect_global (decl))
{
size = asan_red_zone_size (size);
assemble_zeros (size);