1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
27 It is intended to be language-independent but can occasionally
28 calls language-dependent routines. */
32 #include "coretypes.h"
37 #include "tree-pass.h"
40 #include "diagnostic.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
47 #include "toplev.h" /* get_random_seed */
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
66 /* Tree code classes. */
/* The three tables below are generated with the classic X-macro trick:
   DEFTREECODE is redefined before each inclusion of all-tree.def so the
   same list of codes expands to a class, a length, and a name per entry.
   NOTE(review): the closing "};" and "#undef DEFTREECODE" lines of each
   table, and the initializer of tree_code_class_strings, are elided from
   this excerpt.  */
68 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
69 #define END_OF_BASE_TREE_CODES tcc_exceptional,
71 const enum tree_code_class tree_code_type[] = {
72 #include "all-tree.def"
76 #undef END_OF_BASE_TREE_CODES
78 /* Table indexed by tree code giving number of expression
79 operands beyond the fixed part of the node structure.
80 Not used for types or decls. */
82 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
83 #define END_OF_BASE_TREE_CODES 0,
85 const unsigned char tree_code_length[] = {
86 #include "all-tree.def"
90 #undef END_OF_BASE_TREE_CODES
92 /* Names of tree components.
93 Used for printing out the tree and error messages. */
94 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
95 #define END_OF_BASE_TREE_CODES "@dummy",
97 static const char *const tree_code_name[] = {
98 #include "all-tree.def"
102 #undef END_OF_BASE_TREE_CODES
104 /* Each tree code class has an associated string representation.
105 These must correspond to the tree_code_class entries. */
107 const char *const tree_code_class_strings[] =
122 /* obstack.[ch] explicitly declined to prototype this. */
123 extern int _obstack_allocated_p (struct obstack *h, void *obj);
125 /* Statistics-gathering stuff. */
/* Per-code and per-kind allocation counters, updated by
   record_node_allocation_statistics and decremented by free_node.  */
127 static int tree_code_counts[MAX_TREE_CODES];
128 int tree_node_counts[(int) all_kinds];
129 int tree_node_sizes[(int) all_kinds];
131 /* Keep in sync with tree.h:enum tree_node_kind. */
/* NOTE(review): the initializer entries of tree_node_kind_names are
   elided from this excerpt.  */
132 static const char * const tree_node_kind_names[] = {
151 /* Unique id for next decl created. */
152 static GTY(()) int next_decl_uid;
153 /* Unique id for next type created. */
154 static GTY(()) unsigned next_type_uid = 1;
155 /* Unique id for next debug decl created. Use negative numbers,
156 to catch erroneous uses. */
157 static GTY(()) int next_debug_decl_uid;
159 /* Since we cannot rehash a type after it is in the table, we have to
160 keep the hash code. */
162 struct GTY((for_user)) type_hash {
167 /* Initial size of the hash table (rounded to next prime). */
168 #define TYPE_HASH_INITIAL_SIZE 1000
/* GC-aware hasher for the type table: entries survive collection only
   while the hashed type itself is still marked.  */
170 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
172 static hashval_t hash (type_hash *t) { return t->hash; }
173 static bool equal (type_hash *a, type_hash *b);
176 keep_cache_entry (type_hash *&t)
178 return ggc_marked_p (t->type);
182 /* Now here is the hash table. When recording a type, it is added to
183 the slot whose index is the hash code. Note that the hash table is
184 used for several kinds of types (function types, array types and
185 array index range types, for now). While all these live in the
186 same table, they are completely independent, and the hash code is
187 computed differently for each of these. */
189 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
191 /* Hash table and temporary node for larger integer const values. */
/* int_cst_node is a scratch INTEGER_CST reused as the lookup key in
   wide_int_to_tree; it is only consumed (and replaced) on insertion.  */
192 static GTY (()) tree int_cst_node;
194 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
196 static hashval_t hash (tree t);
197 static bool equal (tree x, tree y);
200 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
202 /* Hash table for optimization flags and target option flags. Use the same
203 hash table for both sets of options. Nodes for building the current
204 optimization and target option nodes. The assumption is most of the time
205 the options created will already be in the hash table, so we avoid
206 allocating and freeing up a node repeatably. */
207 static GTY (()) tree cl_optimization_node;
208 static GTY (()) tree cl_target_option_node;
210 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
212 static hashval_t hash (tree t);
213 static bool equal (tree x, tree y);
216 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
218 /* General tree->tree mapping structure for use in hash tables. */
/* Maps from a decl's DECL_UID to its DECL_DEBUG_EXPR / DECL_VALUE_EXPR.  */
222 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
225 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
227 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
229 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
232 equal (tree_vec_map *a, tree_vec_map *b)
234 return a->base.from == b->base.from;
238 keep_cache_entry (tree_vec_map *&m)
240 return ggc_marked_p (m->base.from);
245 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
/* Forward declarations of local helpers defined later in the file.  */
247 static void set_type_quals (tree, int);
248 static void print_type_hash_statistics (void);
249 static void print_debug_expr_statistics (void);
250 static void print_value_expr_statistics (void);
252 tree global_trees[TI_MAX];
253 tree integer_types[itk_none];
255 bool int_n_enabled_p[NUM_INT_N_ENTS];
256 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
/* tree_contains_struct[CODE][TS] is nonzero when nodes with tree code
   CODE contain the structure TS; filled in by
   initialize_tree_contains_struct below.  */
258 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
260 /* Number of operands for each OpenMP clause. */
/* Indexed by enum omp_clause_code; must stay in the same order as that
   enum.  tree_size relies on this table to size OMP_CLAUSE nodes.  */
261 unsigned const char omp_clause_num_ops[] =
263 0, /* OMP_CLAUSE_ERROR */
264 1, /* OMP_CLAUSE_PRIVATE */
265 1, /* OMP_CLAUSE_SHARED */
266 1, /* OMP_CLAUSE_FIRSTPRIVATE */
267 2, /* OMP_CLAUSE_LASTPRIVATE */
268 5, /* OMP_CLAUSE_REDUCTION */
269 1, /* OMP_CLAUSE_COPYIN */
270 1, /* OMP_CLAUSE_COPYPRIVATE */
271 3, /* OMP_CLAUSE_LINEAR */
272 2, /* OMP_CLAUSE_ALIGNED */
273 1, /* OMP_CLAUSE_DEPEND */
274 1, /* OMP_CLAUSE_UNIFORM */
275 1, /* OMP_CLAUSE_TO_DECLARE */
276 1, /* OMP_CLAUSE_LINK */
277 2, /* OMP_CLAUSE_FROM */
278 2, /* OMP_CLAUSE_TO */
279 2, /* OMP_CLAUSE_MAP */
280 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
281 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
282 2, /* OMP_CLAUSE__CACHE_ */
283 2, /* OMP_CLAUSE_GANG */
284 1, /* OMP_CLAUSE_ASYNC */
285 1, /* OMP_CLAUSE_WAIT */
286 0, /* OMP_CLAUSE_AUTO */
287 0, /* OMP_CLAUSE_SEQ */
288 1, /* OMP_CLAUSE__LOOPTEMP_ */
289 1, /* OMP_CLAUSE_IF */
290 1, /* OMP_CLAUSE_NUM_THREADS */
291 1, /* OMP_CLAUSE_SCHEDULE */
292 0, /* OMP_CLAUSE_NOWAIT */
293 1, /* OMP_CLAUSE_ORDERED */
294 0, /* OMP_CLAUSE_DEFAULT */
295 3, /* OMP_CLAUSE_COLLAPSE */
296 0, /* OMP_CLAUSE_UNTIED */
297 1, /* OMP_CLAUSE_FINAL */
298 0, /* OMP_CLAUSE_MERGEABLE */
299 1, /* OMP_CLAUSE_DEVICE */
300 1, /* OMP_CLAUSE_DIST_SCHEDULE */
301 0, /* OMP_CLAUSE_INBRANCH */
302 0, /* OMP_CLAUSE_NOTINBRANCH */
303 1, /* OMP_CLAUSE_NUM_TEAMS */
304 1, /* OMP_CLAUSE_THREAD_LIMIT */
305 0, /* OMP_CLAUSE_PROC_BIND */
306 1, /* OMP_CLAUSE_SAFELEN */
307 1, /* OMP_CLAUSE_SIMDLEN */
308 0, /* OMP_CLAUSE_FOR */
309 0, /* OMP_CLAUSE_PARALLEL */
310 0, /* OMP_CLAUSE_SECTIONS */
311 0, /* OMP_CLAUSE_TASKGROUP */
312 1, /* OMP_CLAUSE_PRIORITY */
313 1, /* OMP_CLAUSE_GRAINSIZE */
314 1, /* OMP_CLAUSE_NUM_TASKS */
315 0, /* OMP_CLAUSE_NOGROUP */
316 0, /* OMP_CLAUSE_THREADS */
317 0, /* OMP_CLAUSE_SIMD */
318 1, /* OMP_CLAUSE_HINT */
319 0, /* OMP_CLAUSE_DEFAULTMAP */
320 1, /* OMP_CLAUSE__SIMDUID_ */
321 0, /* OMP_CLAUSE__SIMT_ */
322 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
323 0, /* OMP_CLAUSE_INDEPENDENT */
324 1, /* OMP_CLAUSE_WORKER */
325 1, /* OMP_CLAUSE_VECTOR */
326 1, /* OMP_CLAUSE_NUM_GANGS */
327 1, /* OMP_CLAUSE_NUM_WORKERS */
328 1, /* OMP_CLAUSE_VECTOR_LENGTH */
329 3, /* OMP_CLAUSE_TILE */
330 2, /* OMP_CLAUSE__GRIDDIM_ */
/* Printable clause names, parallel to the table above.
   NOTE(review): the initializer entries of omp_clause_code_name are
   elided from this excerpt.  */
333 const char * const omp_clause_code_name[] =
406 /* Return the tree node structure used by tree code CODE. */
/* Maps a tree code to the TS_* structure enumerator that describes its
   layout; used below to seed tree_contains_struct.
   NOTE(review): several case labels, braces and default returns of the
   nested switches are elided from this excerpt.  */
408 static inline enum tree_node_structure_enum
409 tree_node_structure_for_code (enum tree_code code)
411 switch (TREE_CODE_CLASS (code))
413 case tcc_declaration:
418 return TS_FIELD_DECL;
424 return TS_LABEL_DECL;
426 return TS_RESULT_DECL;
427 case DEBUG_EXPR_DECL:
430 return TS_CONST_DECL;
434 return TS_FUNCTION_DECL;
435 case TRANSLATION_UNIT_DECL:
436 return TS_TRANSLATION_UNIT_DECL;
438 return TS_DECL_NON_COMMON;
442 return TS_TYPE_NON_COMMON;
451 default: /* tcc_constant and tcc_exceptional */
456 /* tcc_constant cases. */
457 case VOID_CST: return TS_TYPED;
458 case INTEGER_CST: return TS_INT_CST;
459 case REAL_CST: return TS_REAL_CST;
460 case FIXED_CST: return TS_FIXED_CST;
461 case COMPLEX_CST: return TS_COMPLEX;
462 case VECTOR_CST: return TS_VECTOR;
463 case STRING_CST: return TS_STRING;
464 /* tcc_exceptional cases. */
465 case ERROR_MARK: return TS_COMMON;
466 case IDENTIFIER_NODE: return TS_IDENTIFIER;
467 case TREE_LIST: return TS_LIST;
468 case TREE_VEC: return TS_VEC;
469 case SSA_NAME: return TS_SSA_NAME;
470 case PLACEHOLDER_EXPR: return TS_COMMON;
471 case STATEMENT_LIST: return TS_STATEMENT_LIST;
472 case BLOCK: return TS_BLOCK;
473 case CONSTRUCTOR: return TS_CONSTRUCTOR;
474 case TREE_BINFO: return TS_BINFO;
475 case OMP_CLAUSE: return TS_OMP_CLAUSE;
476 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
477 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
485 /* Initialize tree_contains_struct to describe the hierarchy of tree
/* For every tree code, mark its own TS structure and then walk up the
   structure-inheritance chain (each case falls through conceptually to
   its base via the MARK_TS_* macros), so CODE_CONTAINS_STRUCT queries
   work for derived layouts.  The gcc_asserts at the end spot-check the
   relationships that fold and friends depend on.
   NOTE(review): many case labels, MARK_TS_* calls and the loop braces
   are elided from this excerpt.  */
489 initialize_tree_contains_struct (void)
493 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
496 enum tree_node_structure_enum ts_code;
498 code = (enum tree_code) i;
499 ts_code = tree_node_structure_for_code (code);
501 /* Mark the TS structure itself. */
502 tree_contains_struct[code][ts_code] = 1;
504 /* Mark all the structures that TS is derived from. */
509 case TS_OPTIMIZATION:
510 case TS_TARGET_OPTION:
524 case TS_STATEMENT_LIST:
525 MARK_TS_TYPED (code);
529 case TS_DECL_MINIMAL:
535 MARK_TS_COMMON (code);
538 case TS_TYPE_WITH_LANG_SPECIFIC:
539 MARK_TS_TYPE_COMMON (code);
542 case TS_TYPE_NON_COMMON:
543 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
547 MARK_TS_DECL_MINIMAL (code);
552 MARK_TS_DECL_COMMON (code);
555 case TS_DECL_NON_COMMON:
556 MARK_TS_DECL_WITH_VIS (code);
559 case TS_DECL_WITH_VIS:
563 MARK_TS_DECL_WRTL (code);
567 MARK_TS_DECL_COMMON (code);
571 MARK_TS_DECL_WITH_VIS (code);
575 case TS_FUNCTION_DECL:
576 MARK_TS_DECL_NON_COMMON (code);
579 case TS_TRANSLATION_UNIT_DECL:
580 MARK_TS_DECL_COMMON (code);
588 /* Basic consistency checks for attributes used in fold. */
589 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
590 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
591 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
592 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
593 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
594 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
595 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
596 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
597 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
598 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
599 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
600 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
601 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
602 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
603 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
604 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
605 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
606 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
607 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
608 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
609 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
610 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
611 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
612 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
613 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
614 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
615 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
616 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
617 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
618 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
619 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
620 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
621 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
622 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
623 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
624 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
625 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
626 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
628 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
/* NOTE(review): the function signature for this fragment is elided from
   this excerpt; the body creates the file-scope hash tables and seed
   nodes declared above and then fills tree_contains_struct, so it
   appears to be the one-time tree-subsystem initializer — confirm
   against the full source.  */
637 /* Initialize the hash table of types. */
639 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
642 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
645 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
647 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
/* Scratch INTEGER_CST used as the lookup key by wide_int_to_tree.  */
649 int_cst_node = make_int_cst (1, 1);
651 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
653 cl_optimization_node = make_node (OPTIMIZATION_NODE);
654 cl_target_option_node = make_node (TARGET_OPTION_NODE);
656 /* Initialize the tree_contains_struct array. */
657 initialize_tree_contains_struct ();
658 lang_hooks.init_ts ();
662 /* The name of the object as the assembler will see it (but before any
663 translations made by ASM_OUTPUT_LABELREF). Often this is the same
664 as DECL_NAME. It is an IDENTIFIER_NODE. */
/* Lazily computes the assembler name via the language hook on first
   access, then returns the cached field.  */
666 decl_assembler_name (tree decl)
668 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
669 lang_hooks.set_decl_assembler_name (decl);
670 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
673 /* When the target supports COMDAT groups, this indicates which group the
674 DECL is associated with. This can be either an IDENTIFIER_NODE or a
675 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
/* Queries the symbol table; the elided line presumably returns NULL
   when NODE has no symtab entry — confirm against the full source.  */
677 decl_comdat_group (const_tree node)
679 struct symtab_node *snode = symtab_node::get (node);
682 return snode->get_comdat_group ();
685 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
687 decl_comdat_group_id (const_tree node)
689 struct symtab_node *snode = symtab_node::get (node);
692 return snode->get_comdat_group_id ();
695 /* When the target supports named section, return its name as IDENTIFIER_NODE
696 or NULL if it is in no section. */
698 decl_section_name (const_tree node)
700 struct symtab_node *snode = symtab_node::get (node);
703 return snode->get_section ();
706 /* Set section name of NODE to VALUE (that is expected to be
/* Creates the varpool/cgraph symtab entry on demand: variables get a
   varpool node, everything else a cgraph node.  */
709 set_decl_section_name (tree node, const char *value)
711 struct symtab_node *snode;
715 snode = symtab_node::get (node);
719 else if (VAR_P (node))
720 snode = varpool_node::get_create (node);
722 snode = cgraph_node::get_create (node);
723 snode->set_section (value);
726 /* Return TLS model of a variable NODE. */
/* Falls back to TLS_MODEL_NONE when the variable has no varpool node.  */
728 decl_tls_model (const_tree node)
730 struct varpool_node *snode = varpool_node::get (node);
732 return TLS_MODEL_NONE;
733 return snode->tls_model;
736 /* Set TLS model of variable NODE to MODEL. */
/* For TLS_MODEL_NONE an existing varpool node (if any) is looked up
   rather than created; otherwise the node is created on demand.  */
738 set_decl_tls_model (tree node, enum tls_model model)
740 struct varpool_node *vnode;
742 if (model == TLS_MODEL_NONE)
744 vnode = varpool_node::get (node);
749 vnode = varpool_node::get_create (node);
750 vnode->tls_model = model;
753 /* Compute the number of bytes occupied by a tree with code CODE.
754 This function cannot be used for nodes that have variable sizes,
755 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
/* Variable-size codes deliberately hit gcc_unreachable here; callers
   with a concrete node use tree_size instead.  Unknown codes are
   delegated to lang_hooks.tree_size.
   NOTE(review): several case labels of the decl switch are elided from
   this excerpt.  */
757 tree_code_size (enum tree_code code)
759 switch (TREE_CODE_CLASS (code))
761 case tcc_declaration: /* A decl node */
766 return sizeof (struct tree_field_decl);
768 return sizeof (struct tree_parm_decl);
770 return sizeof (struct tree_var_decl);
772 return sizeof (struct tree_label_decl);
774 return sizeof (struct tree_result_decl);
776 return sizeof (struct tree_const_decl);
778 return sizeof (struct tree_type_decl);
780 return sizeof (struct tree_function_decl);
781 case DEBUG_EXPR_DECL:
782 return sizeof (struct tree_decl_with_rtl);
783 case TRANSLATION_UNIT_DECL:
784 return sizeof (struct tree_translation_unit_decl);
788 return sizeof (struct tree_decl_non_common);
790 return lang_hooks.tree_size (code);
794 case tcc_type: /* a type node */
795 return sizeof (struct tree_type_non_common);
797 case tcc_reference: /* a reference */
798 case tcc_expression: /* an expression */
799 case tcc_statement: /* an expression with side effects */
800 case tcc_comparison: /* a comparison expression */
801 case tcc_unary: /* a unary arithmetic expression */
802 case tcc_binary: /* a binary arithmetic expression */
/* tree_exp already holds one operand; add the remaining ones.  */
803 return (sizeof (struct tree_exp)
804 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
806 case tcc_constant: /* a constant */
809 case VOID_CST: return sizeof (struct tree_typed);
810 case INTEGER_CST: gcc_unreachable ();
811 case REAL_CST: return sizeof (struct tree_real_cst);
812 case FIXED_CST: return sizeof (struct tree_fixed_cst);
813 case COMPLEX_CST: return sizeof (struct tree_complex);
814 case VECTOR_CST: return sizeof (struct tree_vector);
815 case STRING_CST: gcc_unreachable ();
817 return lang_hooks.tree_size (code);
820 case tcc_exceptional: /* something random, like an identifier. */
823 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
824 case TREE_LIST: return sizeof (struct tree_list);
827 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
830 case OMP_CLAUSE: gcc_unreachable ();
832 case SSA_NAME: return sizeof (struct tree_ssa_name);
834 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
835 case BLOCK: return sizeof (struct tree_block);
836 case CONSTRUCTOR: return sizeof (struct tree_constructor);
837 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
838 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
841 return lang_hooks.tree_size (code);
849 /* Compute the number of bytes occupied by NODE. This routine only
850 looks at TREE_CODE, except for those nodes that have variable sizes. */
/* Handles the variable-size codes (INTEGER_CST, TREE_BINFO, TREE_VEC,
   VECTOR_CST, STRING_CST, OMP_CLAUSE, tcc_vl_exp) explicitly; all
   other codes defer to the fixed tree_code_size.
   NOTE(review): the case labels preceding each return are elided from
   this excerpt.  */
852 tree_size (const_tree node)
854 const enum tree_code code = TREE_CODE (node);
858 return (sizeof (struct tree_int_cst)
859 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
862 return (offsetof (struct tree_binfo, base_binfos)
864 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
867 return (sizeof (struct tree_vec)
868 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
871 return (sizeof (struct tree_vector)
872 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
/* +1 for the terminating NUL stored after the string bytes.  */
875 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
878 return (sizeof (struct tree_omp_clause)
879 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
883 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
884 return (sizeof (struct tree_exp)
885 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
887 return tree_code_size (code);
891 /* Record interesting allocation statistics for a tree node with CODE
/* Classifies the allocation into a tree_node_kind bucket and bumps the
   per-code and per-kind counters; a no-op unless GATHER_STATISTICS.
   NOTE(review): most kind-selection branches of the switch are elided
   from this excerpt.  */
895 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
896 size_t length ATTRIBUTE_UNUSED)
898 enum tree_code_class type = TREE_CODE_CLASS (code);
901 if (!GATHER_STATISTICS)
906 case tcc_declaration: /* A decl node */
910 case tcc_type: /* a type node */
914 case tcc_statement: /* an expression with side effects */
918 case tcc_reference: /* a reference */
922 case tcc_expression: /* an expression */
923 case tcc_comparison: /* a comparison expression */
924 case tcc_unary: /* a unary arithmetic expression */
925 case tcc_binary: /* a binary arithmetic expression */
929 case tcc_constant: /* a constant */
933 case tcc_exceptional: /* something random, like an identifier. */
936 case IDENTIFIER_NODE:
949 kind = ssa_name_kind;
961 kind = omp_clause_kind;
978 tree_code_counts[(int) code]++;
979 tree_node_counts[(int) kind]++;
980 tree_node_sizes[(int) kind] += length;
983 /* Allocate and return a new UID from the DECL_UID namespace. */
/* Monotonically increasing; debug decls use a separate negative
   counter (next_debug_decl_uid) instead.  */
986 allocate_decl_uid (void)
988 return next_decl_uid++;
991 /* Return a newly allocated node of code CODE. For decl and type
992 nodes, some other fields are initialized. The rest of the node is
993 initialized to zero. This function cannot be used for TREE_VEC,
994 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
997 Achoo! I got a code in the node. */
/* Allocation is via ggc_alloc_cleared_*, so any field not explicitly
   set below is zero.  NOTE(review): several case labels and braces of
   the class switch are elided from this excerpt.  */
1000 make_node_stat (enum tree_code code MEM_STAT_DECL)
1003 enum tree_code_class type = TREE_CODE_CLASS (code);
1004 size_t length = tree_code_size (code);
1006 record_node_allocation_statistics (code, length);
1008 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1009 TREE_SET_CODE (t, code);
1014 TREE_SIDE_EFFECTS (t) = 1;
1017 case tcc_declaration:
1018 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1020 if (code == FUNCTION_DECL)
1022 SET_DECL_ALIGN (t, FUNCTION_ALIGNMENT (FUNCTION_BOUNDARY));
1023 SET_DECL_MODE (t, FUNCTION_MODE);
1026 SET_DECL_ALIGN (t, 1);
1028 DECL_SOURCE_LOCATION (t) = input_location;
/* Debug decls draw from the negative UID counter so accidental uses
   of their UID stand out.  */
1029 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1030 DECL_UID (t) = --next_debug_decl_uid;
1033 DECL_UID (t) = allocate_decl_uid ();
1034 SET_DECL_PT_UID (t, -1);
1036 if (TREE_CODE (t) == LABEL_DECL)
1037 LABEL_DECL_UID (t) = -1;
/* New types start as their own main variant and canonical type.  */
1042 TYPE_UID (t) = next_type_uid++;
1043 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1044 TYPE_USER_ALIGN (t) = 0;
1045 TYPE_MAIN_VARIANT (t) = t;
1046 TYPE_CANONICAL (t) = t;
1048 /* Default to no attributes for type, but let target change that. */
1049 TYPE_ATTRIBUTES (t) = NULL_TREE;
1050 targetm.set_default_type_attributes (t);
1052 /* We have not yet computed the alias set for this type. */
1053 TYPE_ALIAS_SET (t) = -1;
1057 TREE_CONSTANT (t) = 1;
1060 case tcc_expression:
1066 case PREDECREMENT_EXPR:
1067 case PREINCREMENT_EXPR:
1068 case POSTDECREMENT_EXPR:
1069 case POSTINCREMENT_EXPR:
1070 /* All of these have side-effects, no matter what their
1072 TREE_SIDE_EFFECTS (t) = 1;
1080 case tcc_exceptional:
/* Option nodes own a GC-allocated options struct.  */
1083 case TARGET_OPTION_NODE:
1084 TREE_TARGET_OPTION(t)
1085 = ggc_cleared_alloc<struct cl_target_option> ();
1088 case OPTIMIZATION_NODE:
1089 TREE_OPTIMIZATION (t)
1090 = ggc_cleared_alloc<struct cl_optimization> ();
1099 /* Other classes need no special treatment. */
1106 /* Free tree node. */
/* Rolls back the allocation statistics and releases the auxiliary
   vectors some codes own before returning the node to the GC.
   NOTE(review): the line computing t_kind and the final ggc_free call
   are elided from this excerpt.  */
1109 free_node (tree node)
1111 enum tree_code code = TREE_CODE (node);
1112 if (GATHER_STATISTICS)
1114 tree_code_counts[(int) TREE_CODE (node)]--;
1115 tree_node_counts[(int) t_kind]--;
1116 tree_node_sizes[(int) t_kind] -= tree_size (node);
1118 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1119 vec_free (CONSTRUCTOR_ELTS (node));
1120 else if (code == BLOCK)
1121 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1122 else if (code == TREE_BINFO)
1123 vec_free (BINFO_BASE_ACCESSES (node));
1127 /* Return a new node with the same contents as NODE except that its
1128 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
/* Performs a shallow memcpy of the node, then un-shares the fields
   that must be unique per node: UIDs, value/init-priority side tables,
   symtab links, and a type's cached-values vector.  */
1131 copy_node_stat (tree node MEM_STAT_DECL)
1134 enum tree_code code = TREE_CODE (node);
1137 gcc_assert (code != STATEMENT_LIST);
1139 length = tree_size (node);
1140 record_node_allocation_statistics (code, length);
1141 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1142 memcpy (t, node, length);
1144 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1146 TREE_ASM_WRITTEN (t) = 0;
1147 TREE_VISITED (t) = 0;
1149 if (TREE_CODE_CLASS (code) == tcc_declaration)
1151 if (code == DEBUG_EXPR_DECL)
1152 DECL_UID (t) = --next_debug_decl_uid;
1155 DECL_UID (t) = allocate_decl_uid ();
1156 if (DECL_PT_UID_SET_P (node))
1157 SET_DECL_PT_UID (t, DECL_PT_UID (node));
/* Value-exprs live in a UID-keyed side table, so re-register the
   copy under its fresh UID.  */
1159 if ((TREE_CODE (node) == PARM_DECL || VAR_P (node))
1160 && DECL_HAS_VALUE_EXPR_P (node))
1162 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1163 DECL_HAS_VALUE_EXPR_P (t) = 1;
1165 /* DECL_DEBUG_EXPR is copied explicitely by callers. */
1168 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1169 t->decl_with_vis.symtab_node = NULL;
1171 if (VAR_P (node) && DECL_HAS_INIT_PRIORITY_P (node))
1173 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1174 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1176 if (TREE_CODE (node) == FUNCTION_DECL)
1178 DECL_STRUCT_FUNCTION (t) = NULL;
1179 t->decl_with_vis.symtab_node = NULL;
1182 else if (TREE_CODE_CLASS (code) == tcc_type)
1184 TYPE_UID (t) = next_type_uid++;
1185 /* The following is so that the debug code for
1186 the copy is different from the original type.
1187 The two statements usually duplicate each other
1188 (because they clear fields of the same union),
1189 but the optimizer should catch that. */
1190 TYPE_SYMTAB_POINTER (t) = 0;
1191 TYPE_SYMTAB_ADDRESS (t) = 0;
1193 /* Do not copy the values cache. */
1194 if (TYPE_CACHED_VALUES_P (t))
1196 TYPE_CACHED_VALUES_P (t) = 0;
1197 TYPE_CACHED_VALUES (t) = NULL_TREE;
/* Option nodes must not share their options struct with the
   original; deep-copy it.  */
1200 else if (code == TARGET_OPTION_NODE)
1202 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1203 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1204 sizeof (struct cl_target_option));
1206 else if (code == OPTIMIZATION_NODE)
1208 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1209 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1210 sizeof (struct cl_optimization));
1216 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1217 For example, this can copy a list made of TREE_LIST nodes. */
/* Shallow-copies each node in turn, relinking the copies.
   NOTE(review): the empty-list early return and the loop construct
   around lines 1232-1234 are elided from this excerpt.  */
1220 copy_list (tree list)
1228 head = prev = copy_node (list);
1229 next = TREE_CHAIN (list);
1232 TREE_CHAIN (prev) = copy_node (next);
1233 prev = TREE_CHAIN (prev);
1234 next = TREE_CHAIN (next);
1240 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1241 INTEGER_CST with value CST and type TYPE. */
/* An unsigned value whose top bit is set needs one extra HWI of
   explicit zero extension so sign-agnostic consumers see the right
   value; otherwise the canonical wide_int length suffices.  */
1244 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1246 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1247 /* We need extra HWIs if CST is an unsigned integer with its
1249 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1250 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1251 return cst.get_len ();
1254 /* Return a new INTEGER_CST with value CST and type TYPE. */
/* Materializes CST into a freshly allocated node, writing the extended
   (zero- or sign-filled) HWI elements beyond the canonical length when
   ext_len > len.  NOTE(review): the condition guarding the first
   branch and the final return are elided from this excerpt.  */
1257 build_new_int_cst (tree type, const wide_int &cst)
1259 unsigned int len = cst.get_len ();
1260 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1261 tree nt = make_int_cst (len, ext_len);
1266 TREE_INT_CST_ELT (nt, ext_len)
1267 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1268 for (unsigned int i = len; i < ext_len; ++i)
1269 TREE_INT_CST_ELT (nt, i) = -1;
1271 else if (TYPE_UNSIGNED (type)
1272 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
/* Mask off the bits above the type's precision in the top element.  */
1275 TREE_INT_CST_ELT (nt, len)
1276 = zext_hwi (cst.elt (len),
1277 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1280 for (unsigned int i = 0; i < len; i++)
1281 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1282 TREE_TYPE (nt) = type;
1286 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
/* NULL TYPE is accepted for legacy callers and mapped to
   integer_type_node before delegating to wide_int_to_tree.  */
1289 build_int_cst (tree type, HOST_WIDE_INT low)
1291 /* Support legacy code. */
1293 type = integer_type_node;
1295 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
/* Like build_int_cst, but CST is unsigned (zero extended to TYPE).  */
1299 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1301 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1304 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
/* Unlike build_int_cst, TYPE must be non-NULL here.  */
1307 build_int_cst_type (tree type, HOST_WIDE_INT low)
1310 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1313 /* Constructs tree in type TYPE from with value given by CST. Signedness
1314 of CST is assumed to be the same as the signedness of TYPE. */
1317 double_int_to_tree (tree type, double_int cst)
1319 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1322 /* We force the wide_int CST to the range of the type TYPE by sign or
1323 zero extending it. OVERFLOWABLE indicates if we are interested in
1324 overflow of the value, when >0 we are only interested in signed
1325 overflow, for <0 we are interested in any overflow. OVERFLOWED
1326 indicates whether overflow has already occurred. CONST_OVERFLOWED
1327 indicates whether constant overflow has already occurred. We force
1328 T's value to be within range of T's type (by setting to 0 or 1 all
1329 the bits outside the type's range). We set TREE_OVERFLOWED if,
1330 OVERFLOWED is nonzero,
1331 or OVERFLOWABLE is >0 and signed overflow occurs
1332 or OVERFLOWABLE is <0 and any overflow occurs
1333 We return a new tree node for the extended wide_int. The node
1334 is shared if no overflow flags are set. */
/* NOTE(review): part of the overflow-condition expression (the
   OVERFLOWABLE < 0 arm around line 1347) is elided from this
   excerpt.  */
1338 force_fit_type (tree type, const wide_int_ref &cst,
1339 int overflowable, bool overflowed)
1341 signop sign = TYPE_SIGN (type);
1343 /* If we need to set overflow flags, return a new unshared node. */
1344 if (overflowed || !wi::fits_to_tree_p (cst, type))
1348 || (overflowable > 0 && sign == SIGNED))
1350 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
/* Unshared node: TREE_OVERFLOW must not leak into the caches.  */
1351 tree t = build_new_int_cst (type, tmp);
1352 TREE_OVERFLOW (t) = 1;
1357 /* Else build a shared node. */
1358 return wide_int_to_tree (type, cst);
1361 /* These are the hash table functions for the hash table of INTEGER_CST
1362 nodes of a sizetype. */
1364 /* Return the hash code X, an INTEGER_CST. */
/* Seeds with the type's UID so equal values of different types land in
   different buckets, then mixes in each HWI element.  */
1367 int_cst_hasher::hash (tree x)
1369 const_tree const t = x;
1370 hashval_t code = TYPE_UID (TREE_TYPE (t));
1373 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1374 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1379 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1380 is the same as that given by *Y, which is the same. */
/* Requires identical type, element counts, and every HWI element;
   the actual true/false returns are elided from this excerpt.  */
1383 int_cst_hasher::equal (tree x, tree y)
1385 const_tree const xt = x;
1386 const_tree const yt = y;
1388 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1389 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1390 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1393 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1394 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1400 /* Create an INT_CST node of TYPE and value CST.
1401 The returned node is always shared. For small integers we use a
1402 per-type vector cache, for larger ones we use a single hash table.
1403 The value is extended from its precision according to the sign of
1404 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1405 the upper bits and ensures that hashing and value equality based
1406 upon the underlying HOST_WIDE_INTs works without masking. */
1409 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1416 unsigned int prec = TYPE_PRECISION (type);
1417 signop sgn = TYPE_SIGN (type);
1419 /* Verify that everything is canonical. */
1420 int l = pcst.get_len ();
1423 if (pcst.elt (l - 1) == 0)
1424 gcc_checking_assert (pcst.elt (l - 2) < 0);
1425 if (pcst.elt (l - 1) == HOST_WIDE_INT_M1)
1426 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1429 wide_int cst = wide_int::from (pcst, prec, sgn);
1430 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1434 /* We just need to store a single HOST_WIDE_INT. */
1436 if (TYPE_UNSIGNED (type))
1437 hwi = cst.to_uhwi ();
1439 hwi = cst.to_shwi ();
1441 switch (TREE_CODE (type))
1444 gcc_assert (hwi == 0);
1448 case REFERENCE_TYPE:
1449 case POINTER_BOUNDS_TYPE:
1450 /* Cache NULL pointer and zero bounds. */
1459 /* Cache false or true. */
1461 if (IN_RANGE (hwi, 0, 1))
1467 if (TYPE_SIGN (type) == UNSIGNED)
1470 limit = INTEGER_SHARE_LIMIT;
1471 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1476 /* Cache [-1, N). */
1477 limit = INTEGER_SHARE_LIMIT + 1;
1478 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1492 /* Look for it in the type's vector of small shared ints. */
1493 if (!TYPE_CACHED_VALUES_P (type))
1495 TYPE_CACHED_VALUES_P (type) = 1;
1496 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1499 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1501 /* Make sure no one is clobbering the shared constant. */
1502 gcc_checking_assert (TREE_TYPE (t) == type
1503 && TREE_INT_CST_NUNITS (t) == 1
1504 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1505 && TREE_INT_CST_EXT_NUNITS (t) == 1
1506 && TREE_INT_CST_ELT (t, 0) == hwi);
1509 /* Create a new shared int. */
1510 t = build_new_int_cst (type, cst);
1511 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1516 /* Use the cache of larger shared ints, using int_cst_node as
1519 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1520 TREE_TYPE (int_cst_node) = type;
1522 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1526 /* Insert this one into the hash table. */
1529 /* Make a new node for next time round. */
1530 int_cst_node = make_int_cst (1, 1);
1536 /* The value either hashes properly or we drop it on the floor
1537 for the gc to take care of. There will not be enough of them
1540 tree nt = build_new_int_cst (type, cst);
1541 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1545 /* Insert this one into the hash table. */
1555 cache_integer_cst (tree t)
1557 tree type = TREE_TYPE (t);
1560 int prec = TYPE_PRECISION (type);
1562 gcc_assert (!TREE_OVERFLOW (t));
1564 switch (TREE_CODE (type))
1567 gcc_assert (integer_zerop (t));
1571 case REFERENCE_TYPE:
1572 /* Cache NULL pointer. */
1573 if (integer_zerop (t))
1581 /* Cache false or true. */
1583 if (wi::ltu_p (t, 2))
1584 ix = TREE_INT_CST_ELT (t, 0);
1589 if (TYPE_UNSIGNED (type))
1592 limit = INTEGER_SHARE_LIMIT;
1594 /* This is a little hokie, but if the prec is smaller than
1595 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1596 obvious test will not get the correct answer. */
1597 if (prec < HOST_BITS_PER_WIDE_INT)
1599 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1600 ix = tree_to_uhwi (t);
1602 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1603 ix = tree_to_uhwi (t);
1608 limit = INTEGER_SHARE_LIMIT + 1;
1610 if (integer_minus_onep (t))
1612 else if (!wi::neg_p (t))
1614 if (prec < HOST_BITS_PER_WIDE_INT)
1616 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1617 ix = tree_to_shwi (t) + 1;
1619 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1620 ix = tree_to_shwi (t) + 1;
1634 /* Look for it in the type's vector of small shared ints. */
1635 if (!TYPE_CACHED_VALUES_P (type))
1637 TYPE_CACHED_VALUES_P (type) = 1;
1638 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1641 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1642 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1646 /* Use the cache of larger shared ints. */
1647 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1648 /* If there is already an entry for the number verify it's the
1651 gcc_assert (wi::eq_p (tree (*slot), t));
1653 /* Otherwise insert this one into the hash table. */
1659 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1660 and the rest are zeros. */
1663 build_low_bits_mask (tree type, unsigned bits)
1665 gcc_assert (bits <= TYPE_PRECISION (type));
1667 return wide_int_to_tree (type, wi::mask (bits, false,
1668 TYPE_PRECISION (type)));
1671 /* Checks that X is integer constant that can be expressed in (unsigned)
1672 HOST_WIDE_INT without loss of precision. */
1675 cst_and_fits_in_hwi (const_tree x)
1677 return (TREE_CODE (x) == INTEGER_CST
1678 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1681 /* Build a newly constructed VECTOR_CST node of length LEN. */
1684 make_vector_stat (unsigned len MEM_STAT_DECL)
1687 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1689 record_node_allocation_statistics (VECTOR_CST, length);
1691 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1693 TREE_SET_CODE (t, VECTOR_CST);
1694 TREE_CONSTANT (t) = 1;
1699 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1700 are in a list pointed to by VALS. */
1703 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1707 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1708 TREE_TYPE (v) = type;
1710 /* Iterate through elements and check for overflow. */
1711 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1713 tree value = vals[cnt];
1715 VECTOR_CST_ELT (v, cnt) = value;
1717 /* Don't crash if we get an address constant. */
1718 if (!CONSTANT_CLASS_P (value))
1721 over |= TREE_OVERFLOW (value);
1724 TREE_OVERFLOW (v) = over;
1728 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1729 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1732 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1734 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1735 unsigned HOST_WIDE_INT idx, pos = 0;
1738 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1740 if (TREE_CODE (value) == VECTOR_CST)
1741 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1742 vec[pos++] = VECTOR_CST_ELT (value, i);
1746 while (pos < TYPE_VECTOR_SUBPARTS (type))
1747 vec[pos++] = build_zero_cst (TREE_TYPE (type));
1749 return build_vector (type, vec);
1752 /* Build a vector of type VECTYPE where all the elements are SCs. */
1754 build_vector_from_val (tree vectype, tree sc)
1756 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1758 if (sc == error_mark_node)
1761 /* Verify that the vector type is suitable for SC. Note that there
1762 is some inconsistency in the type-system with respect to restrict
1763 qualifications of pointers. Vector types always have a main-variant
1764 element type and the qualification is applied to the vector-type.
1765 So TREE_TYPE (vector-type) does not return a properly qualified
1766 vector element-type. */
1767 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1768 TREE_TYPE (vectype)));
1770 if (CONSTANT_CLASS_P (sc))
1772 tree *v = XALLOCAVEC (tree, nunits);
1773 for (i = 0; i < nunits; ++i)
1775 return build_vector (vectype, v);
1779 vec<constructor_elt, va_gc> *v;
1780 vec_alloc (v, nunits);
1781 for (i = 0; i < nunits; ++i)
1782 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1783 return build_constructor (vectype, v);
1787 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1788 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1791 recompute_constructor_flags (tree c)
1795 bool constant_p = true;
1796 bool side_effects_p = false;
1797 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1799 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1801 /* Mostly ctors will have elts that don't have side-effects, so
1802 the usual case is to scan all the elements. Hence a single
1803 loop for both const and side effects, rather than one loop
1804 each (with early outs). */
1805 if (!TREE_CONSTANT (val))
1807 if (TREE_SIDE_EFFECTS (val))
1808 side_effects_p = true;
1811 TREE_SIDE_EFFECTS (c) = side_effects_p;
1812 TREE_CONSTANT (c) = constant_p;
1815 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1819 verify_constructor_flags (tree c)
1823 bool constant_p = TREE_CONSTANT (c);
1824 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1825 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1827 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1829 if (constant_p && !TREE_CONSTANT (val))
1830 internal_error ("non-constant element in constant CONSTRUCTOR");
1831 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1832 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1836 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1837 are in the vec pointed to by VALS. */
1839 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1841 tree c = make_node (CONSTRUCTOR);
1843 TREE_TYPE (c) = type;
1844 CONSTRUCTOR_ELTS (c) = vals;
1846 recompute_constructor_flags (c);
1851 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1854 build_constructor_single (tree type, tree index, tree value)
1856 vec<constructor_elt, va_gc> *v;
1857 constructor_elt elt = {index, value};
1860 v->quick_push (elt);
1862 return build_constructor (type, v);
1866 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1867 are in a list pointed to by VALS. */
1869 build_constructor_from_list (tree type, tree vals)
1872 vec<constructor_elt, va_gc> *v = NULL;
1876 vec_alloc (v, list_length (vals));
1877 for (t = vals; t; t = TREE_CHAIN (t))
1878 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1881 return build_constructor (type, v);
1884 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1885 of elements, provided as index/value pairs. */
1888 build_constructor_va (tree type, int nelts, ...)
1890 vec<constructor_elt, va_gc> *v = NULL;
1893 va_start (p, nelts);
1894 vec_alloc (v, nelts);
1897 tree index = va_arg (p, tree);
1898 tree value = va_arg (p, tree);
1899 CONSTRUCTOR_APPEND_ELT (v, index, value);
1902 return build_constructor (type, v);
1905 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1908 build_fixed (tree type, FIXED_VALUE_TYPE f)
1911 FIXED_VALUE_TYPE *fp;
1913 v = make_node (FIXED_CST);
1914 fp = ggc_alloc<fixed_value> ();
1915 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1917 TREE_TYPE (v) = type;
1918 TREE_FIXED_CST_PTR (v) = fp;
1922 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1925 build_real (tree type, REAL_VALUE_TYPE d)
1928 REAL_VALUE_TYPE *dp;
1931 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1932 Consider doing it via real_convert now. */
1934 v = make_node (REAL_CST);
1935 dp = ggc_alloc<real_value> ();
1936 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1938 TREE_TYPE (v) = type;
1939 TREE_REAL_CST_PTR (v) = dp;
1940 TREE_OVERFLOW (v) = overflow;
1944 /* Like build_real, but first truncate D to the type. */
1947 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1949 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1952 /* Return a new REAL_CST node whose type is TYPE
1953 and whose value is the integer value of the INTEGER_CST node I. */
1956 real_value_from_int_cst (const_tree type, const_tree i)
1960 /* Clear all bits of the real value type so that we can later do
1961 bitwise comparisons to see if two values are the same. */
1962 memset (&d, 0, sizeof d);
1964 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1965 TYPE_SIGN (TREE_TYPE (i)));
1969 /* Given a tree representing an integer constant I, return a tree
1970 representing the same value as a floating-point constant of type TYPE. */
1973 build_real_from_int_cst (tree type, const_tree i)
1976 int overflow = TREE_OVERFLOW (i);
1978 v = build_real (type, real_value_from_int_cst (type, i));
1980 TREE_OVERFLOW (v) |= overflow;
1984 /* Return a newly constructed STRING_CST node whose value is
1985 the LEN characters at STR.
1986 Note that for a C string literal, LEN should include the trailing NUL.
1987 The TREE_TYPE is not initialized. */
1990 build_string (int len, const char *str)
1995 /* Do not waste bytes provided by padding of struct tree_string. */
1996 length = len + offsetof (struct tree_string, str) + 1;
1998 record_node_allocation_statistics (STRING_CST, length);
2000 s = (tree) ggc_internal_alloc (length);
2002 memset (s, 0, sizeof (struct tree_typed));
2003 TREE_SET_CODE (s, STRING_CST);
2004 TREE_CONSTANT (s) = 1;
2005 TREE_STRING_LENGTH (s) = len;
2006 memcpy (s->string.str, str, len);
2007 s->string.str[len] = '\0';
2012 /* Return a newly constructed COMPLEX_CST node whose value is
2013 specified by the real and imaginary parts REAL and IMAG.
2014 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2015 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2018 build_complex (tree type, tree real, tree imag)
2020 tree t = make_node (COMPLEX_CST);
2022 TREE_REALPART (t) = real;
2023 TREE_IMAGPART (t) = imag;
2024 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2025 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2029 /* Build a complex (inf +- 0i), such as for the result of cproj.
2030 TYPE is the complex tree type of the result. If NEG is true, the
2031 imaginary zero is negative. */
2034 build_complex_inf (tree type, bool neg)
2036 REAL_VALUE_TYPE rinf, rzero = dconst0;
2040 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2041 build_real (TREE_TYPE (type), rzero));
2044 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2045 element is set to 1. In particular, this is 1 + i for complex types. */
2048 build_each_one_cst (tree type)
2050 if (TREE_CODE (type) == COMPLEX_TYPE)
2052 tree scalar = build_one_cst (TREE_TYPE (type));
2053 return build_complex (type, scalar, scalar);
2056 return build_one_cst (type);
2059 /* Return a constant of arithmetic type TYPE which is the
2060 multiplicative identity of the set TYPE. */
2063 build_one_cst (tree type)
2065 switch (TREE_CODE (type))
2067 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2068 case POINTER_TYPE: case REFERENCE_TYPE:
2070 return build_int_cst (type, 1);
2073 return build_real (type, dconst1);
2075 case FIXED_POINT_TYPE:
2076 /* We can only generate 1 for accum types. */
2077 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2078 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2082 tree scalar = build_one_cst (TREE_TYPE (type));
2084 return build_vector_from_val (type, scalar);
2088 return build_complex (type,
2089 build_one_cst (TREE_TYPE (type)),
2090 build_zero_cst (TREE_TYPE (type)));
2097 /* Return an integer of type TYPE containing all 1's in as much precision as
2098 it contains, or a complex or vector whose subparts are such integers. */
2101 build_all_ones_cst (tree type)
2103 if (TREE_CODE (type) == COMPLEX_TYPE)
2105 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2106 return build_complex (type, scalar, scalar);
2109 return build_minus_one_cst (type);
2112 /* Return a constant of arithmetic type TYPE which is the
2113 opposite of the multiplicative identity of the set TYPE. */
2116 build_minus_one_cst (tree type)
2118 switch (TREE_CODE (type))
2120 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2121 case POINTER_TYPE: case REFERENCE_TYPE:
2123 return build_int_cst (type, -1);
2126 return build_real (type, dconstm1);
2128 case FIXED_POINT_TYPE:
2129 /* We can only generate 1 for accum types. */
2130 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2131 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2136 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2138 return build_vector_from_val (type, scalar);
2142 return build_complex (type,
2143 build_minus_one_cst (TREE_TYPE (type)),
2144 build_zero_cst (TREE_TYPE (type)));
2151 /* Build 0 constant of type TYPE. This is used by constructor folding
2152 and thus the constant should be represented in memory by
2156 build_zero_cst (tree type)
2158 switch (TREE_CODE (type))
2160 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2161 case POINTER_TYPE: case REFERENCE_TYPE:
2162 case OFFSET_TYPE: case NULLPTR_TYPE:
2163 return build_int_cst (type, 0);
2166 return build_real (type, dconst0);
2168 case FIXED_POINT_TYPE:
2169 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2173 tree scalar = build_zero_cst (TREE_TYPE (type));
2175 return build_vector_from_val (type, scalar);
2180 tree zero = build_zero_cst (TREE_TYPE (type));
2182 return build_complex (type, zero, zero);
2186 if (!AGGREGATE_TYPE_P (type))
2187 return fold_convert (type, integer_zero_node);
2188 return build_constructor (type, NULL);
2193 /* Build a BINFO with LEN language slots. */
2196 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2199 size_t length = (offsetof (struct tree_binfo, base_binfos)
2200 + vec<tree, va_gc>::embedded_size (base_binfos));
2202 record_node_allocation_statistics (TREE_BINFO, length);
2204 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2206 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2208 TREE_SET_CODE (t, TREE_BINFO);
2210 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2215 /* Create a CASE_LABEL_EXPR tree node and return it. */
2218 build_case_label (tree low_value, tree high_value, tree label_decl)
2220 tree t = make_node (CASE_LABEL_EXPR);
2222 TREE_TYPE (t) = void_type_node;
2223 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2225 CASE_LOW (t) = low_value;
2226 CASE_HIGH (t) = high_value;
2227 CASE_LABEL (t) = label_decl;
2228 CASE_CHAIN (t) = NULL_TREE;
2233 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2234 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2235 The latter determines the length of the HOST_WIDE_INT vector. */
2238 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2241 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2242 + sizeof (struct tree_int_cst));
2245 record_node_allocation_statistics (INTEGER_CST, length);
2247 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2249 TREE_SET_CODE (t, INTEGER_CST);
2250 TREE_INT_CST_NUNITS (t) = len;
2251 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2252 /* to_offset can only be applied to trees that are offset_int-sized
2253 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2254 must be exactly the precision of offset_int and so LEN is correct. */
2255 if (ext_len <= OFFSET_INT_ELTS)
2256 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2258 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2260 TREE_CONSTANT (t) = 1;
2265 /* Build a newly constructed TREE_VEC node of length LEN. */
2268 make_tree_vec_stat (int len MEM_STAT_DECL)
2271 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2273 record_node_allocation_statistics (TREE_VEC, length);
2275 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2277 TREE_SET_CODE (t, TREE_VEC);
2278 TREE_VEC_LENGTH (t) = len;
2283 /* Grow a TREE_VEC node to new length LEN. */
2286 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2288 gcc_assert (TREE_CODE (v) == TREE_VEC);
2290 int oldlen = TREE_VEC_LENGTH (v);
2291 gcc_assert (len > oldlen);
2293 size_t oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2294 size_t length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2296 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2298 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2300 TREE_VEC_LENGTH (v) = len;
2305 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2306 fixed, and scalar, complex or vector. */
2309 zerop (const_tree expr)
2311 return (integer_zerop (expr)
2312 || real_zerop (expr)
2313 || fixed_zerop (expr));
2316 /* Return 1 if EXPR is the integer constant zero or a complex constant
2320 integer_zerop (const_tree expr)
2322 switch (TREE_CODE (expr))
2325 return wi::eq_p (expr, 0);
2327 return (integer_zerop (TREE_REALPART (expr))
2328 && integer_zerop (TREE_IMAGPART (expr)));
2332 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2333 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2342 /* Return 1 if EXPR is the integer constant one or the corresponding
2343 complex constant. */
2346 integer_onep (const_tree expr)
2348 switch (TREE_CODE (expr))
2351 return wi::eq_p (wi::to_widest (expr), 1);
2353 return (integer_onep (TREE_REALPART (expr))
2354 && integer_zerop (TREE_IMAGPART (expr)));
2358 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2359 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2368 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2369 return 1 if every piece is the integer constant one. */
2372 integer_each_onep (const_tree expr)
2374 if (TREE_CODE (expr) == COMPLEX_CST)
2375 return (integer_onep (TREE_REALPART (expr))
2376 && integer_onep (TREE_IMAGPART (expr)));
2378 return integer_onep (expr);
2381 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2382 it contains, or a complex or vector whose subparts are such integers. */
2385 integer_all_onesp (const_tree expr)
2387 if (TREE_CODE (expr) == COMPLEX_CST
2388 && integer_all_onesp (TREE_REALPART (expr))
2389 && integer_all_onesp (TREE_IMAGPART (expr)))
2392 else if (TREE_CODE (expr) == VECTOR_CST)
2395 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2396 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2401 else if (TREE_CODE (expr) != INTEGER_CST)
2404 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2407 /* Return 1 if EXPR is the integer constant minus one. */
2410 integer_minus_onep (const_tree expr)
2412 if (TREE_CODE (expr) == COMPLEX_CST)
2413 return (integer_all_onesp (TREE_REALPART (expr))
2414 && integer_zerop (TREE_IMAGPART (expr)));
2416 return integer_all_onesp (expr);
2419 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2423 integer_pow2p (const_tree expr)
2425 if (TREE_CODE (expr) == COMPLEX_CST
2426 && integer_pow2p (TREE_REALPART (expr))
2427 && integer_zerop (TREE_IMAGPART (expr)))
2430 if (TREE_CODE (expr) != INTEGER_CST)
2433 return wi::popcount (expr) == 1;
2436 /* Return 1 if EXPR is an integer constant other than zero or a
2437 complex constant other than zero. */
2440 integer_nonzerop (const_tree expr)
2442 return ((TREE_CODE (expr) == INTEGER_CST
2443 && !wi::eq_p (expr, 0))
2444 || (TREE_CODE (expr) == COMPLEX_CST
2445 && (integer_nonzerop (TREE_REALPART (expr))
2446 || integer_nonzerop (TREE_IMAGPART (expr)))));
2449 /* Return 1 if EXPR is the integer constant one. For vector,
2450 return 1 if every piece is the integer constant minus one
2451 (representing the value TRUE). */
2454 integer_truep (const_tree expr)
2456 if (TREE_CODE (expr) == VECTOR_CST)
2457 return integer_all_onesp (expr);
2458 return integer_onep (expr);
2461 /* Return 1 if EXPR is the fixed-point constant zero. */
2464 fixed_zerop (const_tree expr)
2466 return (TREE_CODE (expr) == FIXED_CST
2467 && TREE_FIXED_CST (expr).data.is_zero ());
2470 /* Return the power of two represented by a tree node known to be a
2474 tree_log2 (const_tree expr)
2476 if (TREE_CODE (expr) == COMPLEX_CST)
2477 return tree_log2 (TREE_REALPART (expr));
2479 return wi::exact_log2 (expr);
2482 /* Similar, but return the largest integer Y such that 2 ** Y is less
2483 than or equal to EXPR. */
2486 tree_floor_log2 (const_tree expr)
2488 if (TREE_CODE (expr) == COMPLEX_CST)
2489 return tree_log2 (TREE_REALPART (expr));
2491 return wi::floor_log2 (expr);
2494 /* Return number of known trailing zero bits in EXPR, or, if the value of
2495 EXPR is known to be zero, the precision of it's type. */
2498 tree_ctz (const_tree expr)
2500 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2501 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2504 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2505 switch (TREE_CODE (expr))
2508 ret1 = wi::ctz (expr);
2509 return MIN (ret1, prec);
2511 ret1 = wi::ctz (get_nonzero_bits (expr));
2512 return MIN (ret1, prec);
2519 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2522 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2523 return MIN (ret1, ret2);
2524 case POINTER_PLUS_EXPR:
2525 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2526 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2527 /* Second operand is sizetype, which could be in theory
2528 wider than pointer's precision. Make sure we never
2529 return more than prec. */
2530 ret2 = MIN (ret2, prec);
2531 return MIN (ret1, ret2);
2533 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2534 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2535 return MAX (ret1, ret2);
2537 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2538 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2539 return MIN (ret1 + ret2, prec);
2541 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2542 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2543 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2545 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2546 return MIN (ret1 + ret2, prec);
2550 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2551 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2553 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2554 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2559 case TRUNC_DIV_EXPR:
2561 case FLOOR_DIV_EXPR:
2562 case ROUND_DIV_EXPR:
2563 case EXACT_DIV_EXPR:
2564 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2565 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2567 int l = tree_log2 (TREE_OPERAND (expr, 1));
2570 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2578 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2579 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2581 return MIN (ret1, prec);
2583 return tree_ctz (TREE_OPERAND (expr, 0));
2585 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2588 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2589 return MIN (ret1, ret2);
2591 return tree_ctz (TREE_OPERAND (expr, 1));
2593 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2594 if (ret1 > BITS_PER_UNIT)
2596 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2597 return MIN (ret1, prec);
2605 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2606 decimal float constants, so don't return 1 for them. */
2609 real_zerop (const_tree expr)
2611 switch (TREE_CODE (expr))
2614 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2615 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2617 return real_zerop (TREE_REALPART (expr))
2618 && real_zerop (TREE_IMAGPART (expr));
2622 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2623 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2632 /* Return 1 if EXPR is the real constant one in real or complex form.
2633 Trailing zeroes matter for decimal float constants, so don't return
2637 real_onep (const_tree expr)
2639 switch (TREE_CODE (expr))
2642 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2643 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2645 return real_onep (TREE_REALPART (expr))
2646 && real_zerop (TREE_IMAGPART (expr));
2650 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2651 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2660 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2661 matter for decimal float constants, so don't return 1 for them. */
2664 real_minus_onep (const_tree expr)
2666 switch (TREE_CODE (expr))
2669 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2670 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2672 return real_minus_onep (TREE_REALPART (expr))
2673 && real_zerop (TREE_IMAGPART (expr));
2677 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2678 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2687 /* Nonzero if EXP is a constant or a cast of a constant. */
2690 really_constant_p (const_tree exp)
2692 /* This is not quite the same as STRIP_NOPS. It does more. */
2693 while (CONVERT_EXPR_P (exp)
2694 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2695 exp = TREE_OPERAND (exp, 0);
2696 return TREE_CONSTANT (exp);
2699 /* Return first list element whose TREE_VALUE is ELEM.
2700 Return 0 if ELEM is not in LIST. */
2703 value_member (tree elem, tree list)
2707 if (elem == TREE_VALUE (list))
2709 list = TREE_CHAIN (list);
2714 /* Return first list element whose TREE_PURPOSE is ELEM.
2715 Return 0 if ELEM is not in LIST. */
2718 purpose_member (const_tree elem, tree list)
2722 if (elem == TREE_PURPOSE (list))
2724 list = TREE_CHAIN (list);
2729 /* Return true if ELEM is in V. */
2732 vec_member (const_tree elem, vec<tree, va_gc> *v)
2736 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2742 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2746 chain_index (int idx, tree chain)
2748 for (; chain && idx > 0; --idx)
2749 chain = TREE_CHAIN (chain);
2753 /* Return nonzero if ELEM is part of the chain CHAIN. */
2756 chain_member (const_tree elem, const_tree chain)
2762 chain = DECL_CHAIN (chain);
2768 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2769 We expect a null pointer to mark the end of the chain.
2770 This is the Lisp primitive `length'. */
2773 list_length (const_tree t)
2776 #ifdef ENABLE_TREE_CHECKING
2784 #ifdef ENABLE_TREE_CHECKING
2787 gcc_assert (p != q);
2795 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2796 UNION_TYPE TYPE, or NULL_TREE if none. */
2799 first_field (const_tree type)
2801 tree t = TYPE_FIELDS (type);
2802 while (t && TREE_CODE (t) != FIELD_DECL)
2807 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2808 by modifying the last node in chain 1 to point to chain 2.
2809 This is the Lisp primitive `nconc'. */
2812 chainon (tree op1, tree op2)
2821 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2823 TREE_CHAIN (t1) = op2;
2825 #ifdef ENABLE_TREE_CHECKING
2828 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2829 gcc_assert (t2 != t1);
2836 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2839 tree_last (tree chain)
2843 while ((next = TREE_CHAIN (chain)))
2848 /* Reverse the order of elements in the chain T,
2849 and return the new head of the chain (old last element). */
2854 tree prev = 0, decl, next;
2855 for (decl = t; decl; decl = next)
2857 /* We shouldn't be using this function to reverse BLOCK chains; we
2858 have blocks_nreverse for that. */
2859 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2860 next = TREE_CHAIN (decl);
2861 TREE_CHAIN (decl) = prev;
2867 /* Return a newly created TREE_LIST node whose
2868 purpose and value fields are PARM and VALUE. */
2871 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2873 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2874 TREE_PURPOSE (t) = parm;
2875 TREE_VALUE (t) = value;
2879 /* Build a chain of TREE_LIST nodes from a vector. */
2882 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2884 tree ret = NULL_TREE;
2888 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2890 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2891 pp = &TREE_CHAIN (*pp);
2896 /* Return a newly created TREE_LIST node whose
2897 purpose and value fields are PURPOSE and VALUE
2898 and whose TREE_CHAIN is CHAIN. */
2901 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2905 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2906 memset (node, 0, sizeof (struct tree_common));
2908 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2910 TREE_SET_CODE (node, TREE_LIST);
2911 TREE_CHAIN (node) = chain;
2912 TREE_PURPOSE (node) = purpose;
2913 TREE_VALUE (node) = value;
2917 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2921 ctor_to_vec (tree ctor)
2923 vec<tree, va_gc> *vec;
2924 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2928 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2929 vec->quick_push (val);
2934 /* Return the size nominally occupied by an object of type TYPE
2935 when it resides in memory. The value is measured in units of bytes,
2936 and its data type is that normally used for type sizes
2937 (which is the first type created by make_signed_type or
2938 make_unsigned_type). */
2941 size_in_bytes_loc (location_t loc, const_tree type)
/* error_mark_node means an error has already been reported; quietly
   yield a zero size rather than cascading diagnostics.  */
2945 if (type == error_mark_node)
2946 return integer_zero_node;
2948 type = TYPE_MAIN_VARIANT (type);
2949 t = TYPE_SIZE_UNIT (type);
/* Reaching here with no TYPE_SIZE_UNIT means the type is incomplete:
   diagnose it at LOC through the language hook and fall back to a
   zero size.  (The test guarding this path is missing from this
   extract.)  */
2953 lang_hooks.types.incomplete_type_error (loc, NULL_TREE, type);
2954 return size_zero_node;
2960 /* Return the size of TYPE (in bytes) as a wide integer
2961 or return -1 if the size can vary or is larger than an integer. */
2964 int_size_in_bytes (const_tree type)
2968 if (type == error_mark_node)
2971 type = TYPE_MAIN_VARIANT (type);
2972 t = TYPE_SIZE_UNIT (type);
/* Only a constant size that fits an unsigned HOST_WIDE_INT can be
   returned directly; per the comment above, other cases yield -1
   (the fallback return is missing from this extract).  */
2974 if (t && tree_fits_uhwi_p (t))
2975 return TREE_INT_CST_LOW (t);
2980 /* Return the maximum size of TYPE (in bytes) as a wide integer
2981 or return -1 if the size can vary or is larger than an integer. */
2984 max_int_size_in_bytes (const_tree type)
/* SIZE keeps its -1 initializer unless one of the two sources below
   supplies a representable value.  */
2986 HOST_WIDE_INT size = -1;
2989 /* If this is an array type, check for a possible MAX_SIZE attached. */
2991 if (TREE_CODE (type) == ARRAY_TYPE)
2993 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2995 if (size_tree && tree_fits_uhwi_p (size_tree))
2996 size = tree_to_uhwi (size_tree);
2999 /* If we still haven't been able to get a size, see if the language
3000 can compute a maximum size. */
3004 size_tree = lang_hooks.types.max_size (type);
3006 if (size_tree && tree_fits_uhwi_p (size_tree))
3007 size = tree_to_uhwi (size_tree);
3013 /* Return the bit position of FIELD, in bits from the start of the record.
3014 This is a tree of type bitsizetype. */
3017 bit_position (const_tree field)
/* Combine the coarse DECL_FIELD_OFFSET with the residual
   DECL_FIELD_BIT_OFFSET into a single bit position.  */
3019 return bit_from_pos (DECL_FIELD_OFFSET (field),
3020 DECL_FIELD_BIT_OFFSET (field));
3023 /* Return the byte position of FIELD, in bytes from the start of the record.
3024 This is a tree of type sizetype. */
3027 byte_position (const_tree field)
/* Same field-position pair as bit_position, folded down to bytes.  */
3029 return byte_from_pos (DECL_FIELD_OFFSET (field),
3030 DECL_FIELD_BIT_OFFSET (field));
3033 /* Likewise, but return as an integer. It must be representable in
3034 that way (since it could be a signed value, we don't have the
3035 option of returning -1 like int_size_in_byte can. */
3038 int_byte_position (const_tree field)
/* tree_to_shwi requires the position to fit a signed HOST_WIDE_INT,
   matching the representability requirement stated above.  */
3040 return tree_to_shwi (byte_position (field));
3043 /* Return the strictest alignment, in bits, that T is known to have. */
3046 expr_align (const_tree t)
3048 unsigned int align0, align1;
3050 switch (TREE_CODE (t))
3052 CASE_CONVERT: case NON_LVALUE_EXPR:
3053 /* If we have conversions, we know that the alignment of the
3054 object must meet each of the alignments of the types. */
/* Both constraints hold simultaneously, so the known alignment is the
   larger of the two.  */
3055 align0 = expr_align (TREE_OPERAND (t, 0));
3056 align1 = TYPE_ALIGN (TREE_TYPE (t));
3057 return MAX (align0, align1);
3059 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3060 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3061 case CLEANUP_POINT_EXPR:
3062 /* These don't change the alignment of an object. */
3063 return expr_align (TREE_OPERAND (t, 0));
3066 /* The best we can do is say that the alignment is the least aligned
/* NOTE(review): the case label for this arm (apparently a two-armed
   expression whose result is operand 1 or operand 2) is missing from
   this extract; either branch may be the result, so take the MIN.  */
3068 align0 = expr_align (TREE_OPERAND (t, 1));
3069 align1 = expr_align (TREE_OPERAND (t, 2));
3070 return MIN (align0, align1);
3072 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3073 meaningfully, it's always 1. */
3074 case LABEL_DECL: case CONST_DECL:
3075 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3077 gcc_assert (DECL_ALIGN (t) != 0);
3078 return DECL_ALIGN (t);
3084 /* Otherwise take the alignment from that of the type. */
3085 return TYPE_ALIGN (TREE_TYPE (t));
3088 /* Return, as a tree node, the number of elements for TYPE (which is an
3089 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3092 array_type_nelts (const_tree type)
3094 tree index_type, min, max;
3096 /* If they did it with unspecified bounds, then we should have already
3097 given an error about it before we got here. */
3098 if (! TYPE_DOMAIN (type))
3099 return error_mark_node;
3101 index_type = TYPE_DOMAIN (type);
3102 min = TYPE_MIN_VALUE (index_type);
3103 max = TYPE_MAX_VALUE (index_type);
3105 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3107 return error_mark_node;
/* When the domain starts at zero, MAX already is nelts-1; otherwise
   compute MAX - MIN.  */
3109 return (integer_zerop (min)
3111 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3114 /* If arg is static -- a reference to an object in static storage -- then
3115 return the object. This is not the same as the C meaning of `static'.
3116 If arg isn't static, return NULL. */
/* NOTE(review): the function header and several case labels are
   missing from this extract; the surviving arms classify decls and
   handled-component references.  */
3121 switch (TREE_CODE (arg))
3124 /* Nested functions are static, even though taking their address will
3125 involve a trampoline as we unnest the nested function and create
3126 the trampoline on the tree level. */
/* A decl is static here only if it lives in static storage and is
   neither thread-local nor dllimported.  */
3130 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3131 && ! DECL_THREAD_LOCAL_P (arg)
3132 && ! DECL_DLLIMPORT_P (arg)
3136 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3140 return TREE_STATIC (arg) ? arg : NULL;
3147 /* If the thing being referenced is not a field, then it is
3148 something language specific. */
3149 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3151 /* If we are referencing a bitfield, we can't evaluate an
3152 ADDR_EXPR at compile time and so it isn't a constant. */
3153 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
/* Otherwise a component reference is static iff its base is.  */
3156 return staticp (TREE_OPERAND (arg, 0));
3162 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3165 case ARRAY_RANGE_REF:
/* An array reference is static only with constant element size and a
   constant index, and then only if the base itself is static.  */
3166 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3167 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3168 return staticp (TREE_OPERAND (arg, 0));
3172 case COMPOUND_LITERAL_EXPR:
3173 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3183 /* Return whether OP is a DECL whose address is function-invariant. */
3186 decl_address_invariant_p (const_tree op)
3188 /* The conditions below are slightly less strict than the one in
/* (The rest of this comment, the function header and the case labels
   are missing from this extract.)  */
3191 switch (TREE_CODE (op))
/* Static storage, thread-locals of the current function, and locals
   of the current function all have addresses fixed for the duration
   of a single function invocation.  */
3200 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3201 || DECL_THREAD_LOCAL_P (op)
3202 || DECL_CONTEXT (op) == current_function_decl
3203 || decl_function_context (op) == current_function_decl)
3208 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3209 || decl_function_context (op) == current_function_decl)
3220 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3223 decl_address_ip_invariant_p (const_tree op)
3225 /* The conditions below are slightly less strict than the one in
/* (Rest of comment and case labels missing from this extract.)
   Note the contrast with decl_address_invariant_p: dllimported decls
   are explicitly excluded here, and function-local decls never
   qualify since their address varies per invocation.  */
3228 switch (TREE_CODE (op))
3236 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3237 && !DECL_DLLIMPORT_P (op))
3238 || DECL_THREAD_LOCAL_P (op))
3243 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3255 /* Return true if T is function-invariant (internal function, does
3256 not handle arithmetic; that's handled in skip_simple_arithmetic and
3257 tree_invariant_p). */
3260 tree_invariant_p_1 (tree t)
/* Constants, and read-only trees without side effects, are trivially
   invariant.  */
3264 if (TREE_CONSTANT (t)
3265 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3268 switch (TREE_CODE (t))
/* For an address-taking node, peel off handled components and check
   that every variable part of the access path is itself invariant.  */
3274 op = TREE_OPERAND (t, 0);
3275 while (handled_component_p (op))
3277 switch (TREE_CODE (op))
3280 case ARRAY_RANGE_REF:
/* The index must be invariant and the optional lower-bound /
   element-size operands must be absent.  */
3281 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3282 || TREE_OPERAND (op, 2) != NULL_TREE
3283 || TREE_OPERAND (op, 3) != NULL_TREE)
3288 if (TREE_OPERAND (op, 2) != NULL_TREE)
3294 op = TREE_OPERAND (op, 0)?
3297 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3306 /* Return true if T is function-invariant. */
3309 tree_invariant_p (tree t)
/* Strip simple arithmetic first so the core check only sees the
   interesting inner node.  */
3311 tree inner = skip_simple_arithmetic (t);
3312 return tree_invariant_p_1 (inner);
3315 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3316 Do this to any expression which may be used in more than one place,
3317 but must be evaluated only once.
3319 Normally, expand_expr would reevaluate the expression each time.
3320 Calling save_expr produces something that is evaluated and recorded
3321 the first time expand_expr is called on it. Subsequent calls to
3322 expand_expr just reuse the recorded value.
3324 The call to expand_expr that generates code that actually computes
3325 the value is the first call *at compile time*. Subsequent calls
3326 *at compile time* generate code to use the saved value.
3327 This produces correct result provided that *at run time* control
3328 always flows through the insns made by the first expand_expr
3329 before reaching the other places where the save_expr was evaluated.
3330 You, the caller of save_expr, must make sure this is so.
3332 Constants, and certain read-only nodes, are returned with no
3333 SAVE_EXPR because that is safe. Expressions containing placeholders
3334 are not touched; see tree.def for an explanation of what these
3338 save_expr (tree expr)
3342 /* If the tree evaluates to a constant, then we don't want to hide that
3343 fact (i.e. this allows further folding, and direct checks for constants).
3344 However, a read-only object that has side effects cannot be bypassed.
3345 Since it is no problem to reevaluate literals, we just return the
3347 inner = skip_simple_arithmetic (expr);
3348 if (TREE_CODE (inner) == ERROR_MARK)
/* Invariant expressions need no SAVE_EXPR: re-evaluating them always
   yields the same value.  */
3351 if (tree_invariant_p_1 (inner))
3354 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3355 it means that the size or offset of some field of an object depends on
3356 the value within another field.
3358 Note that it must not be the case that EXPR contains both a PLACEHOLDER_EXPR
3359 and some variable since it would then need to be both evaluated once and
3360 evaluated more than once. Front-ends must assure this case cannot
3361 happen by surrounding any such subexpressions in their own SAVE_EXPR
3362 and forcing evaluation at the proper time. */
3363 if (contains_placeholder_p (inner))
3366 expr = build1_loc (EXPR_LOCATION (expr), SAVE_EXPR, TREE_TYPE (expr), expr);
3368 /* This expression might be placed ahead of a jump to ensure that the
3369 value was computed on both sides of the jump. So make sure it isn't
3370 eliminated as dead. */
3371 TREE_SIDE_EFFECTS (expr) = 1;
3375 /* Look inside EXPR into any simple arithmetic operations. Return the
3376 outermost non-arithmetic or non-invariant node. */
3379 skip_simple_arithmetic (tree expr)
3381 /* We don't care about whether this can be used as an lvalue in this
3383 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3384 expr = TREE_OPERAND (expr, 0);
3386 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3387 a constant, it will be more efficient to not make another SAVE_EXPR since
3388 it will allow better simplification and GCSE will be able to merge the
3389 computations if they actually occur. */
/* Descend through unary nodes unconditionally; through binary nodes
   only along the non-invariant operand (if exactly one side is
   invariant, the other side is where the interesting node lives).  */
3392 if (UNARY_CLASS_P (expr))
3393 expr = TREE_OPERAND (expr, 0);
3394 else if (BINARY_CLASS_P (expr))
3396 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3397 expr = TREE_OPERAND (expr, 0);
3398 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3399 expr = TREE_OPERAND (expr, 1);
3410 /* Look inside EXPR into simple arithmetic operations involving constants.
3411 Return the outermost non-arithmetic or non-constant node. */
/* Same traversal shape as skip_simple_arithmetic, but the binary-node
   descent is keyed on TREE_CONSTANT rather than full invariance.  */
3414 skip_simple_constant_arithmetic (tree expr)
3416 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3417 expr = TREE_OPERAND (expr, 0);
3421 if (UNARY_CLASS_P (expr))
3422 expr = TREE_OPERAND (expr, 0);
3423 else if (BINARY_CLASS_P (expr))
3425 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3426 expr = TREE_OPERAND (expr, 0);
3427 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3428 expr = TREE_OPERAND (expr, 1);
3439 /* Return which tree structure is used by T. */
/* Thin wrapper: the structure is determined entirely by T's code.  */
3441 enum tree_node_structure_enum
3442 tree_node_structure (const_tree t)
3444 const enum tree_code code = TREE_CODE (t);
3445 return tree_node_structure_for_code (code);
3448 /* Set various status flags when building a CALL_EXPR object T. */
3451 process_call_operands (tree t)
3453 bool side_effects = TREE_SIDE_EFFECTS (t);
3454 bool read_only = false;
3455 int i = call_expr_flags (t);
3457 /* Calls have side-effects, except those to const or pure functions. */
3458 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3459 side_effects = true;
3460 /* Propagate TREE_READONLY of arguments for const functions. */
/* Scan operands only when it can still change the outcome: either we
   have not yet proven side effects, or read-only status is still in
   play.  Operand 0 is skipped; the scan starts at index 1.  */
3464 if (!side_effects || read_only)
3465 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3467 tree op = TREE_OPERAND (t, i);
3468 if (op && TREE_SIDE_EFFECTS (op))
3469 side_effects = true;
3470 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3474 TREE_SIDE_EFFECTS (t) = side_effects;
3475 TREE_READONLY (t) = read_only;
3478 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3479 size or offset that depends on a field within a record. */
3482 contains_placeholder_p (const_tree exp)
3484 enum tree_code code;
3489 code = TREE_CODE (exp);
3490 if (code == PLACEHOLDER_EXPR)
3493 switch (TREE_CODE_CLASS (code))
3496 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3497 position computations since they will be converted into a
3498 WITH_RECORD_EXPR involving the reference, which will assume
3499 here will be valid. */
3500 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3502 case tcc_exceptional:
3503 if (code == TREE_LIST)
/* TREE_LIST is traversed via its VALUE and CHAIN, not operands.  */
3504 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3505 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3510 case tcc_comparison:
3511 case tcc_expression:
3515 /* Ignoring the first operand isn't quite right, but works best. */
3516 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3519 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3520 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3521 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3524 /* The save_expr function never wraps anything containing
3525 a PLACEHOLDER_EXPR. */
/* Generic fall-through: dispatch on arity and recurse into each
   operand.  */
3532 switch (TREE_CODE_LENGTH (code))
3535 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3537 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3538 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
/* Calls: a placeholder anywhere in the argument list counts.  */
3549 const_call_expr_arg_iterator iter;
3550 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3551 if (CONTAINS_PLACEHOLDER_P (arg))
3565 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3566 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3570 type_contains_placeholder_1 (const_tree type)
3572 /* If the size contains a placeholder or the parent type (component type in
3573 the case of arrays) type involves a placeholder, this type does. */
3574 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3575 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3576 || (!POINTER_TYPE_P (type)
3578 && type_contains_placeholder_p (TREE_TYPE (type))))
3581 /* Now do type-specific checks. Note that the last part of the check above
3582 greatly limits what we have to do below. */
3583 switch (TREE_CODE (type))
3586 case POINTER_BOUNDS_TYPE:
3592 case REFERENCE_TYPE:
3601 case FIXED_POINT_TYPE:
3602 /* Here we just check the bounds. */
3603 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3604 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3607 /* We have already checked the component type above, so just check
3608 the domain type. Flexible array members have a null domain. */
3609 return TYPE_DOMAIN (type) ?
3610 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3614 case QUAL_UNION_TYPE:
/* Aggregates: scan every FIELD_DECL's offset, its type, and -- for
   qualified unions -- the field qualifier expression.  */
3618 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3619 if (TREE_CODE (field) == FIELD_DECL
3620 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3621 || (TREE_CODE (type) == QUAL_UNION_TYPE
3622 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3623 || type_contains_placeholder_p (TREE_TYPE (field))))
3634 /* Wrapper around above function used to cache its result. */
3637 type_contains_placeholder_p (tree type)
3641 /* If the contains_placeholder_bits field has been initialized,
3642 then we know the answer. */
/* Cache encoding: 0 = not computed, 1 = false, 2 = true; hence the
   +1 / -1 translation below.  */
3643 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3644 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3646 /* Indicate that we've seen this type node, and the answer is false.
3647 This is what we want to return if we run into recursion via fields. */
3648 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3650 /* Compute the real value. */
3651 result = type_contains_placeholder_1 (type);
3653 /* Store the real value. */
3654 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3659 /* Push tree EXP onto vector QUEUE if it is not already present. */
3662 push_without_duplicates (tree exp, vec<tree> *queue)
/* Linear scan using structural equality (simple_cst_equal == 1), not
   pointer identity; duplicates by value are suppressed.  */
3667 FOR_EACH_VEC_ELT (*queue, i, iter)
3668 if (simple_cst_equal (iter, exp) == 1)
3672 queue->safe_push (exp);
3675 /* Given a tree EXP, find all occurrences of references to fields
3676 in a PLACEHOLDER_EXPR and place them in vector REFS without
3677 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3678 we assume here that EXP contains only arithmetic expressions
3679 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3683 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3685 enum tree_code code = TREE_CODE (exp);
3689 /* We handle TREE_LIST and COMPONENT_REF separately. */
3690 if (code == TREE_LIST)
3692 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3693 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3695 else if (code == COMPONENT_REF)
/* Walk down the reference chain to its innermost base; if that base
   is a PLACEHOLDER_EXPR, the whole COMPONENT_REF is a candidate.  */
3697 for (inner = TREE_OPERAND (exp, 0);
3698 REFERENCE_CLASS_P (inner);
3699 inner = TREE_OPERAND (inner, 0))
3702 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3703 push_without_duplicates (exp, refs);
3705 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3708 switch (TREE_CODE_CLASS (code))
3713 case tcc_declaration:
3714 /* Variables allocated to static storage can stay. */
3715 if (!TREE_STATIC (exp))
3716 push_without_duplicates (exp, refs);
3719 case tcc_expression:
3720 /* This is the pattern built in ada/make_aligning_type. */
3721 if (code == ADDR_EXPR
3722 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3724 push_without_duplicates (exp, refs);
3730 case tcc_exceptional:
3733 case tcc_comparison:
/* Generic arms: recurse into each operand (from 1 for calls, whose
   operand 0 is not an argument).  */
3735 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3736 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3740 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3741 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3749 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3750 return a tree with all occurrences of references to F in a
3751 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3752 CONST_DECLs. Note that we assume here that EXP contains only
3753 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3754 occurring only in their argument list. */
3757 substitute_in_expr (tree exp, tree f, tree r)
3759 enum tree_code code = TREE_CODE (exp);
3760 tree op0, op1, op2, op3;
3763 /* We handle TREE_LIST and COMPONENT_REF separately. */
3764 if (code == TREE_LIST)
/* Rebuild the list node only if either subtree actually changed;
   otherwise return EXP itself to preserve sharing.  */
3766 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3767 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3768 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3771 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3773 else if (code == COMPONENT_REF)
3777 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3778 and it is the right field, replace it with R. */
3779 for (inner = TREE_OPERAND (exp, 0);
3780 REFERENCE_CLASS_P (inner);
3781 inner = TREE_OPERAND (inner, 0))
3785 op1 = TREE_OPERAND (exp, 1);
3787 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3790 /* If this expression hasn't been completed let, leave it alone. */
3791 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3794 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3795 if (op0 == TREE_OPERAND (exp, 0))
3799 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3802 switch (TREE_CODE_CLASS (code))
3807 case tcc_declaration:
3813 case tcc_expression:
3819 case tcc_exceptional:
3822 case tcc_comparison:
/* Fixed-arity codes: substitute in each operand, then rebuild with
   fold_buildN only when something changed.  */
3824 switch (TREE_CODE_LENGTH (code))
3830 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3831 if (op0 == TREE_OPERAND (exp, 0))
3834 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3838 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3839 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3841 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3844 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3848 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3849 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3850 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3852 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3853 && op2 == TREE_OPERAND (exp, 2))
3856 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3860 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3861 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3862 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3863 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3865 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3866 && op2 == TREE_OPERAND (exp, 2)
3867 && op3 == TREE_OPERAND (exp, 3))
3871 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3883 new_tree = NULL_TREE;
3885 /* If we are trying to replace F with a constant or with another
3886 instance of one of the arguments of the call, inline back
3887 functions which do nothing else than computing a value from
3888 the arguments they are passed. This makes it possible to
3889 fold partially or entirely the replacement expression. */
3890 if (code == CALL_EXPR)
3892 bool maybe_inline = false;
3893 if (CONSTANT_CLASS_P (r))
3894 maybe_inline = true;
3896 for (i = 3; i < TREE_OPERAND_LENGTH (exp); i++)
3897 if (operand_equal_p (TREE_OPERAND (exp, i), r, 0))
3899 maybe_inline = true;
3904 tree t = maybe_inline_call_in_expr (exp);
3906 return SUBSTITUTE_IN_EXPR (t, f, r);
/* Variable-arity path (calls): copy-on-write — EXP is duplicated the
   first time an operand actually changes.  */
3910 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3912 tree op = TREE_OPERAND (exp, i);
3913 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3917 new_tree = copy_node (exp);
3918 TREE_OPERAND (new_tree, i) = new_op;
3924 new_tree = fold (new_tree);
3925 if (TREE_CODE (new_tree) == CALL_EXPR)
3926 process_call_operands (new_tree);
/* Preserve flags from the original node on the rebuilt one.  */
3937 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3939 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3940 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3945 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3946 for it within OBJ, a tree that is an object or a chain of references. */
3949 substitute_placeholder_in_expr (tree exp, tree obj)
3951 enum tree_code code = TREE_CODE (exp);
3952 tree op0, op1, op2, op3;
3955 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3956 in the chain of OBJ. */
3957 if (code == PLACEHOLDER_EXPR)
3959 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
/* First pass over the OBJ chain: look for an element whose type's
   main variant matches directly.  */
3962 for (elt = obj; elt != 0;
3963 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3964 || TREE_CODE (elt) == COND_EXPR)
3965 ? TREE_OPERAND (elt, 1)
3966 : (REFERENCE_CLASS_P (elt)
3967 || UNARY_CLASS_P (elt)
3968 || BINARY_CLASS_P (elt)
3969 || VL_EXP_CLASS_P (elt)
3970 || EXPRESSION_CLASS_P (elt))
3971 ? TREE_OPERAND (elt, 0) : 0))
3972 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
/* Second pass: accept a pointer to the needed type, dereferencing
   it with an INDIRECT_REF.  */
3975 for (elt = obj; elt != 0;
3976 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3977 || TREE_CODE (elt) == COND_EXPR)
3978 ? TREE_OPERAND (elt, 1)
3979 : (REFERENCE_CLASS_P (elt)
3980 || UNARY_CLASS_P (elt)
3981 || BINARY_CLASS_P (elt)
3982 || VL_EXP_CLASS_P (elt)
3983 || EXPRESSION_CLASS_P (elt))
3984 ? TREE_OPERAND (elt, 0) : 0))
3985 if (POINTER_TYPE_P (TREE_TYPE (elt))
3986 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3988 return fold_build1 (INDIRECT_REF, need_type, elt);
3990 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3991 survives until RTL generation, there will be an error. */
3995 /* TREE_LIST is special because we need to look at TREE_VALUE
3996 and TREE_CHAIN, not TREE_OPERANDS. */
3997 else if (code == TREE_LIST)
3999 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
4000 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
4001 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
4004 return tree_cons (TREE_PURPOSE (exp), op1, op0);
4007 switch (TREE_CODE_CLASS (code))
4010 case tcc_declaration:
4013 case tcc_exceptional:
4016 case tcc_comparison:
4017 case tcc_expression:
/* Fixed-arity codes: substitute in each operand and rebuild with
   fold_buildN only when something changed -- same shape as
   substitute_in_expr.  */
4020 switch (TREE_CODE_LENGTH (code))
4026 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4027 if (op0 == TREE_OPERAND (exp, 0))
4030 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4034 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4035 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4037 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4040 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4044 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4045 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4046 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4048 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4049 && op2 == TREE_OPERAND (exp, 2))
4052 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4056 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4057 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4058 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4059 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4061 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4062 && op2 == TREE_OPERAND (exp, 2)
4063 && op3 == TREE_OPERAND (exp, 3))
4067 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4079 new_tree = NULL_TREE;
/* Variable-arity path (calls): copy-on-write as in
   substitute_in_expr.  */
4081 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4083 tree op = TREE_OPERAND (exp, i);
4084 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4088 new_tree = copy_node (exp);
4089 TREE_OPERAND (new_tree, i) = new_op;
4095 new_tree = fold (new_tree);
4096 if (TREE_CODE (new_tree) == CALL_EXPR)
4097 process_call_operands (new_tree);
4108 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4110 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4111 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4117 /* Subroutine of stabilize_reference; this is called for subtrees of
4118 references. Any expression with side-effects must be put in a SAVE_EXPR
4119 to ensure that it is only evaluated once.
4121 We don't put SAVE_EXPR nodes around everything, because assigning very
4122 simple expressions to temporaries causes us to miss good opportunities
4123 for optimizations. Among other things, the opportunity to fold in the
4124 addition of a constant into an addressing mode often gets lost, e.g.
4125 "y[i+1] += x;". In general, we take the approach that we should not make
4126 an assignment unless we are forced into it - i.e., that any non-side effect
4127 operator should be allowed, and that cse should take care of coalescing
4128 multiple utterances of the same expression should that prove fruitful. */
4131 stabilize_reference_1 (tree e)
4134 enum tree_code code = TREE_CODE (e);
4136 /* We cannot ignore const expressions because it might be a reference
4137 to a const array but whose index contains side-effects. But we can
4138 ignore things that are actual constant or that already have been
4139 handled by this function. */
4141 if (tree_invariant_p (e))
4144 switch (TREE_CODE_CLASS (code))
4146 case tcc_exceptional:
4148 case tcc_declaration:
4149 case tcc_comparison:
4151 case tcc_expression:
4154 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4155 so that it will only be evaluated once. */
4156 /* The reference (r) and comparison (<) classes could be handled as
4157 below, but it is generally faster to only evaluate them once. */
4158 if (TREE_SIDE_EFFECTS (e))
4159 return save_expr (e);
4163 /* Constants need no processing. In fact, we should never reach
4168 /* Division is slow and tends to be compiled with jumps,
4169 especially the division by powers of 2 that is often
4170 found inside of an array reference. So do it just once. */
4171 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4172 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4173 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4174 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4175 return save_expr (e);
4176 /* Recursively stabilize each operand. */
4177 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4178 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4182 /* Recursively stabilize each operand. */
4183 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
/* Carry the original node's type and flags over to the rebuilt
   node.  */
4190 TREE_TYPE (result) = TREE_TYPE (e);
4191 TREE_READONLY (result) = TREE_READONLY (e);
4192 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4193 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4198 /* Stabilize a reference so that we can use it any number of times
4199 without causing its operands to be evaluated more than once.
4200 Returns the stabilized reference. This works by means of save_expr,
4201 so see the caveats in the comments about save_expr.
4203 Also allows conversion expressions whose operands are references.
4204 Any other kind of expression is returned unchanged. */
4207 stabilize_reference (tree ref)
4210 enum tree_code code = TREE_CODE (ref)?
4217 /* No action is needed in this case. */
4222 case FIX_TRUNC_EXPR:
/* Conversions: stabilize the operand, keep the conversion code.  */
4223 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
/* Indirections: the address operand goes through the subtree helper
   so any side effects in it are SAVE_EXPR'd.  */
4227 result = build_nt (INDIRECT_REF,
4228 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4232 result = build_nt (COMPONENT_REF,
4233 stabilize_reference (TREE_OPERAND (ref, 0)),
4234 TREE_OPERAND (ref, 1), NULL_TREE);
4238 result = build_nt (BIT_FIELD_REF,
4239 stabilize_reference (TREE_OPERAND (ref, 0)),
4240 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4241 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
/* Array references: base recurses through this function, the index
   through stabilize_reference_1.  */
4245 result = build_nt (ARRAY_REF,
4246 stabilize_reference (TREE_OPERAND (ref, 0)),
4247 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4248 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4251 case ARRAY_RANGE_REF:
4252 result = build_nt (ARRAY_RANGE_REF,
4253 stabilize_reference (TREE_OPERAND (ref, 0)),
4254 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4255 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4259 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4260 it wouldn't be ignored. This matters when dealing with
4262 return stabilize_reference_1 (ref);
4264 /* If arg isn't a kind of lvalue we recognize, make no change.
4265 Caller should recognize the error for an invalid lvalue. */
4270 return error_mark_node;
/* Propagate type and flags from the original reference.  */
4273 TREE_TYPE (result) = TREE_TYPE (ref);
4274 TREE_READONLY (result) = TREE_READONLY (ref);
4275 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4276 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4281 /* Low-level constructors for expressions. */
4283 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4284 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4287 recompute_tree_invariant_for_addr_expr (tree t)
4290 bool tc = true, se = false;
4292 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4294 /* We started out assuming this address is both invariant and constant, but
4295 does not have side effects. Now go down any handled components and see if
4296 any of them involve offsets that are either non-constant or non-invariant.
4297 Also check for side-effects.
4299 ??? Note that this code makes no attempt to deal with the case where
4300 taking the address of something causes a copy due to misalignment. */
4302 #define UPDATE_FLAGS(NODE) \
4303 do { tree _node = (NODE); \
4304 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4305 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4307 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4308 node = TREE_OPERAND (node, 0))
4310 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4311 array reference (probably made temporarily by the G++ front end),
4312 so ignore all the operands. */
4313 if ((TREE_CODE (node) == ARRAY_REF
4314 || TREE_CODE (node) == ARRAY_RANGE_REF)
4315 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4317 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4318 if (TREE_OPERAND (node, 2))
4319 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4320 if (TREE_OPERAND (node, 3))
4321 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4323 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4324 FIELD_DECL, apparently. The G++ front end can put something else
4325 there, at least temporarily. */
4326 else if (TREE_CODE (node) == COMPONENT_REF
4327 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4329 if (TREE_OPERAND (node, 2))
4330 UPDATE_FLAGS (TREE_OPERAND (node, 2));
/* Let the front end map any remaining expression to a decl and
   update TC/SE through the out-parameters.  */
4334 node = lang_hooks.expr_to_decl (node, &tc, &se);
4336 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4337 the address, since &(*a)->b is a form of addition. If it's a constant, the
4338 address is constant too. If it's a decl, its address is constant if the
4339 decl is static. Everything else is not constant and, furthermore,
4340 taking the address of a volatile variable is not volatile. */
4341 if (TREE_CODE (node) == INDIRECT_REF
4342 || TREE_CODE (node) == MEM_REF)
4343 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4344 else if (CONSTANT_CLASS_P (node))
4346 else if (DECL_P (node))
4347 tc &= (staticp (node) != NULL_TREE);
4351 se |= TREE_SIDE_EFFECTS (node);
/* Commit the computed flags onto the ADDR_EXPR itself.  */
4355 TREE_CONSTANT (t) = tc;
4356 TREE_SIDE_EFFECTS (t) = se;
4360 /* Build an expression of code CODE, data type TYPE, and operands as
4361 specified. Expressions and reference nodes can be created this way.
4362 Constants, decls, types and misc nodes cannot be.
4364 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4365 enough for all extant tree codes. */
4368 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4372 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4374 t = make_node_stat (code PASS_MEM_STAT);
/* Build a one-operand expression node: code CODE, type TYPE, operand NODE.
   Copies TREE_SIDE_EFFECTS/TREE_READONLY from the operand, forces side
   effects for statement codes, and recomputes invariants for ADDR_EXPR.
   NOTE(review): corrupted excerpt -- stray line numbers; braces, the
   switch on CODE with its case labels, and `return t;' are missing;
   all tokens are kept byte-identical.  */
4381 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4383 int length = sizeof (struct tree_exp);
4386 record_node_allocation_statistics (code, length);
4388 gcc_assert (TREE_CODE_LENGTH (code) == 1);
/* Allocate from the GC heap; only the common header is cleared, the
   remaining fields are assigned explicitly below.  */
4390 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4392 memset (t, 0, sizeof (struct tree_common));
4394 TREE_SET_CODE (t, code);
4396 TREE_TYPE (t) = type;
4397 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4398 TREE_OPERAND (t, 0) = node;
4399 if (node && !TYPE_P (node))
4401 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4402 TREE_READONLY (t) = TREE_READONLY (node);
4405 if (TREE_CODE_CLASS (code) == tcc_statement)
4406 TREE_SIDE_EFFECTS (t) = 1;
/* NOTE(review): the switch (code) and its case labels are missing from
   this excerpt; the fragments below are case bodies.  */
4410 /* All of these have side-effects, no matter what their
4412 TREE_SIDE_EFFECTS (t) = 1;
4413 TREE_READONLY (t) = 0;
4417 /* Whether a dereference is readonly has nothing to do with whether
4418 its operand is readonly. */
4419 TREE_READONLY (t) = 0;
4424 recompute_tree_invariant_for_addr_expr (t);
4428 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4429 && node && !TYPE_P (node)
4430 && TREE_CONSTANT (node))
4431 TREE_CONSTANT (t) = 1;
4432 if (TREE_CODE_CLASS (code) == tcc_reference
4433 && node && TREE_THIS_VOLATILE (node))
4434 TREE_THIS_VOLATILE (t) = 1;
/* PROCESS_ARG(N): store arg##N as operand N of node `t' and merge the
   operand's flags into the local accumulators `side_effects',
   `read_only' and `constant' used by the buildN functions below.
   NOTE(review): corrupted excerpt -- the do { ... } while (0) wrapper
   lines and the side_effects assignment are missing; the backslash
   continuations and tokens are kept byte-identical.  */
4441 #define PROCESS_ARG(N) \
4443 TREE_OPERAND (t, N) = arg##N; \
4444 if (arg##N &&!TYPE_P (arg##N)) \
4446 if (TREE_SIDE_EFFECTS (arg##N)) \
4448 if (!TREE_READONLY (arg##N) \
4449 && !CONSTANT_CLASS_P (arg##N)) \
4450 (void) (read_only = 0); \
4451 if (!TREE_CONSTANT (arg##N)) \
4452 (void) (constant = 0); \
/* Build a two-operand expression node of code CODE and type TT.
   Asserts sane operand kinds for pointer arithmetic codes, then derives
   TREE_SIDE_EFFECTS/TREE_READONLY/TREE_CONSTANT from the operands;
   a MEM_REF through &obj inherits obj's readonly/volatile bits instead.
   NOTE(review): corrupted excerpt -- stray line numbers; braces, the
   PROCESS_ARG invocations and `return t;' are missing; all tokens are
   kept byte-identical.  */
4457 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4459 bool constant, read_only, side_effects;
4462 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4464 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4465 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4466 /* When sizetype precision doesn't match that of pointers
4467 we need to be able to build explicit extensions or truncations
4468 of the offset argument. */
4469 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4470 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4471 && TREE_CODE (arg1) == INTEGER_CST);
4473 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4474 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4475 && ptrofftype_p (TREE_TYPE (arg1)));
4477 t = make_node_stat (code PASS_MEM_STAT);
4480 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4481 result based on those same flags for the arguments. But if the
4482 arguments aren't really even `tree' expressions, we shouldn't be trying
4485 /* Expressions without side effects may be constant if their
4486 arguments are as well. */
4487 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4488 || TREE_CODE_CLASS (code) == tcc_binary);
4490 side_effects = TREE_SIDE_EFFECTS (t);
4495 TREE_SIDE_EFFECTS (t) = side_effects;
4496 if (code == MEM_REF)
4498 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4500 tree o = TREE_OPERAND (arg0, 0);
/* The MEM_REF mirrors the referenced object's qualifiers.  */
4501 TREE_READONLY (t) = TREE_READONLY (o);
4502 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4507 TREE_READONLY (t) = read_only;
4508 TREE_CONSTANT (t) = constant;
4509 TREE_THIS_VOLATILE (t)
4510 = (TREE_CODE_CLASS (code) == tcc_reference
4511 && arg0 && TREE_THIS_VOLATILE (arg0));
/* Build a three-operand expression node of code CODE and type TT.
   A void COND_EXPR with NULL branches is treated as a gimple statement
   and always gets side effects; only COND_EXPR propagates read_only.
   NOTE(review): corrupted excerpt -- stray line numbers; braces, the
   PROCESS_ARG invocations, an `else' and `return t;' are missing;
   all tokens are kept byte-identical.  */
4519 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4520 tree arg2 MEM_STAT_DECL)
4522 bool constant, read_only, side_effects;
4525 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4526 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4528 t = make_node_stat (code PASS_MEM_STAT);
4533 /* As a special exception, if COND_EXPR has NULL branches, we
4534 assume that it is a gimple statement and always consider
4535 it to have side effects. */
4536 if (code == COND_EXPR
4537 && tt == void_type_node
4538 && arg1 == NULL_TREE
4539 && arg2 == NULL_TREE)
4540 side_effects = true;
/* NOTE(review): an `else' line is missing here in the excerpt.  */
4542 side_effects = TREE_SIDE_EFFECTS (t);
4548 if (code == COND_EXPR)
4549 TREE_READONLY (t) = read_only;
4551 TREE_SIDE_EFFECTS (t) = side_effects;
4552 TREE_THIS_VOLATILE (t)
4553 = (TREE_CODE_CLASS (code) == tcc_reference
4554 && arg0 && TREE_THIS_VOLATILE (arg0));
/* Build a four-operand expression node of code CODE and type TT;
   flags are accumulated from the operands as in build2/build3.
   NOTE(review): corrupted excerpt -- stray line numbers; braces, the
   PROCESS_ARG invocations and `return t;' are missing; all tokens are
   kept byte-identical.  */
4560 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4561 tree arg2, tree arg3 MEM_STAT_DECL)
4563 bool constant, read_only, side_effects;
4566 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4568 t = make_node_stat (code PASS_MEM_STAT);
4571 side_effects = TREE_SIDE_EFFECTS (t);
4578 TREE_SIDE_EFFECTS (t) = side_effects;
4579 TREE_THIS_VOLATILE (t)
4580 = (TREE_CODE_CLASS (code) == tcc_reference
4581 && arg0 && TREE_THIS_VOLATILE (arg0));
/* Build a five-operand expression node of code CODE and type TT.
   TARGET_MEM_REF through &obj inherits obj's readonly/volatile bits,
   like MEM_REF does in build2.
   NOTE(review): corrupted excerpt -- stray line numbers; braces, the
   PROCESS_ARG invocations and `return t;' are missing; all tokens are
   kept byte-identical.  */
4587 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4588 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4590 bool constant, read_only, side_effects;
4593 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4595 t = make_node_stat (code PASS_MEM_STAT);
4598 side_effects = TREE_SIDE_EFFECTS (t);
4606 TREE_SIDE_EFFECTS (t) = side_effects;
4607 if (code == TARGET_MEM_REF)
4609 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4611 tree o = TREE_OPERAND (arg0, 0);
4612 TREE_READONLY (t) = TREE_READONLY (o);
4613 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4617 TREE_THIS_VOLATILE (t)
4618 = (TREE_CODE_CLASS (code) == tcc_reference
4619 && arg0 && TREE_THIS_VOLATILE (arg0));
4624 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
4625 on the pointer PTR. */
4628 build_simple_mem_ref_loc (location_t loc, tree ptr)
4630 HOST_WIDE_INT offset = 0;
4631 tree ptype = TREE_TYPE (ptr);
4633 /* For convenience allow addresses that collapse to a simple base
4635 if (TREE_CODE (ptr) == ADDR_EXPR
4636 && (handled_component_p (TREE_OPERAND (ptr, 0))
4637 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4639 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4641 ptr = build_fold_addr_expr (ptr);
4642 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4644 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4645 ptr, build_int_cst (ptype, offset));
4646 SET_EXPR_LOCATION (tem, loc);
4650 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4653 mem_ref_offset (const_tree t)
4655 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4658 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4659 offsetted by OFFSET units. */
4662 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4664 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4665 build_fold_addr_expr (base),
4666 build_int_cst (ptr_type_node, offset));
4667 tree addr = build1 (ADDR_EXPR, type, ref);
4668 recompute_tree_invariant_for_addr_expr (addr);
4672 /* Similar except don't specify the TREE_TYPE
4673 and leave the TREE_SIDE_EFFECTS as 0.
4674 It is permissible for arguments to be null,
4675 or even garbage if their values do not matter. */
4678 build_nt (enum tree_code code, ...)
4685 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4689 t = make_node (code);
4690 length = TREE_CODE_LENGTH (code);
4692 for (i = 0; i < length; i++)
4693 TREE_OPERAND (t, i) = va_arg (p, tree);
4699 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4703 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4708 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4709 CALL_EXPR_FN (ret) = fn;
4710 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4711 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4712 CALL_EXPR_ARG (ret, ix) = t;
4716 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4717 We do NOT enter this node in any sort of symbol table.
4719 LOC is the location of the decl.
4721 layout_decl is used to set up the decl's storage layout.
4722 Other slots are initialized to 0 or null pointers. */
4725 build_decl_stat (location_t loc, enum tree_code code, tree name,
4726 tree type MEM_STAT_DECL)
4730 t = make_node_stat (code PASS_MEM_STAT);
4731 DECL_SOURCE_LOCATION (t) = loc;
4733 /* if (type == error_mark_node)
4734 type = integer_type_node; */
4735 /* That is not done, deliberately, so that having error_mark_node
4736 as the type can suppress useless errors in the use of this variable. */
4738 DECL_NAME (t) = name;
4739 TREE_TYPE (t) = type;
4741 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4747 /* Builds and returns function declaration with NAME and TYPE. */
4750 build_fn_decl (const char *name, tree type)
4752 tree id = get_identifier (name);
4753 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4755 DECL_EXTERNAL (decl) = 1;
4756 TREE_PUBLIC (decl) = 1;
4757 DECL_ARTIFICIAL (decl) = 1;
4758 TREE_NOTHROW (decl) = 1;
/* Global list of all TRANSLATION_UNIT_DECLs built so far; appended to
   by build_translation_unit_decl.  */
4763 vec<tree, va_gc> *all_translation_units;
4765 /* Builds a new translation-unit decl with name NAME, queues it in the
4766 global list of translation-unit decls and returns it. */
4769 build_translation_unit_decl (tree name)
4771 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4773 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4774 vec_safe_push (all_translation_units, tu);
4779 /* BLOCK nodes are used to represent the structure of binding contours
4780 and declarations, once those contours have been exited and their contents
4781 compiled. This information is used for outputting debugging info. */
4784 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4786 tree block = make_node (BLOCK);
4788 BLOCK_VARS (block) = vars;
4789 BLOCK_SUBBLOCKS (block) = subblocks;
4790 BLOCK_SUPERCONTEXT (block) = supercontext;
4791 BLOCK_CHAIN (block) = chain;
4796 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4798 LOC is the location to use in tree T. */
4801 protected_set_expr_location (tree t, location_t loc)
4803 if (CAN_HAVE_LOCATION_P (t))
4804 SET_EXPR_LOCATION (t, loc);
4807 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4811 build_decl_attribute_variant (tree ddecl, tree attribute)
4813 DECL_ATTRIBUTES (ddecl) = attribute;
4817 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4818 is ATTRIBUTE and its qualifiers are QUALS.
4820 Record such modified types already made so we don't make duplicates. */
/* NOTE(review): corrupted excerpt -- stray line numbers; braces, the
   declaration of `ntype' and the final `return ttype;' are missing;
   all tokens are kept byte-identical.  */
4823 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4825 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4829 /* Building a distinct copy of a tagged type is inappropriate; it
4830 causes breakage in code that expects there to be a one-to-one
4831 relationship between a struct and its fields.
4832 build_duplicate_type is another solution (as used in
4833 handle_transparent_union_attribute), but that doesn't play well
4834 with the stronger C++ type identity model. */
4835 if (TREE_CODE (ttype) == RECORD_TYPE
4836 || TREE_CODE (ttype) == UNION_TYPE
4837 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4838 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4840 warning (OPT_Wattributes,
4841 "ignoring attributes applied to %qT after definition",
4842 TYPE_MAIN_VARIANT (ttype));
4843 return build_qualified_type (ttype, quals);
/* Otherwise: make a distinct unqualified copy carrying ATTRIBUTE, hash
   it for sharing, then re-apply QUALS.  */
4846 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4847 ntype = build_distinct_type_copy (ttype);
4849 TYPE_ATTRIBUTES (ntype) = attribute;
4851 hashval_t hash = type_hash_canon_hash (ntype);
4852 ntype = type_hash_canon (hash, ntype);
4854 /* If the target-dependent attributes make NTYPE different from
4855 its canonical type, we will need to use structural equality
4856 checks for this type. */
4857 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4858 || !comp_type_attributes (ntype, ttype))
4859 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4860 else if (TYPE_CANONICAL (ntype) == ntype)
4861 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4863 ttype = build_qualified_type (ntype, quals);
4865 else if (TYPE_QUALS (ttype) != quals)
4866 ttype = build_qualified_type (ttype, quals);
4871 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
/* Walks both clause chains in lock step; clauses must match in code,
   decl (except for SIMDLEN) and the per-kind operands compared below.
   NOTE(review): corrupted excerpt -- stray line numbers; the loop
   condition, braces, `return false;'/`break;' lines and the final
   return are missing; all tokens are kept byte-identical.  */
4875 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4878 for (cl1 = clauses1, cl2 = clauses2;
4880 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4882 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4884 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4886 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4887 OMP_CLAUSE_DECL (cl2)) != 1)
4890 switch (OMP_CLAUSE_CODE (cl1))
4892 case OMP_CLAUSE_ALIGNED:
4893 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4894 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4897 case OMP_CLAUSE_LINEAR:
4898 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4899 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4902 case OMP_CLAUSE_SIMDLEN:
4903 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4904 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4913 /* Compare two constructor-element-type constants. Return 1 if the lists
4914 are known to be equal; otherwise return 0. */
4917 simple_cst_list_equal (const_tree l1, const_tree l2)
4919 while (l1 != NULL_TREE && l2 != NULL_TREE)
4921 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4924 l1 = TREE_CHAIN (l1);
4925 l2 = TREE_CHAIN (l2);
4931 /* Compare two identifier nodes representing attributes. Either one may
4932 be in wrapped __ATTR__ form. Return true if they are the same, false
4936 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4938 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4939 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4940 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4942 /* Identifiers can be compared directly for equality. */
4946 /* If they are not equal, they may still be one in the form
4947 'text' while the other one is in the form '__text__'. TODO:
4948 If we were storing attributes in normalized 'text' form, then
4949 this could all go away and we could take full advantage of
4950 the fact that we're comparing identifiers. :-) */
4951 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4952 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4954 if (attr2_len == attr1_len + 4)
4956 const char *p = IDENTIFIER_POINTER (attr2);
4957 const char *q = IDENTIFIER_POINTER (attr1);
4958 if (p[0] == '_' && p[1] == '_'
4959 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4960 && strncmp (q, p + 2, attr1_len) == 0)
4963 else if (attr2_len + 4 == attr1_len)
4965 const char *p = IDENTIFIER_POINTER (attr2);
4966 const char *q = IDENTIFIER_POINTER (attr1);
4967 if (q[0] == '_' && q[1] == '_'
4968 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4969 && strncmp (q + 2, p, attr2_len) == 0)
4976 /* Compare two attributes for their value identity. Return true if the
4977 attribute values are known to be equal; otherwise return false. */
4980 attribute_value_equal (const_tree attr1, const_tree attr2)
4982 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4985 if (TREE_VALUE (attr1) != NULL_TREE
4986 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4987 && TREE_VALUE (attr2) != NULL_TREE
4988 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4990 /* Handle attribute format. */
4991 if (is_attribute_p ("format", get_attribute_name (attr1)))
4993 attr1 = TREE_VALUE (attr1);
4994 attr2 = TREE_VALUE (attr2);
4995 /* Compare the archetypes (printf/scanf/strftime/...). */
4996 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
4997 TREE_VALUE (attr2)))
4999 /* Archetypes are the same. Compare the rest. */
5000 return (simple_cst_list_equal (TREE_CHAIN (attr1),
5001 TREE_CHAIN (attr2)) == 1);
5003 return (simple_cst_list_equal (TREE_VALUE (attr1),
5004 TREE_VALUE (attr2)) == 1);
5007 if ((flag_openmp || flag_openmp_simd)
5008 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5009 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5010 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5011 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5012 TREE_VALUE (attr2));
5014 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5017 /* Return 0 if the attributes for two types are incompatible, 1 if they
5018 are compatible, and 2 if they are nearly compatible (which causes a
5019 warning to be generated). */
/* NOTE(review): corrupted excerpt -- stray line numbers; the return
   type, braces, early `return' lines and the declaration of `a'/`attr'
   are missing; all tokens are kept byte-identical.  */
5021 comp_type_attributes (const_tree type1, const_tree type2)
5023 const_tree a1 = TYPE_ATTRIBUTES (type1);
5024 const_tree a2 = TYPE_ATTRIBUTES (type2);
/* First pass: every identity-affecting attribute of TYPE1 must appear
   on TYPE2 with an equal value.  */
5029 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5031 const struct attribute_spec *as;
5034 as = lookup_attribute_spec (get_attribute_name (a));
5035 if (!as || as->affects_type_identity == false)
5038 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5039 if (!attr || !attribute_value_equal (a, attr))
/* Second pass: TYPE2 must not carry identity-affecting attributes
   absent from TYPE1; values were already compared above.  */
5044 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5046 const struct attribute_spec *as;
5048 as = lookup_attribute_spec (get_attribute_name (a));
5049 if (!as || as->affects_type_identity == false)
5052 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5054 /* We don't need to compare trees again, as we did this
5055 already in first loop. */
5057 /* All types - affecting identity - are equal, so
5058 there is no need to call target hook for comparison. */
5062 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5064 /* As some type combinations - like default calling-convention - might
5065 be compatible, we have to call the target hook to get the final result. */
5066 return targetm.comp_type_attributes (type1, type2);
5069 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
5072 Record such modified types already made so we don't make duplicates. */
5075 build_type_attribute_variant (tree ttype, tree attribute)
5077 return build_type_attribute_qual_variant (ttype, attribute,
5078 TYPE_QUALS (ttype));
5082 /* Reset the expression *EXPR_P, a size or position.
5084 ??? We could reset all non-constant sizes or positions. But it's cheap
5085 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5087 We need to reset self-referential sizes or positions because they cannot
5088 be gimplified and thus can contain a CALL_EXPR after the gimplification
5089 is finished, which will run afoul of LTO streaming. And they need to be
5090 reset to something essentially dummy but not constant, so as to preserve
5091 the properties of the object they are attached to. */
5094 free_lang_data_in_one_sizepos (tree *expr_p)
5096 tree expr = *expr_p;
5097 if (CONTAINS_PLACEHOLDER_P (expr))
5098 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5102 /* Reset all the fields in a binfo node BINFO. We only keep
5103 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5106 free_lang_data_in_binfo (tree binfo)
5111 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5113 BINFO_VIRTUALS (binfo) = NULL_TREE;
5114 BINFO_BASE_ACCESSES (binfo) = NULL;
5115 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5116 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5118 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5119 free_lang_data_in_binfo (t);
5123 /* Reset all language specific information still present in TYPE. */
/* Clears FE lang flags, strips qualifiers from FUNCTION_TYPE arguments,
   prunes non-FIELD_DECL members and BINFO data, and resets
   self-referential sizes/positions.
   NOTE(review): corrupted excerpt -- stray line numbers; the return
   type, braces, local declarations and several statements are missing;
   all tokens are kept byte-identical.  */
5126 free_lang_data_in_type (tree type)
5128 gcc_assert (TYPE_P (type));
5130 /* Give the FE a chance to remove its own data first. */
5131 lang_hooks.free_lang_data (type);
5133 TREE_LANG_FLAG_0 (type) = 0;
5134 TREE_LANG_FLAG_1 (type) = 0;
5135 TREE_LANG_FLAG_2 (type) = 0;
5136 TREE_LANG_FLAG_3 (type) = 0;
5137 TREE_LANG_FLAG_4 (type) = 0;
5138 TREE_LANG_FLAG_5 (type) = 0;
5139 TREE_LANG_FLAG_6 (type) = 0;
5141 if (TREE_CODE (type) == FUNCTION_TYPE)
5143 /* Remove the const and volatile qualifiers from arguments. The
5144 C++ front end removes them, but the C front end does not,
5145 leading to false ODR violation errors when merging two
5146 instances of the same function signature compiled by
5147 different front ends. */
5150 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5152 tree arg_type = TREE_VALUE (p);
5154 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5156 int quals = TYPE_QUALS (arg_type)
5158 & ~TYPE_QUAL_VOLATILE;
5159 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5160 free_lang_data_in_type (TREE_VALUE (p));
5162 /* C++ FE uses TREE_PURPOSE to store initial values. */
5163 TREE_PURPOSE (p) = NULL;
5166 if (TREE_CODE (type) == METHOD_TYPE)
5170 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5172 /* C++ FE uses TREE_PURPOSE to store initial values. */
5173 TREE_PURPOSE (p) = NULL;
5177 /* Remove members that are not actually FIELD_DECLs from the field
5178 list of an aggregate. These occur in C++. */
5179 if (RECORD_OR_UNION_TYPE_P (type))
5183 /* Note that TYPE_FIELDS can be shared across distinct
5184 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5185 to be removed, we cannot set its TREE_CHAIN to NULL.
5186 Otherwise, we would not be able to find all the other fields
5187 in the other instances of this TREE_TYPE.
5189 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5191 member = TYPE_FIELDS (type);
/* Keep FIELD_DECLs and (with debug info enabled) useful TYPE_DECLs;
   relink the survivors, dropping everything else.  */
5194 if (TREE_CODE (member) == FIELD_DECL
5195 || (TREE_CODE (member) == TYPE_DECL
5196 && !DECL_IGNORED_P (member)
5197 && debug_info_level > DINFO_LEVEL_TERSE
5198 && !is_redundant_typedef (member)))
5201 TREE_CHAIN (prev) = member;
5203 TYPE_FIELDS (type) = member;
5207 member = TREE_CHAIN (member);
5211 TREE_CHAIN (prev) = NULL_TREE;
5213 TYPE_FIELDS (type) = NULL_TREE;
5215 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5216 and dangle the pointer from time to time. */
5217 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5218 TYPE_VFIELD (type) = NULL_TREE;
5220 /* Remove TYPE_METHODS list. While it would be nice to keep it
5221 to enable ODR warnings about different method lists, doing so
5222 seems to impractically increase size of LTO data streamed.
5223 Keep the information if TYPE_METHODS was non-NULL. This is used
5224 by function.c and pretty printers. */
5225 if (TYPE_METHODS (type))
5226 TYPE_METHODS (type) = error_mark_node;
5227 if (TYPE_BINFO (type))
5229 free_lang_data_in_binfo (TYPE_BINFO (type));
5230 /* We need to preserve link to bases and virtual table for all
5231 polymorphic types to make devirtualization machinery working.
5232 Debug output cares only about bases, but output also
5233 virtual table pointers so merging of -fdevirtualize and
5234 -fno-devirtualize units is easier. */
5235 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5236 || !flag_devirtualize)
5237 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5238 && !BINFO_VTABLE (TYPE_BINFO (type)))
5239 || debug_info_level != DINFO_LEVEL_NONE))
5240 TYPE_BINFO (type) = NULL;
5245 /* For non-aggregate types, clear out the language slot (which
5246 overloads TYPE_BINFO). */
5247 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5249 if (INTEGRAL_TYPE_P (type)
5250 || SCALAR_FLOAT_TYPE_P (type)
5251 || FIXED_POINT_TYPE_P (type))
5253 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5254 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5258 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5259 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
/* Walk out of BLOCK contexts; LTO does not stream BLOCKs as contexts.  */
5261 if (TYPE_CONTEXT (type)
5262 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5264 tree ctx = TYPE_CONTEXT (type);
5267 ctx = BLOCK_SUPERCONTEXT (ctx);
5269 while (ctx && TREE_CODE (ctx) == BLOCK);
5270 TYPE_CONTEXT (type) = ctx;
5275 /* Return true if DECL may need an assembler name to be set. */
/* NOTE(review): corrupted excerpt -- stray line numbers; the return
   type, braces and several `return' lines are missing; all tokens are
   kept byte-identical.  */
5278 need_assembler_name_p (tree decl)
5280 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5281 Rule merging. This makes type_odr_p to return true on those types during
5282 LTO and by comparing the mangled name, we can say what types are intended
5283 to be equivalent across compilation unit.
5285 We do not store names of type_in_anonymous_namespace_p.
5287 Record, union and enumeration type have linkage that allows use
5288 to check type_in_anonymous_namespace_p. We do not mangle compound types
5289 that always can be compared structurally.
5291 Similarly for builtin types, we compare properties of their main variant.
5292 A special case are integer types where mangling do make differences
5293 between char/signed char/unsigned char etc. Storing name for these makes
5294 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5295 See cp/mangle.c:write_builtin_type for details. */
5297 if (flag_lto_odr_type_mering
5298 && TREE_CODE (decl) == TYPE_DECL
5300 && decl == TYPE_NAME (TREE_TYPE (decl))
5301 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5302 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5303 && (type_with_linkage_p (TREE_TYPE (decl))
5304 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5305 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5306 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5307 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5308 if (!VAR_OR_FUNCTION_DECL_P (decl))
5311 /* If DECL already has its assembler name set, it does not need a
5313 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5314 || DECL_ASSEMBLER_NAME_SET_P (decl))
5317 /* Abstract decls do not need an assembler name. */
5318 if (DECL_ABSTRACT_P (decl))
5321 /* For VAR_DECLs, only static, public and external symbols need an
/* NOTE(review): the VAR_P (decl) test line is missing from this excerpt.  */
5324 && !TREE_STATIC (decl)
5325 && !TREE_PUBLIC (decl)
5326 && !DECL_EXTERNAL (decl))
5329 if (TREE_CODE (decl) == FUNCTION_DECL)
5331 /* Do not set assembler name on builtins. Allow RTL expansion to
5332 decide whether to expand inline or via a regular call. */
5333 if (DECL_BUILT_IN (decl)
5334 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5337 /* Functions represented in the callgraph need an assembler name. */
5338 if (cgraph_node::get (decl) != NULL)
5341 /* Unused and not public functions don't need an assembler name. */
5342 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5350 /* Reset all language specific information still present in symbol
/* Clears FE lang flags and frontend-only decl fields; for functions it
   releases unneeded bodies and normalizes PARM_DECL contexts, for
   variables/fields it drops unneeded initializers, and for a
   TRANSLATION_UNIT_DECL it strips builtin decls from its BLOCK.
   NOTE(review): corrupted excerpt -- stray line numbers; the return
   type, braces and several lines are missing; tokens byte-identical.  */
5354 free_lang_data_in_decl (tree decl)
5356 gcc_assert (DECL_P (decl));
5358 /* Give the FE a chance to remove its own data first. */
5359 lang_hooks.free_lang_data (decl);
5361 TREE_LANG_FLAG_0 (decl) = 0;
5362 TREE_LANG_FLAG_1 (decl) = 0;
5363 TREE_LANG_FLAG_2 (decl) = 0;
5364 TREE_LANG_FLAG_3 (decl) = 0;
5365 TREE_LANG_FLAG_4 (decl) = 0;
5366 TREE_LANG_FLAG_5 (decl) = 0;
5367 TREE_LANG_FLAG_6 (decl) = 0;
5369 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5370 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5371 if (TREE_CODE (decl) == FIELD_DECL)
5373 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5374 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5375 DECL_QUALIFIER (decl) = NULL_TREE;
5378 if (TREE_CODE (decl) == FUNCTION_DECL)
5380 struct cgraph_node *node;
/* A function with no callgraph node (or a node with no definition and
   no clones) will never be emitted; drop its body and metadata.  */
5381 if (!(node = cgraph_node::get (decl))
5382 || (!node->definition && !node->clones))
5385 node->release_body ();
5388 release_function_body (decl);
5389 DECL_ARGUMENTS (decl) = NULL;
5390 DECL_RESULT (decl) = NULL;
5391 DECL_INITIAL (decl) = error_mark_node;
5394 if (gimple_has_body_p (decl) || (node && node->thunk.thunk_p))
5398 /* If DECL has a gimple body, then the context for its
5399 arguments must be DECL. Otherwise, it doesn't really
5400 matter, as we will not be emitting any code for DECL. In
5401 general, there may be other instances of DECL created by
5402 the front end and since PARM_DECLs are generally shared,
5403 their DECL_CONTEXT changes as the replicas of DECL are
5404 created. The only time where DECL_CONTEXT is important
5405 is for the FUNCTION_DECLs that have a gimple body (since
5406 the PARM_DECL will be used in the function's body). */
5407 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5408 DECL_CONTEXT (t) = decl;
5409 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5410 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5411 = target_option_default_node;
5412 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5413 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5414 = optimization_default_node;
5417 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5418 At this point, it is not needed anymore. */
5419 DECL_SAVED_TREE (decl) = NULL_TREE;
5421 /* Clear the abstract origin if it refers to a method. Otherwise
5422 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5423 origin will not be output correctly. */
5424 if (DECL_ABSTRACT_ORIGIN (decl)
5425 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5426 && RECORD_OR_UNION_TYPE_P
5427 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5428 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5430 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5431 DECL_VINDEX referring to itself into a vtable slot number as it
5432 should. Happens with functions that are copied and then forgotten
5433 about. Just clear it, it won't matter anymore. */
5434 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5435 DECL_VINDEX (decl) = NULL_TREE;
5437 else if (VAR_P (decl))
5439 if ((DECL_EXTERNAL (decl)
5440 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5441 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5442 DECL_INITIAL (decl) = NULL_TREE;
5444 else if (TREE_CODE (decl) == TYPE_DECL)
5446 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5447 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5448 DECL_INITIAL (decl) = NULL_TREE;
5450 else if (TREE_CODE (decl) == FIELD_DECL)
5451 DECL_INITIAL (decl) = NULL_TREE;
5452 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5453 && DECL_INITIAL (decl)
5454 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5456 /* Strip builtins from the translation-unit BLOCK. We still have targets
5457 without builtin_decl_explicit support and also builtins are shared
5458 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5459 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5463 if (TREE_CODE (var) == FUNCTION_DECL
5464 && DECL_BUILT_IN (var))
5465 *nextp = TREE_CHAIN (var);
5467 nextp = &TREE_CHAIN (var);
5473 /* Data used when collecting DECLs and TYPEs for language data removal. */
/* Bundles all traversal state for the free_lang_data pass: a worklist to
   drive iterative (non-recursive) walking, a visited set, and the
   accumulated decl/type arrays.  NOTE(review): sampled listing — the
   struct's braces are elided; tokens kept verbatim.  */
5475 struct free_lang_data_d
/* Pre-reserve 100 slots in each accumulator to limit reallocation.  */
5477 free_lang_data_d () : decls (100), types (100) {}
5479 /* Worklist to avoid excessive recursion. */
5480 auto_vec<tree> worklist;
5482 /* Set of traversed objects. Used to avoid duplicate visits. */
5483 hash_set<tree> pset;
5485 /* Array of symbols to process with free_lang_data_in_decl. */
5486 auto_vec<tree> decls;
5488 /* Array of types to process with free_lang_data_in_type. */
5489 auto_vec<tree> types;
5493 /* Save all language fields needed to generate proper debug information
5494 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
/* Currently a stub: only asserts its preconditions (debug level above
   terse, T a decl); the saving machinery was removed per the FIXME.  */
5497 save_debug_info_for_decl (tree t)
5499 /*struct saved_debug_info_d *sdi;*/
5501 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5503 /* FIXME. Partial implementation for saving debug info removed. */
5507 /* Save all language fields needed to generate proper debug information
5508 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
/* Stub, mirroring save_debug_info_for_decl: asserts preconditions only
   (debug level above terse, T a type).  */
5511 save_debug_info_for_type (tree t)
5513 /*struct saved_debug_info_d *sdi;*/
5515 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5517 /* FIXME. Partial implementation for saving debug info removed. */
5521 /* Add type or decl T to one of the list of tree nodes that need their
5522 language data removed. The lists are held inside FLD. */
/* NOTE(review): sampled listing — the DECL_P (t) guard selecting the
   decls branch is elided between 5525 and 5529; tokens kept verbatim.  */
5525 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5529 fld->decls.safe_push (t);
/* Debug info is only worth preserving above -g1 (terse).  */
5530 if (debug_info_level > DINFO_LEVEL_TERSE)
5531 save_debug_info_for_decl (t);
5533 else if (TYPE_P (t))
5535 fld->types.safe_push (t);
5536 if (debug_info_level > DINFO_LEVEL_TERSE)
5537 save_debug_info_for_type (t);
5543 /* Push tree node T into FLD->WORKLIST. */
/* Skips NULL nodes, language-specific nodes (they get removed anyway),
   and nodes already visited per FLD->PSET.  */
5546 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5548 if (t && !is_lang_specific (t) && !fld->pset.contains (t))
5549 fld->worklist.safe_push ((t));
5553 /* Operand callback helper for free_lang_data_in_node. *TP is the
5554 subtree operand being considered. */
/* walk_tree callback.  Records each decl/type in FLD and manually pushes
   the fields walk_tree itself does not visit.  NOTE(review): sampled
   listing — braces, a few early returns and the local declarations
   (e.g. T = *TP) are elided; tokens kept verbatim.  */
5557 find_decls_types_r (tree *tp, int *ws, void *data)
5560 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5562 if (TREE_CODE (t) == TREE_LIST)
5565 /* Language specific nodes will be removed, so there is no need
5566 to gather anything under them. */
5567 if (is_lang_specific (t))
/* --- Decl case: record T, then push every decl field of interest. --- */
5575 /* Note that walk_tree does not traverse every possible field in
5576 decls, so we have to do our own traversals here. */
5577 add_tree_to_fld_list (t, fld);
5579 fld_worklist_push (DECL_NAME (t), fld);
5580 fld_worklist_push (DECL_CONTEXT (t), fld);
5581 fld_worklist_push (DECL_SIZE (t), fld);
5582 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5584 /* We are going to remove everything under DECL_INITIAL for
5585 TYPE_DECLs. No point walking them. */
5586 if (TREE_CODE (t) != TYPE_DECL)
5587 fld_worklist_push (DECL_INITIAL (t), fld);
5589 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5590 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
/* Per-kind extra fields: arguments/result for functions, original type
   for typedefs, bit-field layout for fields.  */
5592 if (TREE_CODE (t) == FUNCTION_DECL)
5594 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5595 fld_worklist_push (DECL_RESULT (t), fld);
5597 else if (TREE_CODE (t) == TYPE_DECL)
5599 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5601 else if (TREE_CODE (t) == FIELD_DECL)
5603 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5604 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5605 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5606 fld_worklist_push (DECL_FCONTEXT (t), fld);
5609 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
5610 && DECL_HAS_VALUE_EXPR_P (t))
5611 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
/* FIELD_DECL/TYPE_DECL chains are walked via their containers instead.  */
5613 if (TREE_CODE (t) != FIELD_DECL
5614 && TREE_CODE (t) != TYPE_DECL)
5615 fld_worklist_push (TREE_CHAIN (t), fld);
/* --- Type case: record T, then push the type fields of interest. --- */
5618 else if (TYPE_P (t))
5620 /* Note that walk_tree does not traverse every possible field in
5621 types, so we have to do our own traversals here. */
5622 add_tree_to_fld_list (t, fld);
5624 if (!RECORD_OR_UNION_TYPE_P (t))
5625 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5626 fld_worklist_push (TYPE_SIZE (t), fld);
5627 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5628 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5629 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5630 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5631 fld_worklist_push (TYPE_NAME (t), fld);
5632 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5633 them and thus do not and want not to reach unused pointer types
/* TYPE_MINVAL/TYPE_MAXVAL overlap other fields for pointer and
   record/union types, hence the guards.  */
5635 if (!POINTER_TYPE_P (t))
5636 fld_worklist_push (TYPE_MINVAL (t), fld);
5637 if (!RECORD_OR_UNION_TYPE_P (t))
5638 fld_worklist_push (TYPE_MAXVAL (t), fld);
5639 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5640 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5641 do not and want not to reach unused variants this way. */
5642 if (TYPE_CONTEXT (t))
5644 tree ctx = TYPE_CONTEXT (t);
5645 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5646 So push that instead. */
5647 while (ctx && TREE_CODE (ctx) == BLOCK)
5648 ctx = BLOCK_SUPERCONTEXT (ctx);
5649 fld_worklist_push (ctx, fld);
5651 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5652 and want not to reach unused types this way. */
/* For classes with a binfo, visit base types and virtual methods.  */
5654 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5658 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5659 fld_worklist_push (TREE_TYPE (tem), fld);
5660 fld_worklist_push (BINFO_VIRTUALS (TYPE_BINFO (t)), fld);
5662 if (RECORD_OR_UNION_TYPE_P (t))
5665 /* Push all TYPE_FIELDS - there can be interleaving interesting
5666 and non-interesting things. */
5667 tem = TYPE_FIELDS (t)
/* Keep real fields always; keep member typedefs only when they matter
   for debug info (not ignored, above -g1, not a redundant typedef).  */
5670 if (TREE_CODE (tem) == FIELD_DECL
5671 || (TREE_CODE (tem) == TYPE_DECL
5672 && !DECL_IGNORED_P (tem)
5673 && debug_info_level > DINFO_LEVEL_TERSE
5674 && !is_redundant_typedef (tem)))
5675 fld_worklist_push (tem, fld);
5676 tem = TREE_CHAIN (tem);
5680 fld_worklist_push (TYPE_STUB_DECL (t), fld);
/* --- BLOCK case: visit its vars, subblocks and abstract origin. --- */
5683 else if (TREE_CODE (t) == BLOCK)
5686 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5687 fld_worklist_push (tem, fld);
5688 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5689 fld_worklist_push (tem, fld);
5690 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5693 if (TREE_CODE (t) != IDENTIFIER_NODE
5694 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5695 fld_worklist_push (TREE_TYPE (t), fld);
5701 /* Find decls and types in T. */
/* Drains FLD->WORKLIST: walks T, then repeatedly pops and walks queued
   nodes until the worklist is empty.  NOTE(review): sampled listing —
   the enclosing loop and its exit are elided; tokens kept verbatim.  */
5704 find_decls_types (tree t, struct free_lang_data_d *fld)
5708 if (!fld->pset.contains (t))
5709 walk_tree (&t, find_decls_types_r, fld, &fld->pset);
5710 if (fld->worklist.is_empty ())
5712 t = fld->worklist.pop ();
5716 /* Translate all the types in LIST with the corresponding runtime
/* Maps every TREE_VALUE of LIST through lookup_type_for_runtime,
   building a fresh TREE_LIST (head/prev walk) so FE type references are
   dropped.  NOTE(review): sampled listing — the loop header and the
   final return of HEAD are elided; tokens kept verbatim.  */
5720 get_eh_types_for_runtime (tree list)
5724 if (list == NULL_TREE)
/* First element seeds the new list; PREV tracks the tail.  */
5727 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5729 list = TREE_CHAIN (list);
5732 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5733 TREE_CHAIN (prev) = n;
5734 prev = TREE_CHAIN (prev);
5735 list = TREE_CHAIN (list);
5742 /* Find decls and types referenced in EH region R and store them in
5743 FLD->DECLS and FLD->TYPES. */
/* Dispatches on the region kind (try / allowed-exceptions /
   must-not-throw); each case first rewrites FE types to runtime types,
   then walks the result.  NOTE(review): the switch header and other
   cases are elided in this sampled listing.  */
5746 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5757 /* The types referenced in each catch must first be changed to the
5758 EH types used at runtime. This removes references to FE types
5760 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5762 c->type_list = get_eh_types_for_runtime (c->type_list);
5763 walk_tree (&c->type_list, find_decls_types_r, fld, &fld->pset);
5768 case ERT_ALLOWED_EXCEPTIONS:
5769 r->u.allowed.type_list
5770 = get_eh_types_for_runtime (r->u.allowed.type_list);
5771 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, &fld->pset);
5774 case ERT_MUST_NOT_THROW:
5775 walk_tree (&r->u.must_not_throw.failure_decl,
5776 find_decls_types_r, fld, &fld->pset);
5782 /* Find decls and types referenced in cgraph node N and store them in
5783 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5784 look for *every* kind of DECL and TYPE node reachable from N,
5785 including those embedded inside types and decls (i.e., TYPE_DECLs,
5786 NAMESPACE_DECLs, etc). */
5789 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5792 struct function *fn;
/* Start from the function decl itself.  */
5796 find_decls_types (n->decl, fld);
/* Without a gimple body there is nothing further to scan.  */
5798 if (!gimple_has_body_p (n->decl))
5801 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5803 fn = DECL_STRUCT_FUNCTION (n->decl);
5805 /* Traverse locals. */
5806 FOR_EACH_LOCAL_DECL (fn, ix, t)
5807 find_decls_types (t, fld);
5809 /* Traverse EH regions in FN. */
5812 FOR_ALL_EH_REGION_FN (r, fn)
5813 find_decls_types_in_eh_region (r, fld);
5816 /* Traverse every statement in FN. */
5817 FOR_EACH_BB_FN (bb, fn)
5820 gimple_stmt_iterator si;
/* PHI nodes: scan each argument.  */
5823 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5825 gphi *phi = psi.phi ();
5827 for (i = 0; i < gimple_phi_num_args (phi); i++)
5829 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5830 find_decls_types (*arg_p, fld);
/* Ordinary statements: scan the call fntype and every operand.  */
5834 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5836 gimple *stmt = gsi_stmt (si);
5838 if (is_gimple_call (stmt))
5839 find_decls_types (gimple_call_fntype (stmt), fld);
5841 for (i = 0; i < gimple_num_ops (stmt); i++)
5843 tree arg = gimple_op (stmt, i);
5844 find_decls_types (arg, fld);
5851 /* Find decls and types referenced in varpool node N and store them in
5852 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5853 look for *every* kind of DECL and TYPE node reachable from N,
5854 including those embedded inside types and decls (i.e., TYPE_DECLs,
5855 NAMESPACE_DECLs, etc). */
/* Thin wrapper: a variable has no body, so its decl is the only root.  */
5858 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5860 find_decls_types (v->decl, fld);
5863 /* If T needs an assembler name, have one created for it. */
5866 assign_assembler_name_if_needed (tree t)
5868 if (need_assembler_name_p (t))
5870 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5871 diagnostics that use input_location to show locus
5872 information. The problem here is that, at this point,
5873 input_location is generally anchored to the end of the file
5874 (since the parser is long gone), so we don't have a good
5875 position to pin it to.
5877 To alleviate this problem, this uses the location of T's
5878 declaration. Examples of this are
5879 testsuite/g++.dg/template/cond2.C and
5880 testsuite/g++.dg/template/pr35240.C. */
/* Temporarily point diagnostics at T's own source location.  */
5881 location_t saved_location = input_location;
5882 input_location = DECL_SOURCE_LOCATION (t);
/* Side effect: computes and caches the mangled name on T.  */
5884 decl_assembler_name (t);
5886 input_location = saved_location;
5891 /* Free language specific information for every operand and expression
5892 in every node of the call graph. This process operates in three stages:
5894 1- Every callgraph node and varpool node is traversed looking for
5895 decls and types embedded in them. This is a more exhaustive
5896 search than that done by find_referenced_vars, because it will
5897 also collect individual fields, decls embedded in types, etc.
5899 2- All the decls found are sent to free_lang_data_in_decl.
5901 3- All the types found are sent to free_lang_data_in_type.
5903 The ordering between decls and types is important because
5904 free_lang_data_in_decl sets assembler names, which includes
5905 mangling. So types cannot be freed up until assembler names have
5909 free_lang_data_in_cgraph (void)
5911 struct cgraph_node *n;
5913 struct free_lang_data_d fld;
5918 /* Find decls and types in the body of every function in the callgraph. */
5919 FOR_EACH_FUNCTION (n)
5920 find_decls_types_in_node (n, &fld);
/* Also scan the alias-pair targets.  */
5922 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5923 find_decls_types (p->decl, &fld);
5925 /* Find decls and types in every varpool symbol. */
5926 FOR_EACH_VARIABLE (v)
5927 find_decls_types_in_var (v, &fld);
5929 /* Set the assembler name on every decl found. We need to do this
5930 now because free_lang_data_in_decl will invalidate data needed
5931 for mangling. This breaks mangling on interdependent decls. */
5932 FOR_EACH_VEC_ELT (fld.decls, i, t)
5933 assign_assembler_name_if_needed (t);
5935 /* Traverse every decl found freeing its language data. */
5936 FOR_EACH_VEC_ELT (fld.decls, i, t)
5937 free_lang_data_in_decl (t);
5939 /* Traverse every type found freeing its language data. */
5940 FOR_EACH_VEC_ELT (fld.types, i, t)
5941 free_lang_data_in_type (t);
/* NOTE(review): the body of this final loop over fld.types is elided in
   this sampled listing — confirm against upstream.  */
5944 FOR_EACH_VEC_ELT (fld.types, i, t)
5950 /* Free resources that are used by FE but are not needed once they are done. */
/* Pass entry point; returns the TODO flags for the pass manager.
   NOTE(review): sampled listing — the first operand of the early-return
   guard at 5958 is elided; tokens kept verbatim.  */
5953 free_lang_data (void)
5957 /* If we are the LTO frontend we have freed lang-specific data already. */
5959 || (!flag_generate_lto && !flag_generate_offload))
5962 /* Allocate and assign alias sets to the standard integer types
5963 while the slots are still in the way the frontends generated them. */
5964 for (i = 0; i < itk_none; ++i)
5965 if (integer_types[i])
5966 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5968 /* Traverse the IL resetting language specific information for
5969 operands, expressions, etc. */
5970 free_lang_data_in_cgraph ();
5972 /* Create gimple variants for common types. */
5973 for (unsigned i = 0;
5974 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
5976 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
5978 /* Reset some langhooks. Do not reset types_compatible_p, it may
5979 still be used indirectly via the get_alias_set langhook. */
5980 lang_hooks.dwarf_name = lhd_dwarf_name;
5981 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5982 lang_hooks.gimplify_expr = lhd_gimplify_expr;
5984 /* We do not want the default decl_assembler_name implementation,
5985 rather if we have fixed everything we want a wrapper around it
5986 asserting that all non-local symbols already got their assembler
5987 name and only produce assembler names for local symbols. Or rather
5988 make sure we never call decl_assembler_name on local symbols and
5989 devise a separate, middle-end private scheme for it. */
5991 /* Reset diagnostic machinery. */
5992 tree_diagnostics_defaults (global_dc);
/* Static descriptor for the *free_lang_data simple IPA pass: no opt
   group, its own timevar, and no property/TODO requirements.  */
6000 const pass_data pass_data_ipa_free_lang_data =
6002 SIMPLE_IPA_PASS, /* type */
6003 "*free_lang_data", /* name */
6004 OPTGROUP_NONE, /* optinfo_flags */
6005 TV_IPA_FREE_LANG_DATA, /* tv_id */
6006 0, /* properties_required */
6007 0, /* properties_provided */
6008 0, /* properties_destroyed */
6009 0, /* todo_flags_start */
6010 0, /* todo_flags_finish */
/* Pass wrapper: delegates execution to free_lang_data ().  */
6013 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6016 pass_ipa_free_lang_data (gcc::context *ctxt)
6017 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6020 /* opt_pass methods: */
6021 virtual unsigned int execute (function *) { return free_lang_data (); }
6023 }; // class pass_ipa_free_lang_data
/* Factory used by the pass manager; caller takes ownership.  */
6027 simple_ipa_opt_pass *
6028 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6030 return new pass_ipa_free_lang_data (ctxt);
6033 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6034 ATTR_NAME. Also used internally by remove_attribute(). */
/* Matches IDENT against ATTR_NAME either exactly or in the
   underscore-decorated '__name__' form (hence the +4 length check).  */
6036 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6038 size_t ident_len = IDENTIFIER_LENGTH (ident);
6040 if (ident_len == attr_len)
6042 if (id_equal (ident, attr_name))
6045 else if (ident_len == attr_len + 4)
6047 /* There is the possibility that ATTR is 'text' and IDENT is
/* Verify leading and trailing '__' and compare the middle.  */
6049 const char *p = IDENTIFIER_POINTER (ident);
6050 if (p[0] == '_' && p[1] == '_'
6051 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6052 && strncmp (attr_name, p + 2, attr_len) == 0)
6059 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6060 of ATTR_NAME, and LIST is not NULL_TREE. */
/* Scans LIST for an attribute whose name equals ATTR_NAME, either
   exactly or as '__name__'.  NOTE(review): the loop header and the
   returns of the match are elided in this sampled listing.  */
6062 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6066 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6068 if (ident_len == attr_len)
6070 if (!strcmp (attr_name,
6071 IDENTIFIER_POINTER (get_attribute_name (list))))
6074 /* TODO: If we made sure that attributes were stored in the
6075 canonical form without '__...__' (ie, as in 'text' as opposed
6076 to '__text__') then we could avoid the following case. */
6077 else if (ident_len == attr_len + 4)
6079 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6080 if (p[0] == '_' && p[1] == '_'
6081 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6082 && strncmp (attr_name, p + 2, attr_len) == 0)
6085 list = TREE_CHAIN (list);
6091 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6092 return a pointer to the attribute's list first element if the attribute
6093 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
/* Prefix match rather than full match; accepts both 'name...' and
   '__name...' spellings of each list element.  */
6097 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6102 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
/* Too short to contain the prefix: skip this element.  */
6104 if (attr_len > ident_len)
6106 list = TREE_CHAIN (list);
6110 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6112 if (strncmp (attr_name, p, attr_len) == 0)
6115 /* TODO: If we made sure that attributes were stored in the
6116 canonical form without '__...__' (ie, as in 'text' as opposed
6117 to '__text__') then we could avoid the following case. */
6118 if (p[0] == '_' && p[1] == '_' &&
6119 strncmp (attr_name, p + 2, attr_len) == 0)
6122 list = TREE_CHAIN (list);
6129 /* A variant of lookup_attribute() that can be used with an identifier
6130 as the first argument, and where the identifier can be either
6131 'text' or '__text__'.
6133 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6134 return a pointer to the attribute's list element if the attribute
6135 is part of the list, or NULL_TREE if not found. If the attribute
6136 appears more than once, this only returns the first occurrence; the
6137 TREE_CHAIN of the return value should be passed back in if further
6138 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6139 can be in the form 'text' or '__text__'. */
6141 lookup_ident_attribute (tree attr_identifier, tree list)
6143 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
/* Attribute names on the list must themselves be identifiers.  */
6147 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6148 == IDENTIFIER_NODE);
/* cmp_attrib_identifiers handles the 'text' vs '__text__' equivalence.  */
6150 if (cmp_attrib_identifiers (attr_identifier,
6151 get_attribute_name (list)))
6154 list = TREE_CHAIN (list);
6160 /* Remove any instances of attribute ATTR_NAME in LIST and return the
/* Unlinks matching elements in place via a pointer-to-pointer walk.
   NOTE(review): sampled listing — the else keeping non-matching
   elements (before 6179) is elided; tokens kept verbatim.  */
6164 remove_attribute (const char *attr_name, tree list)
6167 size_t attr_len = strlen (attr_name);
/* Callers pass the plain name; the '__' form is matched internally.  */
6169 gcc_checking_assert (attr_name[0] != '_');
6171 for (p = &list; *p; )
6174 /* TODO: If we were storing attributes in normalized form, here
6175 we could use a simple strcmp(). */
6176 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6177 *p = TREE_CHAIN (l);
6179 p = &TREE_CHAIN (l);
6185 /* Return an attribute list that is the union of a1 and a2. */
6188 merge_attributes (tree a1, tree a2)
6192 /* Either one unset? Take the set one. */
6194 if ((attributes = a1) == 0)
6197 /* One that completely contains the other? Take it. */
6199 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6201 if (attribute_list_contained (a2, a1))
6205 /* Pick the longest list, and hang on the other list. */
6207 if (list_length (a1) < list_length (a2))
6208 attributes = a2, a2 = a1;
/* For each attribute of the shorter list, search the result for an
   equal-valued entry; copy it in front when none is found.  */
6210 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6213 for (a = lookup_ident_attribute (get_attribute_name (a2),
6215 a != NULL_TREE && !attribute_value_equal (a, a2);
6216 a = lookup_ident_attribute (get_attribute_name (a2),
/* Not present yet: prepend a copy of this attribute.  */
6221 a1 = copy_node (a2);
6222 TREE_CHAIN (a1) = attributes;
6231 /* Given types T1 and T2, merge their attributes and return
6235 merge_type_attributes (tree t1, tree t2)
6237 return merge_attributes (TYPE_ATTRIBUTES (t1),
6238 TYPE_ATTRIBUTES (t2));
6241 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6245 merge_decl_attributes (tree olddecl, tree newdecl)
6247 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6248 DECL_ATTRIBUTES (newdecl));
6251 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6253 /* Specialization of merge_decl_attributes for various Windows targets.
6255 This handles the following situation:
6257 __declspec (dllimport) int foo;
6260 The second instance of `foo' nullifies the dllimport. */
6263 merge_dllimport_decl_attributes (tree old, tree new_tree)
6266 int delete_dllimport_p = 1;
6268 /* What we need to do here is remove from `old' dllimport if it doesn't
6269 appear in `new'. dllimport behaves like extern: if a declaration is
6270 marked dllimport and a definition appears later, then the object
6271 is not dllimport'd. We also remove a `new' dllimport if the old list
6272 contains dllexport: dllexport always overrides dllimport, regardless
6273 of the order of declaration. */
6274 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6275 delete_dllimport_p = 0;
6276 else if (DECL_DLLIMPORT_P (new_tree)
6277 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6279 DECL_DLLIMPORT_P (new_tree) = 0;
6280 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6281 "dllimport ignored", new_tree);
6283 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6285 /* Warn about overriding a symbol that has already been used, e.g.:
6286 extern int __attribute__ ((dllimport)) foo;
6287 int* bar () {return &foo;}
6290 if (TREE_USED (old))
6292 warning (0, "%q+D redeclared without dllimport attribute "
6293 "after being referenced with dll linkage", new_tree);
6294 /* If we have used a variable's address with dllimport linkage,
6295 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6296 decl may already have had TREE_CONSTANT computed.
6297 We still remove the attribute so that assembler code refers
6298 to '&foo rather than '_imp__foo'. */
6299 if (VAR_P (old) && TREE_ADDRESSABLE (old))
6300 DECL_DLLIMPORT_P (new_tree) = 1;
6303 /* Let an inline definition silently override the external reference,
6304 but otherwise warn about attribute inconsistency. */
6305 else if (VAR_P (new_tree) || !DECL_DECLARED_INLINE_P (new_tree))
6306 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6307 "previous dllimport ignored", new_tree);
/* Neither redeclaration scenario applies: keep the attribute.  */
6310 delete_dllimport_p = 0;
/* Merge the two lists, then strip dllimport when flagged above.  */
6312 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6314 if (delete_dllimport_p)
6315 a = remove_attribute ("dllimport", a);
6320 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6321 struct attribute_spec.handler. */
/* Validates NODE (decl or record/union type), diagnoses misuse, and for
   valid dllimport decls flips DECL_EXTERNAL / DECL_DLLIMPORT_P.
   *NO_ADD_ATTRS is set whenever the attribute must be dropped.
   NOTE(review): sampled listing — several returns and braces elided.  */
6324 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6330 /* These attributes may apply to structure and union types being created,
6331 but otherwise should pass to the declaration involved. */
/* Not yet at the decl: re-attach the attribute so it is applied later.  */
6334 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6335 | (int) ATTR_FLAG_ARRAY_NEXT))
6337 *no_add_attrs = true;
6338 return tree_cons (name, args, NULL_TREE);
/* For a record/union type the attribute lands on its TYPE_NAME decl.  */
6340 if (TREE_CODE (node) == RECORD_TYPE
6341 || TREE_CODE (node) == UNION_TYPE)
6343 node = TYPE_NAME (node);
6349 warning (OPT_Wattributes, "%qE attribute ignored",
6351 *no_add_attrs = true;
/* Only variables, functions and (record/union) typedefs qualify.  */
6356 if (!VAR_OR_FUNCTION_DECL_P (node) && TREE_CODE (node) != TYPE_DECL)
6358 *no_add_attrs = true;
6359 warning (OPT_Wattributes, "%qE attribute ignored",
6364 if (TREE_CODE (node) == TYPE_DECL
6365 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6366 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6368 *no_add_attrs = true;
6369 warning (OPT_Wattributes, "%qE attribute ignored",
6374 is_dllimport = is_attribute_p ("dllimport", name);
6376 /* Report error on dllimport ambiguities seen now before they cause
6380 /* Honor any target-specific overrides. */
6381 if (!targetm.valid_dllimport_attribute_p (node))
6382 *no_add_attrs = true;
6384 else if (TREE_CODE (node) == FUNCTION_DECL
6385 && DECL_DECLARED_INLINE_P (node))
6387 warning (OPT_Wattributes, "inline function %q+D declared as "
6388 " dllimport: attribute ignored", node);
6389 *no_add_attrs = true;
6391 /* Like MS, treat definition of dllimported variables and
6392 non-inlined functions on declaration as syntax errors. */
6393 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6395 error ("function %q+D definition is marked dllimport", node);
6396 *no_add_attrs = true;
6399 else if (VAR_P (node))
6401 if (DECL_INITIAL (node))
6403 error ("variable %q+D definition is marked dllimport",
6405 *no_add_attrs = true;
6408 /* `extern' needn't be specified with dllimport.
6409 Specify `extern' now and hope for the best. Sigh. */
6410 DECL_EXTERNAL (node) = 1;
6411 /* Also, implicitly give dllimport'd variables declared within
6412 a function global scope, unless declared static. */
6413 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6414 TREE_PUBLIC (node) = 1;
/* Record the import flag only when the attribute survived.  */
6417 if (*no_add_attrs == false)
6418 DECL_DLLIMPORT_P (node) = 1;
6420 else if (TREE_CODE (node) == FUNCTION_DECL
6421 && DECL_DECLARED_INLINE_P (node)
6422 && flag_keep_inline_dllexport)
6423 /* An exported function, even if inline, must be emitted. */
6424 DECL_EXTERNAL (node) = 0;
6426 /* Report error if symbol is not accessible at global scope. */
6427 if (!TREE_PUBLIC (node) && VAR_OR_FUNCTION_DECL_P (node))
6429 error ("external linkage required for symbol %q+D because of "
6430 "%qE attribute", node, name);
6431 *no_add_attrs = true;
6434 /* A dllexport'd entity must have default visibility so that other
6435 program units (shared libraries or the main executable) can see
6436 it. A dllimport'd entity must have default visibility so that
6437 the linker knows that undefined references within this program
6438 unit can be resolved by the dynamic linker. */
6441 if (DECL_VISIBILITY_SPECIFIED (node)
6442 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6443 error ("%qE implies default visibility, but %qD has already "
6444 "been declared with a different visibility",
6446 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6447 DECL_VISIBILITY_SPECIFIED (node) = 1;
6453 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6455 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6456 of the various TYPE_QUAL values. */
6459 set_type_quals (tree type, int type_quals)
6461 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6462 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6463 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6464 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6465 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6468 /* Returns true iff CAND and BASE have equivalent language-specific
/* Delegates to the language hook when set; NOTE(review): sampled
   listing — the early 'return true' statements are elided.  */
6472 check_lang_type (const_tree cand, const_tree base)
6474 if (lang_hooks.types.type_hash_eq == NULL)
6476 /* type_hash_eq currently only applies to these types. */
6477 if (TREE_CODE (cand) != FUNCTION_TYPE
6478 && TREE_CODE (cand) != METHOD_TYPE)
6480 return lang_hooks.types.type_hash_eq (cand, base);
6483 /* Returns true iff unqualified CAND and BASE are equivalent. */
6486 check_base_type (const_tree cand, const_tree base)
6488 return (TYPE_NAME (cand) == TYPE_NAME (base)
6489 /* Apparently this is needed for Objective-C. */
6490 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6491 /* Check alignment. */
6492 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6493 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6494 TYPE_ATTRIBUTES (base)));
6497 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6500 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6502 return (TYPE_QUALS (cand) == type_quals
6503 && check_base_type (cand, base)
6504 && check_lang_type (cand, base));
6507 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6510 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6512 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6513 && TYPE_NAME (cand) == TYPE_NAME (base)
6514 /* Apparently this is needed for Objective-C. */
6515 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6516 /* Check alignment. */
6517 && TYPE_ALIGN (cand) == align
6518 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6519 TYPE_ATTRIBUTES (base))
6520 && check_lang_type (cand, base));
6523 /* This function checks to see if TYPE matches the size one of the built-in
6524 atomic types, and returns that core atomic type. */
/* NOTE(review): sampled listing — the switch on TYPE_SIZE and its case
   labels (8/16/32/64/128 bits, presumably) are elided; only the
   per-case assignments remain.  Tokens kept verbatim.  */
6527 find_atomic_core_type (tree type)
6529 tree base_atomic_type;
6531 /* Only handle complete types. */
6532 if (TYPE_SIZE (type) == NULL_TREE)
6535 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6539 base_atomic_type = atomicQI_type_node;
6543 base_atomic_type = atomicHI_type_node;
6547 base_atomic_type = atomicSI_type_node;
6551 base_atomic_type = atomicDI_type_node;
6555 base_atomic_type = atomicTI_type_node;
/* No built-in atomic node of this size.  */
6559 base_atomic_type = NULL_TREE;
6562 return base_atomic_type;
6565 /* Return a version of the TYPE, qualified as indicated by the
6566 TYPE_QUALS, if one exists. If no qualified version exists yet,
6567 return NULL_TREE. */
/* NOTE(review): sampled listing — the return statements of the two
   success paths are elided; tokens kept verbatim.  */
6570 get_qualified_type (tree type, int type_quals)
/* Fast path: TYPE itself already carries the requested qualifiers.  */
6574 if (TYPE_QUALS (type) == type_quals)
6577 /* Search the chain of variants to see if there is already one there just
6578 like the one we need to have. If so, use that existing one. We must
6579 preserve the TYPE_NAME, since there is code that depends on this. */
6580 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6581 if (check_qualified_type (t, type, type_quals))
6587 /* Like get_qualified_type, but creates the type if it does not
6588 exist. This function never returns NULL_TREE. */
6591 build_qualified_type (tree type, int type_quals MEM_STAT_DECL)
6595 /* See if we already have the appropriate qualified variant. */
6596 t = get_qualified_type (type, type_quals);
6598 /* If not, build it. */
6601 t = build_variant_type_copy (type PASS_MEM_STAT);
6602 set_type_quals (t, type_quals);
6604 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6606 /* See if this object can map to a basic atomic type. */
6607 tree atomic_type = find_atomic_core_type (type);
6610 /* Ensure the alignment of this type is compatible with
6611 the required alignment of the atomic type. */
6612 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6613 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6617 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6618 /* Propagate structural equality. */
6619 SET_TYPE_STRUCTURAL_EQUALITY (t);
6620 else if (TYPE_CANONICAL (type) != type)
6621 /* Build the underlying canonical type, since it is different
/* Recurse on the canonical type so the variant's canonical form is
   the qualified canonical form.  */
6624 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6625 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6628 /* T is its own canonical type. */
6629 TYPE_CANONICAL (t) = t;
6636 /* Create a variant of type T with alignment ALIGN. */
/* Reuses an existing identically-aligned variant when one is on the
   variant chain; otherwise copies TYPE and stamps the new alignment.
   Packed types are returned unchanged.  NOTE(review): the early-return
   statements are elided in this sampled listing.  */
6639 build_aligned_type (tree type, unsigned int align)
6643 if (TYPE_PACKED (type)
6644 || TYPE_ALIGN (type) == align)
6647 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6648 if (check_aligned_type (t, type, align))
6651 t = build_variant_type_copy (type);
6652 SET_TYPE_ALIGN (t, align);
/* Mark the alignment as explicitly requested.  */
6653 TYPE_USER_ALIGN (t) = 1;
6658 /* Create a new distinct copy of TYPE. The new type is made its own
6659 MAIN_VARIANT. If TYPE requires structural equality checks, the
6660 resulting type requires structural equality checks; otherwise, its
6661 TYPE_CANONICAL points to itself. */
6664 build_distinct_type_copy (tree type MEM_STAT_DECL)
6666 tree t = copy_node_stat (type PASS_MEM_STAT);
/* The copy starts with no pointer/reference types built to it.  */
6668 TYPE_POINTER_TO (t) = 0;
6669 TYPE_REFERENCE_TO (t) = 0;
6671 /* Set the canonical type either to a new equivalence class, or
6672 propagate the need for structural equality checks. */
6673 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6674 SET_TYPE_STRUCTURAL_EQUALITY (t);
6676 TYPE_CANONICAL (t) = t;
6678 /* Make it its own variant. */
6679 TYPE_MAIN_VARIANT (t) = t;
6680 TYPE_NEXT_VARIANT (t) = 0;
6682 /* We do not record methods in type copies nor variants
6683 so we do not need to keep them up to date when new method
6685 if (RECORD_OR_UNION_TYPE_P (t))
6686 TYPE_METHODS (t) = NULL_TREE;
6688 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6689 whose TREE_TYPE is not t. This can also happen in the Ada
6690 frontend when using subtypes. */
6695 /* Create a new variant of TYPE, equivalent but distinct. This is so
6696 the caller can modify it. TYPE_CANONICAL for the return type will
6697 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6698 are considered equal by the language itself (or that both types
6699 require structural equality checks). */
6702 build_variant_type_copy (tree type MEM_STAT_DECL)
6704 tree t, m = TYPE_MAIN_VARIANT (type);
6706 t = build_distinct_type_copy (type PASS_MEM_STAT);
6708 /* Since we're building a variant, assume that it is a non-semantic
6709 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6710 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6711 /* Type variants have no alias set defined. */
6712 TYPE_ALIAS_SET (t) = -1;
6714 /* Add the new type to the chain of variants of TYPE. */
/* Splice T in right after the main variant M.  */
6715 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6716 TYPE_NEXT_VARIANT (m) = t;
6717 TYPE_MAIN_VARIANT (t) = m;
/* Return true if the from tree in both tree maps are equal. */

tree_map_base_eq (const void *va, const void *vb)
  const struct tree_map_base *const a = (const struct tree_map_base *) va,
    *const b = (const struct tree_map_base *) vb;
  /* Pointer identity of the FROM trees defines map-entry equality.  */
  return (a->from == b->from);
/* Hash a from tree in a tree_base_map.  Hashes on the pointer value
   of the FROM tree, matching tree_map_base_eq's identity comparison.  */

tree_map_base_hash (const void *item)
  return htab_hash_pointer (((const struct tree_map_base *)item)->from);
/* Return true if this tree map structure is marked for garbage collection
   purposes. We simply return true if the from tree is marked, so that this
   structure goes away when the from tree goes away. */

tree_map_base_marked_p (const void *p)
  return ggc_marked_p (((const struct tree_map_base *) p)->from);
/* Hash a from tree in a tree_map.  The hash value was precomputed and
   stored in the entry itself, so just return it.  */

tree_map_hash (const void *item)
  return (((const struct tree_map *) item)->hash);
/* Hash a from tree in a tree_decl_map.  Uses the decl's stable UID so
   the hash survives garbage collection, unlike a raw pointer hash.  */

tree_decl_map_hash (const void *item)
  return DECL_UID (((const struct tree_decl_map *) item)->base.from);
/* Return the initialization priority for DECL. */

decl_init_priority_lookup (tree decl)
  symtab_node *snode = symtab_node::get (decl);

  /* NOTE(review): the guard condition for this early return is elided
     in this listing; presumably it fires when SNODE is NULL.  */
  return DEFAULT_INIT_PRIORITY;
  snode->get_init_priority ();
/* Return the finalization priority for DECL. */

decl_fini_priority_lookup (tree decl)
  cgraph_node *node = cgraph_node::get (decl);

  /* NOTE(review): the guard condition for this early return is elided
     in this listing; presumably it fires when NODE is NULL.  */
  return DEFAULT_INIT_PRIORITY;
  node->get_fini_priority ();
/* Set the initialization priority for DECL to PRIORITY. */

decl_init_priority_insert (tree decl, priority_type priority)
  struct symtab_node *snode;

  /* NOTE(review): the body of this arm is elided in this listing.  */
  if (priority == DEFAULT_INIT_PRIORITY)
  snode = symtab_node::get (decl);
  /* Variables live in the varpool; everything else goes through the
     call graph.  */
  else if (VAR_P (decl))
    snode = varpool_node::get_create (decl);
  snode = cgraph_node::get_create (decl);
  snode->set_init_priority (priority);
/* Set the finalization priority for DECL to PRIORITY. */

decl_fini_priority_insert (tree decl, priority_type priority)
  struct cgraph_node *node;

  /* NOTE(review): the body of this arm is elided in this listing.  */
  if (priority == DEFAULT_INIT_PRIORITY)
  node = cgraph_node::get (decl);
  node = cgraph_node::get_create (decl);
  node->set_fini_priority (priority);
/* Print out the statistics for the DECL_DEBUG_EXPR hash table. */

print_debug_expr_statistics (void)
  /* Diagnostic output goes to stderr, like the other *_statistics
     dumpers in this file.  */
  fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) debug_expr_for_decl->size (),
	   (long) debug_expr_for_decl->elements (),
	   debug_expr_for_decl->collisions ());
/* Print out the statistics for the DECL_VALUE_EXPR hash table. */

print_value_expr_statistics (void)
  /* Mirrors print_debug_expr_statistics for the value-expr table.  */
  fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
	   (long) value_expr_for_decl->size (),
	   (long) value_expr_for_decl->elements (),
	   value_expr_for_decl->collisions ());
/* Lookup a debug expression for FROM, and return it if we find one. */

decl_debug_expr_lookup (tree from)
  struct tree_decl_map *h, in;
  in.base.from = from;

  /* DECL_UID is the hash for decl maps (see tree_decl_map_hash).  */
  h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
/* Insert a mapping FROM->TO in the debug expression hashtable. */

decl_debug_expr_insert (tree from, tree to)
  struct tree_decl_map *h;

  /* GC-allocated so the entry lives as long as FROM does (see
     tree_map_base_marked_p).  */
  h = ggc_alloc<tree_decl_map> ();
  h->base.from = from;
  *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
/* Lookup a value expression for FROM, and return it if we find one. */

decl_value_expr_lookup (tree from)
  struct tree_decl_map *h, in;
  in.base.from = from;

  /* Same DECL_UID hashing scheme as decl_debug_expr_lookup.  */
  h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
/* Insert a mapping FROM->TO in the value expression hashtable. */

decl_value_expr_insert (tree from, tree to)
  struct tree_decl_map *h;

  /* GC-allocated so the entry is kept alive exactly as long as FROM.  */
  h = ggc_alloc<tree_decl_map> ();
  h->base.from = from;
  *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
/* Lookup a vector of debug arguments for FROM, and return it if we

decl_debug_args_lookup (tree from)
  struct tree_vec_map *h, in;

  /* The DECL_HAS_DEBUG_ARGS_P flag tells us whether a table entry can
     exist at all, so bail out early without touching the table.  */
  if (!DECL_HAS_DEBUG_ARGS_P (from))
  gcc_checking_assert (debug_args_for_decl != NULL);
  in.base.from = from;
  h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
/* Insert a mapping FROM->empty vector of debug arguments in the value
   expression hashtable. */

decl_debug_args_insert (tree from)
  struct tree_vec_map *h;

  /* Idempotent: if FROM already has debug args, return the existing
     vector instead of creating a second entry.  */
  if (DECL_HAS_DEBUG_ARGS_P (from))
    return decl_debug_args_lookup (from);
  /* The table is created lazily on first insertion.  */
  if (debug_args_for_decl == NULL)
    debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
  h = ggc_alloc<tree_vec_map> ();
  h->base.from = from;
  loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
  DECL_HAS_DEBUG_ARGS_P (from) = 1;
/* Hashing of types so that we don't make duplicates.
   The entry point is `type_hash_canon'. */

/* Generate the default hash code for TYPE. This is designed for
   speed, rather than maximum entropy. */

/* NOTE(review): several `case` labels and braces of the switch below
   are elided in this listing; code kept verbatim.  */
type_hash_canon_hash (tree type)
  inchash::hash hstate;

  hstate.add_int (TREE_CODE (type));

  if (TREE_TYPE (type))
    hstate.add_object (TYPE_HASH (TREE_TYPE (type)));

  for (tree t = TYPE_ATTRIBUTES (type); t; t = TREE_CHAIN (t))
    /* Just the identifier is adequate to distinguish. */
    hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (t)));

  switch (TREE_CODE (type))
    /* Method types: mix in the base type.  */
    hstate.add_object (TYPE_HASH (TYPE_METHOD_BASETYPE (type)));
    /* Function/method types: mix in each argument type.  */
    for (tree t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
      if (TREE_VALUE (t) != error_mark_node)
	hstate.add_object (TYPE_HASH (TREE_VALUE (t)));
    hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
    /* Array types: domain plus, for non-aggregate elements, the
       typeless-storage flag (see type_cache_hasher::equal).  */
    if (TYPE_DOMAIN (type))
      hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
    if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
      unsigned typeless = TYPE_TYPELESS_STORAGE (type);
      hstate.add_object (typeless);
    /* Integer types: hash the bounds an element at a time.  */
    tree t = TYPE_MAX_VALUE (type);
    t = TYPE_MIN_VALUE (type);
    for (int i = 0; i < TREE_INT_CST_NUNITS (t); i++)
      hstate.add_object (TREE_INT_CST_ELT (t, i));
    case FIXED_POINT_TYPE:
      unsigned prec = TYPE_PRECISION (type);
      hstate.add_object (prec);
    unsigned nunits = TYPE_VECTOR_SUBPARTS (type);
    hstate.add_object (nunits);
  return hstate.end ();
/* These are the Hashtable callback functions. */

/* Returns true iff the types are equivalent. */

type_cache_hasher::equal (type_hash *a, type_hash *b)
  /* First test the things that are the same for all types. */
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
	  && TYPE_NAME (a->type) != TYPE_NAME (b->type)))

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))

  /* NOTE(review): some case labels and `return false` lines of this
     switch are elided in this listing.  */
  switch (TREE_CODE (a->type))
    case REFERENCE_TYPE:
    return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
    /* Enums: either identical value lists, or element-wise equal
       TREE_LISTs.  */
    if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	&& !(TYPE_VALUES (a->type)
	     && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	     && TYPE_VALUES (b->type)
	     && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	     && type_list_equal (TYPE_VALUES (a->type),
				 TYPE_VALUES (b->type))))
    /* Integer types: same precision and same (structural) bounds.  */
    if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
    return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	     || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				    TYPE_MAX_VALUE (b->type)))
	    && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		|| tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
				       TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    /* Method types: same base type and pairwise-equal argument lists.  */
    if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	&& (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	    || (TYPE_ARG_TYPES (a->type)
		&& TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		&& TYPE_ARG_TYPES (b->type)
		&& TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		&& type_list_equal (TYPE_ARG_TYPES (a->type),
				    TYPE_ARG_TYPES (b->type)))))

    /* Don't compare TYPE_TYPELESS_STORAGE flag on aggregates,
       where the flag should be inherited from the element type
       and can change after ARRAY_TYPEs are created; on non-aggregates
       compare it and hash it, scalars will never have that flag set
       and we need to differentiate between arrays created by different
       front-ends or middle-end created arrays. */
    return (TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type)
	    && (AGGREGATE_TYPE_P (TREE_TYPE (a->type))
		|| (TYPE_TYPELESS_STORAGE (a->type)
		    == TYPE_TYPELESS_STORAGE (b->type))));

    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	|| (TYPE_ARG_TYPES (a->type)
	    && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	    && TYPE_ARG_TYPES (b->type)
	    && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	    && type_list_equal (TYPE_ARG_TYPES (a->type),
				TYPE_ARG_TYPES (b->type))))

  /* Give the language a final say on equality for codes it owns.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);
/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value. */

type_hash_canon (unsigned int hashcode, tree type)
  /* The hash table only contains main variants, so ensure that's what we're
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs. */

  loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
    /* An identical type already exists: reuse it, and if TYPE was the
       most recently created node, try to reclaim its memory.  */
    tree t1 = ((type_hash *) *loc)->type;
    gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
    if (TYPE_UID (type) + 1 == next_type_uid)
    /* Free also min/max values and the cache for integer
       types. This can't be done in free_node, as LTO frees
       those on its own. */
    if (TREE_CODE (type) == INTEGER_TYPE)
      if (TYPE_MIN_VALUE (type)
	  && TREE_TYPE (TYPE_MIN_VALUE (type)) == type)
	ggc_free (TYPE_MIN_VALUE (type));
      if (TYPE_MAX_VALUE (type)
	  && TREE_TYPE (TYPE_MAX_VALUE (type)) == type)
	ggc_free (TYPE_MAX_VALUE (type));
      if (TYPE_CACHED_VALUES_P (type))
	ggc_free (TYPE_CACHED_VALUES (type));
  /* No match: record TYPE as the canonical entry.  */
  struct type_hash *h;

  h = ggc_alloc<type_hash> ();
/* Print out the statistics for the type hash table to stderr.  */
print_type_hash_statistics (void)
  fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
	   (long) type_hash_table->size (),
	   (long) type_hash_table->elements (),
	   type_hash_table->collisions ());
/* Given two lists of attributes, return true if list l2 is
   equivalent to l1. */

attribute_list_equal (const_tree l1, const_tree l2)
  /* Order-insensitive equality: each list must contain the other.  */
  return attribute_list_contained (l1, l2)
	 && attribute_list_contained (l2, l1);
/* Given two lists of attributes, return true if list L2 is
   completely contained within L1. */
/* ??? This would be faster if attribute names were stored in a canonicalized
   form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
   must be used to show these elements are equivalent (which they are). */
/* ??? It's not clear that attributes with arguments will always be handled

attribute_list_contained (const_tree l1, const_tree l2)
  /* First check the obvious, maybe the lists are identical. */

  /* Maybe the lists are similar: walk both in lockstep while names and
     values match pairwise.  */
  for (t1 = l1, t2 = l2;
       && get_attribute_name (t1) == get_attribute_name (t2)
       && TREE_VALUE (t1) == TREE_VALUE (t2);
       t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))

  /* Maybe the lists are equal. */
  if (t1 == 0 && t2 == 0)

  /* General case: every remaining attribute of L2 must occur somewhere
     in L1 with an equal value.  */
  for (; t2 != 0; t2 = TREE_CHAIN (t2))
      /* This CONST_CAST is okay because lookup_attribute does not
	 modify its argument and the return value is assigned to a
      for (attr = lookup_ident_attribute (get_attribute_name (t2),
					  CONST_CAST_TREE (l1));
	   attr != NULL_TREE && !attribute_value_equal (t2, attr);
	   attr = lookup_ident_attribute (get_attribute_name (t2),

      if (attr == NULL_TREE)
/* Given two lists of types
   (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
   return 1 if the lists contain the same types in the same order.
   Also, the TREE_PURPOSEs must match. */

type_list_equal (const_tree l1, const_tree l2)
  for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    /* Values must be pointer-identical; purposes may instead be
       structurally equal constants of the same type.  */
    if (TREE_VALUE (t1) != TREE_VALUE (t2)
	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
	    && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
		  && (TREE_TYPE (TREE_PURPOSE (t1))
		      == TREE_TYPE (TREE_PURPOSE (t2))))))
/* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
   given by TYPE. If the argument list accepts variable arguments,
   then this function counts only the ordinary arguments. */

type_num_arguments (const_tree type)
  for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
    /* If the function does not take a variable number of arguments,
       the last element in the list will have type `void'. */
    if (VOID_TYPE_P (TREE_VALUE (t)))
/* Nonzero if integer constants T1 and T2
   represent the same constant value. */

tree_int_cst_equal (const_tree t1, const_tree t2)
  /* A NULL operand never equals anything (identity was checked above
     in elided lines).  */
  if (t1 == 0 || t2 == 0)

  /* Compare as arbitrary-precision values, ignoring type identity.  */
  if (TREE_CODE (t1) == INTEGER_CST
      && TREE_CODE (t2) == INTEGER_CST
      && wi::to_widest (t1) == wi::to_widest (t2))
/* Return true if T is an INTEGER_CST whose numerical value (extended
   according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */

tree_fits_shwi_p (const_tree t)
  return (t != NULL_TREE
	  && TREE_CODE (t) == INTEGER_CST
	  && wi::fits_shwi_p (wi::to_widest (t)));
/* Return true if T is an INTEGER_CST whose numerical value (extended
   according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */

tree_fits_uhwi_p (const_tree t)
  return (t != NULL_TREE
	  && TREE_CODE (t) == INTEGER_CST
	  && wi::fits_uhwi_p (wi::to_widest (t)));
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that

tree_to_shwi (const_tree t)
  /* The fits check guarantees the low element is the whole value.  */
  gcc_assert (tree_fits_shwi_p (t));
  return TREE_INT_CST_LOW (t);
/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that

unsigned HOST_WIDE_INT
tree_to_uhwi (const_tree t)
  /* The fits check guarantees the low element is the whole value.  */
  gcc_assert (tree_fits_uhwi_p (t));
  return TREE_INT_CST_LOW (t);
/* Return the most significant (sign) bit of T. */

tree_int_cst_sign_bit (const_tree t)
  /* The sign bit sits at position precision-1 of T's type.  */
  unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;

  return wi::extract_uhwi (t, bitno, 1);
/* Return an indication of the sign of the integer constant T.
   The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
   Note that -1 will never be returned if T's type is unsigned. */

tree_int_cst_sgn (const_tree t)
  if (wi::eq_p (t, 0))
  /* Any nonzero unsigned value is positive.  */
  else if (TYPE_UNSIGNED (TREE_TYPE (t)))
  else if (wi::neg_p (t))
/* Return the minimum number of bits needed to represent VALUE in a
   signed or unsigned type, UNSIGNEDP says which. */

tree_int_cst_min_precision (tree value, signop sgn)
  /* If the value is negative, compute its negative minus 1. The latter
     adjustment is because the absolute value of the largest negative value
     is one larger than the largest positive value. This is equivalent to
     a bit-wise negation, so use that operation instead. */

  if (tree_int_cst_sgn (value) < 0)
    value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);

  /* Return the number of bits needed, taking into account the fact
     that we need one more bit for a signed than unsigned type.
     If value is 0 or -1, the minimum precision is 1 no matter
     whether unsignedp is true or false. */

  if (integer_zerop (value))

  return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7439 /* Return truthvalue of whether T1 is the same tree structure as T2.
7440 Return 1 if they are the same.
7441 Return 0 if they are understandably different.
7442 Return -1 if either contains tree structure not understood by
7446 simple_cst_equal (const_tree t1, const_tree t2)
7448 enum tree_code code1, code2;
7454 if (t1 == 0 || t2 == 0)
7457 code1 = TREE_CODE (t1);
7458 code2 = TREE_CODE (t2);
7460 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7462 if (CONVERT_EXPR_CODE_P (code2)
7463 || code2 == NON_LVALUE_EXPR)
7464 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7466 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7469 else if (CONVERT_EXPR_CODE_P (code2)
7470 || code2 == NON_LVALUE_EXPR)
7471 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7479 return wi::to_widest (t1) == wi::to_widest (t2);
7482 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7485 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7488 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7489 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7490 TREE_STRING_LENGTH (t1)));
7494 unsigned HOST_WIDE_INT idx;
7495 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7496 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7498 if (vec_safe_length (v1) != vec_safe_length (v2))
7501 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7502 /* ??? Should we handle also fields here? */
7503 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7509 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7512 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7515 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7518 const_tree arg1, arg2;
7519 const_call_expr_arg_iterator iter1, iter2;
7520 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7521 arg2 = first_const_call_expr_arg (t2, &iter2);
7523 arg1 = next_const_call_expr_arg (&iter1),
7524 arg2 = next_const_call_expr_arg (&iter2))
7526 cmp = simple_cst_equal (arg1, arg2);
7530 return arg1 == arg2;
7534 /* Special case: if either target is an unallocated VAR_DECL,
7535 it means that it's going to be unified with whatever the
7536 TARGET_EXPR is really supposed to initialize, so treat it
7537 as being equivalent to anything. */
7538 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7539 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7540 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7541 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7542 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7543 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7546 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7551 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7553 case WITH_CLEANUP_EXPR:
7554 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7558 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
7561 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7562 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7576 /* This general rule works for most tree codes. All exceptions should be
7577 handled above. If this is a language-specific tree code, we can't
7578 trust what might be in the operand, so say we don't know
7580 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7583 switch (TREE_CODE_CLASS (code1))
7587 case tcc_comparison:
7588 case tcc_expression:
7592 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7594 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
/* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
   Return -1, 0, or 1 if the value of T is less than, equal to, or greater
   than U, respectively. */

compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
  /* Negative T is always less than unsigned U.  */
  if (tree_int_cst_sgn (t) < 0)
  /* T too wide for a HOST_WIDE_INT must exceed U.  */
  else if (!tree_fits_uhwi_p (t))
  else if (TREE_INT_CST_LOW (t) == u)
  else if (TREE_INT_CST_LOW (t) < u)
/* Return true if SIZE represents a constant size that is in bounds of
   what the middle-end and the backend accepts (covering not more than
   half of the address-space). */

valid_constant_size_p (const_tree size)
  /* Reject sizes that overflowed, don't fit a uhwi, or have the top
     bit set (i.e. exceed half the address space).  */
  if (! tree_fits_uhwi_p (size)
      || TREE_OVERFLOW (size)
      || tree_int_cst_sign_bit (size) != 0)
/* Return the precision of the type, or for a complex or vector type the
   precision of the type of its elements. */

element_precision (const_tree type)
  /* NOTE(review): the guard before this TREE_TYPE step is elided in
     this listing (presumably "if T is not a type, use its type").  */
  type = TREE_TYPE (type);
  enum tree_code code = TREE_CODE (type);
  if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
    type = TREE_TYPE (type);

  return TYPE_PRECISION (type);
/* Return true if CODE represents an associative tree code. Otherwise

/* NOTE(review): the body of this function (a switch over CODE) is
   elided in this listing.  */
associative_tree_code (enum tree_code code)
/* Return true if CODE represents a commutative tree code. Otherwise

/* NOTE(review): the switch header and several case labels are elided
   in this listing; the visible cases are the commutative ones.  */
commutative_tree_code (enum tree_code code)
    case MULT_HIGHPART_EXPR:
    case UNORDERED_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_XOR_EXPR:
    case WIDEN_MULT_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
/* Return true if CODE represents a ternary tree code for which the
   first two operands are commutative. Otherwise return false. */
commutative_ternary_tree_code (enum tree_code code)
    /* The two multiplication operands of these may be swapped.  */
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
/* Returns true if CODE can overflow. */

operation_can_overflow (enum tree_code code)
      /* Can overflow in various ways. */
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* For INT_MIN / -1. */
      /* These operators cannot overflow. */
/* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
   ftrapv doesn't generate trapping insns for CODE. */

operation_no_trapping_overflow (tree type, enum tree_code code)
  gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));

  /* We don't generate instructions that trap on overflow for complex or vector
  if (!INTEGRAL_TYPE_P (type))

  /* Types without trapping overflow are trivially safe.  */
  if (!TYPE_OVERFLOW_TRAPS (type))

      /* These operators can overflow, and -ftrapv generates trapping code for
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case FLOOR_DIV_EXPR:
      /* These operators can overflow, but -ftrapv does not generate trapping
      /* These operators cannot overflow. */
/* Generate a hash value for an expression. This can be used iteratively
   by passing a previous result as the HSTATE argument.

   This function is intended to produce the same hash for expressions which
   would compare equal using operand_equal_p. */

/* NOTE(review): many case labels, braces and `return`/`break`
   statements of this function are elided in this listing; the code is
   kept verbatim.  */
add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
  enum tree_code code;
  enum tree_code_class tclass;

  /* NULL and error trees all hash to the same value.  */
  if (t == NULL_TREE || t == error_mark_node)
      hstate.merge_hash (0);

  if (!(flags & OEP_ADDRESS_OF))

  code = TREE_CODE (t);

      /* Alas, constants aren't shared, so we can't rely on pointer
      hstate.merge_hash (0);
      /* INTEGER_CSTs: hash every extended element of the value.  */
      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
      for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
	hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
      /* Reals: +0.0 and -0.0 hash alike unless signed zeros matter.  */
      if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
      val2 = real_hash (TREE_REAL_CST_PTR (t));
      hstate.merge_hash (val2);
      unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
      hstate.merge_hash (val2);
      hstate.add ((const void *) TREE_STRING_POINTER (t),
		  TREE_STRING_LENGTH (t));
      /* Complex constants hash as real part then imaginary part.  */
      inchash::add_expr (TREE_REALPART (t), hstate, flags);
      inchash::add_expr (TREE_IMAGPART (t), hstate, flags);
      for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
	inchash::add_expr (VECTOR_CST_ELT (t, i), hstate, flags);
      /* We can just compare by pointer. */
      hstate.add_wide_int (SSA_NAME_VERSION (t));
    case PLACEHOLDER_EXPR:
      /* The node itself doesn't matter. */
      /* A list of expressions, for a CALL_EXPR or as the elements of a
      for (; t; t = TREE_CHAIN (t))
	inchash::add_expr (TREE_VALUE (t), hstate, flags);
      /* CONSTRUCTORs: hash each (field, value) pair without
	 OEP_ADDRESS_OF.  */
      unsigned HOST_WIDE_INT idx;
      flags &= ~OEP_ADDRESS_OF;
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
	  inchash::add_expr (field, hstate, flags);
	  inchash::add_expr (value, hstate, flags);
    case STATEMENT_LIST:
	tree_stmt_iterator i;
	for (i = tsi_start (CONST_CAST_TREE (t));
	     !tsi_end_p (i); tsi_next (&i))
	  inchash::add_expr (tsi_stmt (i), hstate, flags);
      for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
	inchash::add_expr (TREE_VEC_ELT (t, i), hstate, flags);
      /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
	 Otherwise nodes that compare equal according to operand_equal_p might
	 get different hash codes. However, don't do this for machine specific
	 or front end builtins, since the function code is overloaded in those
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  code = TREE_CODE (t);

      tclass = TREE_CODE_CLASS (code);

      if (tclass == tcc_declaration)
	  /* DECL's have a unique ID */
	  hstate.add_wide_int (DECL_UID (t));
      else if (tclass == tcc_comparison && !commutative_tree_code (code))
	  /* For comparisons that can be swapped, use the lower
	  enum tree_code ccode = swap_tree_comparison (code);
	  hstate.add_object (ccode);
	  /* Operand order follows the canonicalized comparison code.  */
	  inchash::add_expr (TREE_OPERAND (t, ccode != code), hstate, flags);
	  inchash::add_expr (TREE_OPERAND (t, ccode == code), hstate, flags);
      else if (CONVERT_EXPR_CODE_P (code))
	  /* NOP_EXPR and CONVERT_EXPR are considered equal by
	  enum tree_code ccode = NOP_EXPR;
	  hstate.add_object (ccode);

	  /* Don't hash the type, that can lead to having nodes which
	     compare equal according to operand_equal_p, but which
	     have different hash codes. Make sure to include signedness
	     in the hash computation. */
	  hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
	  inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);
      /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
      else if (code == MEM_REF
	       && (flags & OEP_ADDRESS_OF) != 0
	       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
	       && integer_zerop (TREE_OPERAND (t, 1)))
	inchash::add_expr (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
      /* Don't ICE on FE specific trees, or their arguments etc.
	 during operand_equal_p hash verification. */
      else if (!IS_EXPR_CODE_CLASS (tclass))
	gcc_assert (flags & OEP_HASH_CHECK);
	  unsigned int sflags = flags;

	  hstate.add_object (code);

	      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	      flags |= OEP_ADDRESS_OF;
	    case TARGET_MEM_REF:
	      flags &= ~OEP_ADDRESS_OF;
	    case ARRAY_RANGE_REF:
	      sflags &= ~OEP_ADDRESS_OF;
	      flags &= ~OEP_ADDRESS_OF;
	    case WIDEN_MULT_PLUS_EXPR:
	    case WIDEN_MULT_MINUS_EXPR:
		/* The multiplication operands are commutative. */
		inchash::hash one, two;
		inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
		inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
		hstate.add_commutative (one, two);
		/* The addend is order-sensitive, hashed separately.  */
		inchash::add_expr (TREE_OPERAND (t, 2), two, flags);
	      /* Internal-function calls have no FN tree; hash the
		 internal function number instead.  */
	      if (CALL_EXPR_FN (t) == NULL_TREE)
		hstate.add_int (CALL_EXPR_IFN (t));
	      /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
		 Usually different TARGET_EXPRs just should use
		 different temporaries in their slots. */
	      inchash::add_expr (TARGET_EXPR_SLOT (t), hstate, flags);
	      /* Don't hash the type, that can lead to having nodes which
		 compare equal according to operand_equal_p, but which
		 have different hash codes. */
	      if (code == NON_LVALUE_EXPR)
		  /* Make sure to include signness in the hash computation. */
		  hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
		  inchash::add_expr (TREE_OPERAND (t, 0), hstate, flags);

	      else if (commutative_tree_code (code))
		  /* It's a commutative expression. We want to hash it the same
		     however it appears. We do this by first hashing both operands
		     and then rehashing based on the order of their independent
		  inchash::hash one, two;
		  inchash::add_expr (TREE_OPERAND (t, 0), one, flags);
		  inchash::add_expr (TREE_OPERAND (t, 1), two, flags);
		  hstate.add_commutative (one, two);
		for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
		  inchash::add_expr (TREE_OPERAND (t, i), hstate,
				     i == 0 ? flags : sflags);
8062 /* Constructors for pointer, array and function types.
8063 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
8064 constructed by language-dependent code, not here.) */
/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
   reference all of memory. If such a type has already been
   constructed, reuse it. */

build_pointer_type_for_mode (tree to_type, machine_mode mode,
  /* Remember the caller's original request before the may_alias
     attribute possibly forces CAN_ALIAS_ALL on.  */
  bool could_alias = can_alias_all;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it. */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Thread the new pointer type onto TO_TYPE's pointer chain.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),

  /* Lay out the type. This function has many callers that are concerned
     with expression-construction, and this simplifies them all. */
8127 /* By default build pointers in ptr_mode. */
/* Convenience wrapper: pick the pointer mode from TO_TYPE's address
   space (generic for error_mark_node) and delegate with
   can_alias_all == false.  */
8130 build_pointer_type (tree to_type)
8132 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8133 : TYPE_ADDR_SPACE (to_type);
8134 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8135 return build_pointer_type_for_mode (to_type, pointer_mode, false);
8138 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
/* Mirrors build_pointer_type_for_mode line-for-line, with the
   TYPE_REFERENCE_TO / TYPE_NEXT_REF_TO chain instead of the pointer
   chain.  Any change here should normally be made to both.  */
8141 build_reference_type_for_mode (tree to_type, machine_mode mode,
/* Save the caller's original CAN_ALIAS_ALL before may_alias may set it.  */
8145 bool could_alias = can_alias_all;
8147 if (to_type == error_mark_node)
8148 return error_mark_node;
8150 /* If the pointed-to type has the may_alias attribute set, force
8151 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
8152 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
8153 can_alias_all = true;
8155 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
8156 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
8157 In that case, return that type without regard to the rest of our
8160 ??? This is a kludge, but consistent with the way this function has
8161 always operated and there doesn't seem to be a good way to avoid this
8163 if (TYPE_REFERENCE_TO (to_type) != 0
8164 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
8165 return TYPE_REFERENCE_TO (to_type);
8167 /* First, if we already have a type for pointers to TO_TYPE and it's
8168 the proper mode, use it. */
8169 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
8170 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
/* No cached match: build a new REFERENCE_TYPE and link it in.  */
8173 t = make_node (REFERENCE_TYPE)_
8175 TREE_TYPE (t) = to_type;
8176 SET_TYPE_MODE (t, mode);
8177 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8178 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
8179 TYPE_REFERENCE_TO (to_type) = t;
8181 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8182 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8183 SET_TYPE_STRUCTURAL_EQUALITY (t);
8184 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8186 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8195 /* Build the node for the type of references-to-TO_TYPE by default
/* Convenience wrapper, parallel to build_pointer_type: derive the
   pointer mode from TO_TYPE's address space and delegate.  */
8199 build_reference_type (tree to_type)
8201 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8202 : TYPE_ADDR_SPACE (to_type);
8203 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8204 return build_reference_type_for_mode (to_type, pointer_mode, false);
8207 #define MAX_INT_CACHED_PREC \
8208 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
/* Cache of small-precision integer types; one slot per precision for
   signed types plus a second bank (offset MAX_INT_CACHED_PREC + 1) for
   unsigned ones, hence the 2 * ... + 2 size.  */
8209 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8211 /* Builds a signed or unsigned integer type of precision PRECISION.
8212 Used for C bitfields whose precision does not match that of
8213 built-in target types. */
8215 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
/* Bias the cache index into the unsigned bank; presumably guarded by
   UNSIGNEDP on an elided line -- confirm against the full source.  */
8221 unsignedp = MAX_INT_CACHED_PREC + 1;
8223 if (precision <= MAX_INT_CACHED_PREC)
8225 itype = nonstandard_integer_type_cache[precision + unsignedp];
8230 itype = make_node (INTEGER_TYPE);
8231 TYPE_PRECISION (itype) = precision;
8234 fixup_unsigned_type (itype);
8236 fixup_signed_type (itype);
/* Canonicalize through the type hash so equal types are shared.  */
8239 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8240 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8241 if (precision <= MAX_INT_CACHED_PREC)
8242 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8247 #define MAX_BOOL_CACHED_PREC \
8248 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8249 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1]_
8251 /* Builds a boolean type of precision PRECISION.
8252 Used for boolean vectors to choose proper vector element size. */
8254 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
/* Fast path: return a previously built type of this precision.  */
8258 if (precision <= MAX_BOOL_CACHED_PREC)
8260 type = nonstandard_boolean_type_cache[precision];
8265 type = make_node (BOOLEAN_TYPE);
8266 TYPE_PRECISION (type) = precision;
8267 fixup_signed_type (type);
/* NOTE(review): the cache-lookup above checks MAX_BOOL_CACHED_PREC but
   the cache-store below checks MAX_INT_CACHED_PREC.  Both macros expand
   to the same value, so behavior is unaffected, but using
   MAX_BOOL_CACHED_PREC in both places would be the consistent choice.  */
8269 if (precision <= MAX_INT_CACHED_PREC)
8270 nonstandard_boolean_type_cache[precision] = type;
8275 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8276 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8277 is true, reuse such a type that has already been constructed. */
8280 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8282 tree itype = make_node (INTEGER_TYPE);
8284 TREE_TYPE (itype) = type;
/* HIGHVAL may legitimately be null (an unbounded range); keep the max
   value NULL in that case rather than converting.  */
8286 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8287 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
/* The range type inherits all representational properties (precision,
   mode, size, alignment) from the base TYPE.  */
8289 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8290 SET_TYPE_MODE (itype, TYPE_MODE (type));
8291 TYPE_SIZE (itype) = TYPE_SIZE (type);
8292 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8293 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8294 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
/* Non-constant bounds cannot be hashed reliably, so such types fall
   back to structural equality instead of canonical sharing.  */
8299 if ((TYPE_MIN_VALUE (itype)
8300 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8301 || (TYPE_MAX_VALUE (itype)
8302 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8304 /* Since we cannot reliably merge this type, we need to compare it using
8305 structural equality checks. */
8306 SET_TYPE_STRUCTURAL_EQUALITY (itype);
/* Presumably reached only when SHARED is set (guard elided here):
   intern the new type in the type hash table.  */
8310 hashval_t hash = type_hash_canon_hash (itype);
8311 itype = type_hash_canon (hash, itype);
8316 /* Wrapper around build_range_type_1 with SHARED set to true. */
8319 build_range_type (tree type, tree lowval, tree highval)
8321 return build_range_type_1 (type, lowval, highval, true);
8324 /* Wrapper around build_range_type_1 with SHARED set to false. */
8327 build_nonshared_range_type (tree type, tree lowval, tree highval)
8329 return build_range_type_1 (type, lowval, highval, false);
8332 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8333 MAXVAL should be the maximum value in the domain
8334 (one less than the length of the array).
8336 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8337 We don't enforce this limit, that is up to caller (e.g. language front end).
8338 The limit exists because the result is a signed type and we don't handle
8339 sizes that use more than one HOST_WIDE_INT. */
/* An index type is simply the shared range [0, MAXVAL] over sizetype.  */
8342 build_index_type (tree maxval)
8344 return build_range_type (sizetype, size_zero_node, maxval);
8347 /* Return true if the debug information for TYPE, a subtype, should be emitted
8348 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8349 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8350 debug info and doesn't reflect the source code. */
8353 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8355 tree base_type = TREE_TYPE (type), low, high;
8357 /* Subrange types have a base type which is an integral type. */
8358 if (!INTEGRAL_TYPE_P (base_type))
8361 /* Get the real bounds of the subtype. */
/* Prefer the front end's notion of the bounds when the langhook is
   provided; otherwise fall back to the type's recorded min/max.  */
8362 if (lang_hooks.types.get_subrange_bounds)
8363 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8366 low = TYPE_MIN_VALUE (type);
8367 high = TYPE_MAX_VALUE (type);
8370 /* If the type and its base type have the same representation and the same
8371 name, then the type is not a subrange but a copy of the base type. */
8372 if ((TREE_CODE (base_type) == INTEGER_TYPE
8373 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8374 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8375 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8376 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8377 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8387 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8388 and number of elements specified by the range of values of INDEX_TYPE.
8389 If TYPELESS_STORAGE is true, TYPE_TYPELESS_STORAGE flag is set on the type.
8390 If SHARED is true, reuse such a type that has already been constructed. */
8393 build_array_type_1 (tree elt_type, tree index_type, bool typeless_storage,
/* Arrays of functions are invalid; degrade to int elements so
   compilation can continue after the diagnostic.  */
8398 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8400 error ("arrays of functions are not meaningful");
8401 elt_type = integer_type_node;
8404 t = make_node (ARRAY_TYPE);
8405 TREE_TYPE (t) = elt_type;
8406 TYPE_DOMAIN (t) = index_type;
/* The array lives in the same address space as its elements.  */
8407 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8408 TYPE_TYPELESS_STORAGE (t) = typeless_storage;
8411 /* If the element type is incomplete at this point we get marked for
8412 structural equality. Do not record these types in the canonical
8414 if (TYPE_STRUCTURAL_EQUALITY_P (t))
/* Presumably the SHARED path (guard elided): intern in the type hash.  */
8419 hashval_t hash = type_hash_canon_hash (t);
8420 t = type_hash_canon (hash, t);
/* Compute TYPE_CANONICAL: structural if any constituent is structural,
   otherwise recurse on the canonical element/index types.  */
8423 if (TYPE_CANONICAL (t) == t)
8425 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8426 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8428 SET_TYPE_STRUCTURAL_EQUALITY (t);
8429 else if (TYPE_CANONICAL (elt_type) != elt_type
8430 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8432 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8434 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8435 typeless_storage, shared);
8441 /* Wrapper around build_array_type_1 with SHARED set to true. */
8444 build_array_type (tree elt_type, tree index_type, bool typeless_storage)
8446 return build_array_type_1 (elt_type, index_type, typeless_storage, true);
8449 /* Wrapper around build_array_type_1 with SHARED set to false. */
8452 build_nonshared_array_type (tree elt_type, tree index_type)
8454 return build_array_type_1 (elt_type, index_type, false, false);
8457 /* Return a representation of ELT_TYPE[NELTS], using indices of type
/* The domain is [0, NELTS-1]; callers are responsible for NELTS >= 1
   (NELTS == 0 would wrap the unsigned subtraction).  */
8461 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8463 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8466 /* Recursively examines the array elements of TYPE, until a non-array
8467 element type is found. */
8470 strip_array_types (tree type)
/* Peel off ARRAY_TYPE wrappers iteratively; TREE_TYPE of an array is
   its element type.  */
8472 while (TREE_CODE (type) == ARRAY_TYPE)
8473 type = TREE_TYPE (type);
8478 /* Computes the canonical argument types from the argument type list
8481 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8482 on entry to this function, or if any of the ARGTYPES are
8485 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8486 true on entry to this function, or if any of the ARGTYPES are
8489 Returns a canonical argument list, which may be ARGTYPES when the
8490 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8491 true) or would not differ from ARGTYPES. */
8494 maybe_canonicalize_argtypes (tree argtypes,
8495 bool *any_structural_p,
8496 bool *any_noncanonical_p)
8499 bool any_noncanonical_argtypes_p = false;
/* First pass: classify the arguments.  Stop early once we know the
   list requires structural equality, since no canonical list will be
   built in that case.  */
8501 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8503 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8504 /* Fail gracefully by stating that the type is structural. */
8505 *any_structural_p = true;
8506 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8507 *any_structural_p = true;
8508 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8509 || TREE_PURPOSE (arg))
8510 /* If the argument has a default argument, we consider it
8511 non-canonical even though the type itself is canonical.
8512 That way, different variants of function and method types
8513 with default arguments will all point to the variant with
8514 no defaults as their canonical type. */
8515 any_noncanonical_argtypes_p = true;
8518 if (*any_structural_p)
8521 if (any_noncanonical_argtypes_p)
8523 /* Build the canonical list of argument types. */
8524 tree canon_argtypes = NULL_TREE;
8525 bool is_void = false;
/* Second pass: rebuild the list from canonical types, dropping any
   TREE_PURPOSE (default arguments).  The terminating void_list_node
   is recognized and re-appended after the nreverse.  */
8527 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8529 if (arg == void_list_node)
8532 canon_argtypes = tree_cons (NULL_TREE,
8533 TYPE_CANONICAL (TREE_VALUE (arg)),
8537 canon_argtypes = nreverse (canon_argtypes);
8539 canon_argtypes = chainon (canon_argtypes, void_list_node);
8541 /* There is a non-canonical type. */
8542 *any_noncanonical_p = true;
8543 return canon_argtypes;
8546 /* The canonical argument types are the same as ARGTYPES. */
8550 /* Construct, lay out and return
8551 the type of functions returning type VALUE_TYPE
8552 given arguments of types ARG_TYPES.
8553 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8554 are data type nodes for the arguments of the function.
8555 If such a type has already been constructed, reuse it. */
8558 build_function_type (tree value_type, tree arg_types)
8561 inchash::hash hstate;
8562 bool any_structural_p, any_noncanonical_p;
8563 tree canon_argtypes;
/* Functions cannot return functions; diagnose and degrade to int.  */
8565 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8567 error ("function return type cannot be function")_
8568 value_type = integer_type_node;
8571 /* Make a node of the sort we want. */
8572 t = make_node (FUNCTION_TYPE);
8573 TREE_TYPE (t) = value_type;
8574 TYPE_ARG_TYPES (t) = arg_types;
8576 /* If we already have such a type, use the old one. */
8577 hashval_t hash = type_hash_canon_hash (t);
8578 t = type_hash_canon (hash, t);
8580 /* Set up the canonical type. */
/* Canonicality depends on both the return type and every argument
   type; maybe_canonicalize_argtypes ORs its findings into the flags.  */
8581 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8582 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8583 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8585 &any_noncanonical_p);
8586 if (any_structural_p)
8587 SET_TYPE_STRUCTURAL_EQUALITY (t);
8588 else if (any_noncanonical_p)
8589 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
/* Lay the type out on first construction only.  */
8592 if (!COMPLETE_TYPE_P (t))
8597 /* Build a function type. The RETURN_TYPE is the type returned by the
8598 function. If VAARGS is set, no void_type_node is appended to the
8599 list. ARGP must be always be terminated be a NULL_TREE. */
8602 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
/* Collect the variadic arguments into a TREE_LIST, built backwards and
   reversed below.  */
8606 t = va_arg (argp, tree);
8607 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8608 args = tree_cons (NULL_TREE, t, args);
/* Varargs case (presumably; the guard is elided): no terminating
   void_list_node, and a trailing explicit void would be a caller bug.  */
8613 if (args != NULL_TREE)
8614 args = nreverse (args);
8615 gcc_assert (last != void_list_node);
/* No named arguments at all: the prototype is (void).  */
8617 else if (args == NULL_TREE)
8618 args = void_list_node;
/* Otherwise terminate the named-argument list with void_list_node.
   NOTE(review): `last` is set on an elided line -- presumably the last
   cons cell of the pre-reversal chain.  */
8622 args = nreverse (args);
8623 TREE_CHAIN (last) = void_list_node;
8625 args = build_function_type (return_type, args);
8630 /* Build a function type. The RETURN_TYPE is the type returned by the
8631 function. If additional arguments are provided, they are
8632 additional argument types. The list of argument types must always
8633 be terminated by NULL_TREE. */
8636 build_function_type_list (tree return_type, ...)
/* Non-varargs wrapper over build_function_type_list_1.  */
8641 va_start (p, return_type);
8642 args = build_function_type_list_1 (false, return_type, p);
8647 /* Build a variable argument function type. The RETURN_TYPE is the
8648 type returned by the function. If additional arguments are provided,
8649 they are additional argument types. The list of argument types must
8650 always be terminated by NULL_TREE. */
8653 build_varargs_function_type_list (tree return_type, ...)
/* Varargs wrapper over build_function_type_list_1 (no trailing void).  */
8658 va_start (p, return_type);
8659 args = build_function_type_list_1 (true, return_type, p);
8665 /* Build a function type. RETURN_TYPE is the type returned by the
8666 function; VAARGS indicates whether the function takes varargs. The
8667 function takes N named arguments, the types of which are provided in
8671 build_function_type_array_1 (bool vaargs, tree return_type, int n,
/* Seed the chain with void_list_node for prototyped functions, or
   NULL_TREE for varargs, then cons the array entries on back-to-front
   so the final list is in declaration order.  */
8675 tree t = vaargs ? NULL_TREE : void_list_node;
8677 for (i = n - 1; i >= 0; i--)
8678 t = tree_cons (NULL_TREE, arg_types[i], t);
8680 return build_function_type (return_type, t);
8683 /* Build a function type. RETURN_TYPE is the type returned by the
8684 function. The function takes N named arguments, the types of which
8685 are provided in ARG_TYPES. */
8688 build_function_type_array (tree return_type, int n, tree *arg_types)
8690 return build_function_type_array_1 (false, return_type, n, arg_types);
8693 /* Build a variable argument function type. RETURN_TYPE is the type
8694 returned by the function. The function takes N named arguments, the
8695 types of which are provided in ARG_TYPES. */
8698 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8700 return build_function_type_array_1 (true, return_type, n, arg_types);
8703 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8704 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8705 for the method. An implicit additional parameter (of type
8706 pointer-to-BASETYPE) is added to the ARGTYPES. */
8709 build_method_type_directly (tree basetype,
8715 bool any_structural_p, any_noncanonical_p;
8716 tree canon_argtypes;
8718 /* Make a node of the sort we want. */
8719 t = make_node (METHOD_TYPE);
8721 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8722 TREE_TYPE (t) = rettype;
8723 ptype = build_pointer_type (basetype);
8725 /* The actual arglist for this function includes a "hidden" argument
8726 which is "this". Put it into the list of argument types. */
8727 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8728 TYPE_ARG_TYPES (t) = argtypes;
8730 /* If we already have such a type, use the old one. */
8731 hashval_t hash = type_hash_canon_hash (t);
8732 t = type_hash_canon (hash, t);
8734 /* Set up the canonical type. */
/* The basetype and return type both feed the canonicality decision.
   Skip the synthesized `this' argument (TREE_CHAIN) when scanning the
   argument list -- it is derived from basetype, already accounted for.  */
8736 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8737 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8739 = (TYPE_CANONICAL (basetype) != basetype
8740 || TYPE_CANONICAL (rettype) != rettype);
8741 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8743 &any_noncanonical_p);
8744 if (any_structural_p)
8745 SET_TYPE_STRUCTURAL_EQUALITY (t);
8746 else if (any_noncanonical_p)
8748 = build_method_type_directly (TYPE_CANONICAL (basetype),
8749 TYPE_CANONICAL (rettype),
8751 if (!COMPLETE_TYPE_P (t))
8757 /* Construct, lay out and return the type of methods belonging to class
8758 BASETYPE and whose arguments and values are described by TYPE.
8759 If that type exists already, reuse it.
8760 TYPE must be a FUNCTION_TYPE node. */
8763 build_method_type (tree basetype, tree type)
/* Decompose the FUNCTION_TYPE into return and argument types and
   rebuild as a METHOD_TYPE of BASETYPE.  */
8765 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8767 return build_method_type_directly (basetype,
8769 TYPE_ARG_TYPES (type));
8772 /* Construct, lay out and return the type of offsets to a value
8773 of type TYPE, within an object of type BASETYPE.
8774 If a suitable offset type exists already, reuse it. */
8777 build_offset_type (tree basetype, tree type)
8781 /* Make a node of the sort we want. */
8782 t = make_node (OFFSET_TYPE);
/* Offsets are recorded against the main variant of the base type so
   cv-qualified variants share one offset type.  */
8784 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8785 TREE_TYPE (t) = type;
8787 /* If we already have such a type, use the old one. */
8788 hashval_t hash = type_hash_canon_hash (t);
8789 t = type_hash_canon (hash, t);
8791 if (!COMPLETE_TYPE_P (t))
/* Compute TYPE_CANONICAL only if this node is its own canonical type
   (i.e., it was newly interned rather than an existing match).  */
8794 if (TYPE_CANONICAL (t) == t)
8796 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8797 || TYPE_STRUCTURAL_EQUALITY_P (type))
8798 SET_TYPE_STRUCTURAL_EQUALITY (t);
8799 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8800 || TYPE_CANONICAL (type) != type)
8802 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8803 TYPE_CANONICAL (type));
8809 /* Create a complex type whose components are COMPONENT_TYPE.
8811 If NAMED is true, the type is given a TYPE_NAME. We do not always
8812 do so because this creates a DECL node and thus make the DECL_UIDs
8813 dependent on the type canonicalization hashtable, which is GC-ed,
8814 so the DECL_UIDs would not be stable wrt garbage collection. */
8817 build_complex_type (tree component_type, bool named)
/* Only integral, real-float and fixed-point components are valid.  */
8821 gcc_assert (INTEGRAL_TYPE_P (component_type)
8822 || SCALAR_FLOAT_TYPE_P (component_type)
8823 || FIXED_POINT_TYPE_P (component_type));
8825 /* Make a node of the sort we want. */
8826 t = make_node (COMPLEX_TYPE);
/* Build over the unqualified component; qualifiers are re-applied to
   the whole complex type at the end.  */
8828 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8830 /* If we already have such a type, use the old one. */
8831 hashval_t hash = type_hash_canon_hash (t);
8832 t = type_hash_canon (hash, t);
8834 if (!COMPLETE_TYPE_P (t))
8837 if (TYPE_CANONICAL (t) == t)
8839 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8840 SET_TYPE_STRUCTURAL_EQUALITY (t);
8841 else if (TYPE_CANONICAL (component_type) != component_type)
8843 = build_complex_type (TYPE_CANONICAL (component_type), named);
8846 /* We need to create a name, since complex is a fundamental type. */
8847 if (!TYPE_NAME (t) && named)
/* Map each standard integer component to the corresponding
   human-readable "complex ..." spelling for debug/diagnostics.  */
8850 if (component_type == char_type_node)
8851 name = "complex char";
8852 else if (component_type == signed_char_type_node)
8853 name = "complex signed char";
8854 else if (component_type == unsigned_char_type_node)
8855 name = "complex unsigned char";
8856 else if (component_type == short_integer_type_node)
8857 name = "complex short int";
8858 else if (component_type == short_unsigned_type_node)
8859 name = "complex short unsigned int";
8860 else if (component_type == integer_type_node)
8861 name = "complex int";
8862 else if (component_type == unsigned_type_node)
8863 name = "complex unsigned int";
8864 else if (component_type == long_integer_type_node)
8865 name = "complex long int";
8866 else if (component_type == long_unsigned_type_node)
8867 name = "complex long unsigned int";
8868 else if (component_type == long_long_integer_type_node)
8869 name = "complex long long int";
8870 else if (component_type == long_long_unsigned_type_node)
8871 name = "complex long long unsigned int";
/* Presumably guarded by a name != NULL check on an elided line.  */
8876 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8877 get_identifier (name), t);
8880 return build_qualified_type (t, TYPE_QUALS (component_type));
8883 /* If TYPE is a real or complex floating-point type and the target
8884 does not directly support arithmetic on TYPE then return the wider
8885 type to be used for arithmetic on TYPE. Otherwise, return
8889 excess_precision_type (tree type)
8891 /* The target can give two different responses to the question of
8892 which excess precision mode it would like depending on whether we
8893 are in -fexcess-precision=standard or -fexcess-precision=fast. */
8895 enum excess_precision_type requested_type
8896 = (flag_excess_precision == EXCESS_PRECISION_FAST
8897 ? EXCESS_PRECISION_TYPE_FAST
8898 : EXCESS_PRECISION_TYPE_STANDARD);
8900 enum flt_eval_method target_flt_eval_method
8901 = targetm.c.excess_precision (requested_type);
8903 /* The target should not ask for unpredictable float evaluation (though
8904 it might advertise that implicitly the evaluation is unpredictable,
8905 but we don't care about that here, it will have been reported
8906 elsewhere). If it does ask for unpredictable evaluation, we have
8907 nothing to do here. */
8908 gcc_assert (target_flt_eval_method != FLT_EVAL_METHOD_UNPREDICTABLE);
8910 /* Nothing to do. The target has asked for all types we know about
8911 to be computed with their native precision and range. */
8912 if (target_flt_eval_method == FLT_EVAL_METHOD_PROMOTE_TO_FLOAT16)
8915 /* The target will promote this type in a target-dependent way, so excess
8916 precision ought to leave it alone. */
8917 if (targetm.promoted_type (type) != NULL_TREE)
/* float16 support is optional; guard against a NULL node before
   taking its mode.  The comparison value for the missing case is on
   an elided line.  */
8920 machine_mode float16_type_mode = (float16_type_node
8921 ? TYPE_MODE (float16_type_node)
8923 machine_mode float_type_mode = TYPE_MODE (float_type_node);
8924 machine_mode double_type_mode = TYPE_MODE (double_type_node);
/* Dispatch on the kind of TYPE: real scalars map to wider real types,
   complex types map to the matching wider complex types.  */
8926 switch (TREE_CODE (type))
8930 machine_mode type_mode = TYPE_MODE (type);
8931 switch (target_flt_eval_method)
8933 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8934 if (type_mode == float16_type_mode)
8935 return float_type_node;
8937 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8938 if (type_mode == float16_type_mode
8939 || type_mode == float_type_mode)
8940 return double_type_node;
8942 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8943 if (type_mode == float16_type_mode
8944 || type_mode == float_type_mode
8945 || type_mode == double_type_mode)
8946 return long_double_type_node;
/* Complex case: only complex-of-real participates.  */
8955 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8957 machine_mode type_mode = TYPE_MODE (TREE_TYPE (type));
8958 switch (target_flt_eval_method)
8960 case FLT_EVAL_METHOD_PROMOTE_TO_FLOAT:
8961 if (type_mode == float16_type_mode)
8962 return complex_float_type_node;
8964 case FLT_EVAL_METHOD_PROMOTE_TO_DOUBLE:
8965 if (type_mode == float16_type_mode
8966 || type_mode == float_type_mode)
8967 return complex_double_type_node;
8969 case FLT_EVAL_METHOD_PROMOTE_TO_LONG_DOUBLE:
8970 if (type_mode == float16_type_mode
8971 || type_mode == float_type_mode
8972 || type_mode == double_type_mode)
8973 return complex_long_double_type_node;
8987 /* Return OP, stripped of any conversions to wider types as much as is safe.
8988 Converting the value back to OP's type makes a value equivalent to OP.
8990 If FOR_TYPE is nonzero, we return a value which, if converted to
8991 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8993 OP must have integer, real or enumeral type. Pointers are not allowed!
8995 There are some cases where the obvious value we could return
8996 would regenerate to OP if converted to OP's type,
8997 but would not extend like OP to wider types.
8998 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8999 For example, if OP is (unsigned short)(signed char)-1,
9000 we avoid returning (signed char)-1 if FOR_TYPE is int,
9001 even though extending that to an unsigned short would regenerate OP,
9002 since the result of extending (signed char)-1 to (int)
9003 is different from (int) OP. */
9006 get_unwidened (tree op, tree for_type)
9008 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
9009 tree type = TREE_TYPE (op);
9011 = TYPE_PRECISION (for_type != 0 ? for_type : type);
9013 = (for_type != 0 && for_type != type
9014 && final_prec > TYPE_PRECISION (type)
9015 && TYPE_UNSIGNED (type));
/* Walk inward through nested conversions, stripping those that are
   provably reversible.  */
9018 while (CONVERT_EXPR_P (op))
9022 /* TYPE_PRECISION on vector types has different meaning
9023 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
9024 so avoid them here. */
9025 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
/* Positive bitschange means this conversion widened; negative means
   it truncated.  */
9028 bitschange = TYPE_PRECISION (TREE_TYPE (op))
9029 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
9031 /* Truncations are many-one so cannot be removed.
9032 Unless we are later going to truncate down even farther. */
9034 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
9037 /* See what's inside this conversion. If we decide to strip it,
9039 op = TREE_OPERAND (op, 0);
9041 /* If we have not stripped any zero-extensions (uns is 0),
9042 we can strip any kind of extension.
9043 If we have previously stripped a zero-extension,
9044 only zero-extensions can safely be stripped.
9045 Any extension can be stripped if the bits it would produce
9046 are all going to be discarded later by truncating to FOR_TYPE. */
9050 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
9052 /* TYPE_UNSIGNED says whether this is a zero-extension.
9053 Let's avoid computing it if it does not affect WIN
9054 and if UNS will not be needed again. */
9056 || CONVERT_EXPR_P (op))
9057 && TYPE_UNSIGNED (TREE_TYPE (op)))
9065 /* If we finally reach a constant see if it fits in sth smaller and
9066 in that case convert it. */
9067 if (TREE_CODE (win) == INTEGER_CST)
9069 tree wtype = TREE_TYPE (win);
9070 unsigned prec = wi::min_precision (win, TYPE_SIGN (wtype));
/* Never narrow the constant below the precision FOR_TYPE needs.  */
9072 prec = MAX (prec, final_prec);
9073 if (prec < TYPE_PRECISION (wtype))
9075 tree t = lang_hooks.types.type_for_size (prec, TYPE_UNSIGNED (wtype));
9076 if (t && TYPE_PRECISION (t) < TYPE_PRECISION (wtype))
9077 win = fold_convert (t, win);
9084 /* Return OP or a simpler expression for a narrower value
9085 which can be sign-extended or zero-extended to give back OP.
9086 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
9087 or 0 if the value should be sign-extended. */
9090 get_narrower (tree op, int *unsignedp_ptr)
9095 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
/* Walk through nested NOP_EXPR conversions, tracking whether the
   extensions stripped so far were sign or zero extensions (UNS).  */
9097 while (TREE_CODE (op) == NOP_EXPR)
9100 = (TYPE_PRECISION (TREE_TYPE (op))
9101 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
9103 /* Truncations are many-one so cannot be removed. */
9107 /* See what's inside this conversion. If we decide to strip it,
9112 op = TREE_OPERAND (op, 0);
9113 /* An extension: the outermost one can be stripped,
9114 but remember whether it is zero or sign extension. */
9116 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9117 /* Otherwise, if a sign extension has been stripped,
9118 only sign extensions can now be stripped;
9119 if a zero extension has been stripped, only zero-extensions. */
9120 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
9124 else /* bitschange == 0 */
9126 /* A change in nominal type can always be stripped, but we must
9127 preserve the unsignedness. */
/* Presumably guarded by a `first' check on an elided line -- uns is
   only recorded for the outermost stripped conversion.  */
9129 uns = TYPE_UNSIGNED (TREE_TYPE (op));
9131 op = TREE_OPERAND (op, 0);
9132 /* Keep trying to narrow, but don't assign op to win if it
9133 would turn an integral type into something else. */
9134 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
/* Bit-field-like COMPONENT_REFs can provide a narrower value directly
   from the field, subject to the same extension rules.  */
9141 if (TREE_CODE (op) == COMPONENT_REF
9142 /* Since type_for_size always gives an integer type. */
9143 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
9144 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
9145 /* Ensure field is laid out already. */
9146 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
9147 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
9149 unsigned HOST_WIDE_INT innerprec
9150 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
9151 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
9152 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
9153 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
9155 /* We can get this structure field in a narrower type that fits it,
9156 but the resulting extension to its nominal type (a fullword type)
9157 must satisfy the same conditions as for other extensions.
9159 Do this only for fields that are aligned (not bit-fields),
9160 because when bit-field insns will be used there is no
9161 advantage in doing this. */
9163 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
9164 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
9165 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
9169 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
9170 win = fold_convert (type, op);
/* Report whether the narrowed value should be zero-extended (1) or
   sign-extended (0) to regenerate OP.  */
9174 *unsignedp_ptr = uns;
9178 /* Return true if integer constant C has a value that is permissible
9179 for TYPE, an integral type. */
9182 int_fits_type_p (const_tree c, const_tree type)
9184 tree type_low_bound, type_high_bound;
9185 bool ok_for_low_bound, ok_for_high_bound;
9186 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
9188 /* Non-standard boolean types can have arbitrary precision but various
9189 transformations assume that they can only take values 0 and +/-1. */
9190 if (TREE_CODE (type) == BOOLEAN_TYPE)
9191 return wi::fits_to_boolean_p (c, type);
9194 type_low_bound = TYPE_MIN_VALUE (type);
9195 type_high_bound = TYPE_MAX_VALUE (type);
9197 /* If at least one bound of the type is a constant integer, we can check
9198 ourselves and maybe make a decision. If no such decision is possible, but
9199 this type is a subtype, try checking against that. Otherwise, use
9200 fits_to_tree_p, which checks against the precision.
9202 Compute the status for each possibly constant bound, and return if we see
9203 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
9204 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
9205 for "constant known to fit". */
9207 /* Check if c >= type_low_bound. */
9208 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9210 if (tree_int_cst_lt (c, type_low_bound))
9212 ok_for_low_bound = true;
9215 ok_for_low_bound = false;
9217 /* Check if c <= type_high_bound. */
9218 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9220 if (tree_int_cst_lt (type_high_bound, c))
9222 ok_for_high_bound = true;
9225 ok_for_high_bound = false;
9227 /* If the constant fits both bounds, the result is known. */
9228 if (ok_for_low_bound && ok_for_high_bound)
9231 /* Perform some generic filtering which may allow making a decision
9232 even if the bounds are not constant. First, negative integers
9233 never fit in unsigned types, */
9234 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
9237 /* Second, narrower types always fit in wider ones. */
9238 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9241 /* Third, unsigned integers with top bit set never fit signed types. */
9242 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9244 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
9245 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9247 /* When a tree_cst is converted to a wide-int, the precision
9248 is taken from the type. However, if the precision of the
9249 mode underneath the type is smaller than that, it is
9250 possible that the value will not fit. The test below
9251 fails if any bit is set between the sign bit of the
9252 underlying mode and the top bit of the type. */
9253 if (wi::ne_p (wi::zext (c, prec - 1), c))
9256 else if (wi::neg_p (c))
9260 /* If we haven't been able to decide at this point, there nothing more we
9261 can check ourselves here. Look at the base type if we have one and it
9262 has the same precision. */
9263 if (TREE_CODE (type) == INTEGER_TYPE
9264 && TREE_TYPE (type) != 0
9265 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
/* Retry the whole analysis against the base type (loop/goto target is
   on an elided line).  */
9267 type = TREE_TYPE (type);
9271 /* Or to fits_to_tree_p, if nothing else. */
9272 return wi::fits_to_tree_p (c, type);
9275 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9276 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9277 represented (assuming two's-complement arithmetic) within the bit
9278 precision of the type are returned instead. */
9281 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
/* Lower bound: use the constant TYPE_MIN_VALUE when available ...  */
9283 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9284 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9285 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
/* ... otherwise fall back to the precision-derived minimum:
   0 for unsigned types, the most negative value for signed ones.  */
9288 if (TYPE_UNSIGNED (type))
9289 mpz_set_ui (min, 0);
9292 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9293 wi::to_mpz (mn, min, SIGNED);
/* Upper bound: same scheme, using TYPE_MAX_VALUE or the
   precision-derived maximum.  */
9297 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9298 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9299 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9302 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9303 wi::to_mpz (mn, max, TYPE_SIGN (type));
9307 /* Return true if VAR is an automatic variable defined in function FN. */
9310 auto_var_in_fn_p (const_tree var, const_tree fn)
/* A decl qualifies when its context is FN and it is either a
   non-external, non-static VAR_DECL/PARM_DECL, or a LABEL_DECL or
   RESULT_DECL (which are always function-local).  */
9312 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9313 && ((((VAR_P (var) && ! DECL_EXTERNAL (var))
9314 || TREE_CODE (var) == PARM_DECL)
9315 && ! TREE_STATIC (var))
9316 || TREE_CODE (var) == LABEL_DECL
9317 || TREE_CODE (var) == RESULT_DECL));
9320 /* Subprogram of following function. Called by walk_tree.
9322 Return *TP if it is an automatic variable or parameter of the
9323 function passed in as DATA. */
9326 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9328 tree fn = (tree) data;
/* NOTE(review): a preceding branch (elided in this dump, lines
   9329-9332) presumably handles type nodes before this decl test --
   confirm against the full source.  */
9333 else if (DECL_P (*tp)
9334 && auto_var_in_fn_p (*tp, fn))
9340 /* Returns true if T is, contains, or refers to a type with variable
9341 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9342 arguments, but not the return type. If FN is nonzero, only return
9343 true if a modifier of the type or position of FN is a variable or
9344 parameter inside FN.
9346 This concept is more general than that of C99 'variably modified types':
9347 in C99, a struct type is never variably modified because a VLA may not
9348 appear as a structure member. However, in GNU C code like:
9350 struct S { int i[f()]; };
9352 is valid, and other languages may define similar constructs. */
9355 variably_modified_type_p (tree type, tree fn)
9359 /* Test if T is either variable (if FN is zero) or an expression containing
9360 a variable in FN. If TYPE isn't gimplified, return true also if
9361 gimplify_one_sizepos would gimplify the expression into a local
9363 #define RETURN_TRUE_IF_VAR(T) \
9364 do { tree _t = (T); \
9365 if (_t != NULL_TREE \
9366 && _t != error_mark_node \
9367 && TREE_CODE (_t) != INTEGER_CST \
9368 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9370 || (!TYPE_SIZES_GIMPLIFIED (type) \
9371 && !is_gimple_sizepos (_t)) \
9372 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9373 return true; } while (0)
9375 if (type == error_mark_node)
9378 /* If TYPE itself has variable size, it is variably modified. */
9379 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9380 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9382 switch (TREE_CODE (type))
9385 case REFERENCE_TYPE:
/* Pointer-like types: variably modified iff the pointed-to type is.  */
9387 if (variably_modified_type_p (TREE_TYPE (type), fn))
9393 /* If TYPE is a function type, it is variably modified if the
9394 return type is variably modified. */
9395 if (variably_modified_type_p (TREE_TYPE (type), fn))
9401 case FIXED_POINT_TYPE:
9404 /* Scalar types are variably modified if their end points
9406 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9407 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9412 case QUAL_UNION_TYPE:
9413 /* We can't see if any of the fields are variably-modified by the
9414 definition we normally use, since that would produce infinite
9415 recursion via pointers. */
9416 /* This is variably modified if some field's type is. */
9417 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9418 if (TREE_CODE (t) == FIELD_DECL)
9420 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9421 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9422 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
/* QUAL_UNION_TYPE discriminants can also contain variable
   expressions.  */
9424 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9425 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9430 /* Do not call ourselves to avoid infinite recursion. This is
9431 variably modified if the element type is. */
9432 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9433 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9440 /* The current language may have other cases to check, but in general,
9441 all other types are not variably modified. */
9442 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9444 #undef RETURN_TRUE_IF_VAR
9447 /* Given a DECL or TYPE, return the scope in which it was declared, or
9448 NULL_TREE if there is no containing scope. */
9451 get_containing_scope (const_tree t)
/* Types and decls record their scope in different fields.  */
9453 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9456 /* Return the innermost context enclosing DECL that is
9457 a FUNCTION_DECL, or zero if none. */
9460 decl_function_context (const_tree decl)
9464 if (TREE_CODE (decl) == ERROR_MARK)
9467 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9468 where we look up the function at runtime. Such functions always take
9469 a first argument of type 'pointer to real context'.
9471 C++ should really be fixed to use DECL_CONTEXT for the real context,
9472 and use something else for the "virtual context". */
9473 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9476 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9478 context = DECL_CONTEXT (decl);
/* Walk outward through blocks and other scopes until we reach a
   FUNCTION_DECL (or run out of contexts).  */
9480 while (context && TREE_CODE (context) != FUNCTION_DECL)
9482 if (TREE_CODE (context) == BLOCK)
9483 context = BLOCK_SUPERCONTEXT (context);
9485 context = get_containing_scope (context);
9491 /* Return the innermost context enclosing DECL that is
9492 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9493 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9496 decl_type_context (const_tree decl)
9498 tree context = DECL_CONTEXT (decl);
/* Walk outward, skipping through decl and block scopes, until an
   aggregate type (or a terminating scope) is found.  */
9501 switch (TREE_CODE (context))
9503 case NAMESPACE_DECL:
9504 case TRANSLATION_UNIT_DECL:
9509 case QUAL_UNION_TYPE:
9514 context = DECL_CONTEXT (context);
9518 context = BLOCK_SUPERCONTEXT (context);
9528 /* CALL is a CALL_EXPR. Return the declaration for the function
9529 called, or NULL_TREE if the called function cannot be
9533 get_callee_fndecl (const_tree call)
9537 if (call == error_mark_node)
9538 return error_mark_node;
9540 /* It's invalid to call this function with anything but a
9542 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9544 /* The first operand to the CALL is the address of the function
9546 addr = CALL_EXPR_FN (call);
9548 /* If there is no function, return early. */
9549 if (addr == NULL_TREE)
9554 /* If this is a readonly function pointer, extract its initial value. */
9555 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9556 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9557 && DECL_INITIAL (addr))
9558 addr = DECL_INITIAL (addr);
9560 /* If the address is just `&f' for some function `f', then we know
9561 that `f' is being called. */
9562 if (TREE_CODE (addr) == ADDR_EXPR
9563 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9564 return TREE_OPERAND (addr, 0);
9566 /* We couldn't figure out what was being called. */
9570 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9571 return the associated function code, otherwise return CFN_LAST. */
9574 get_call_combined_fn (const_tree call)
9576 /* It's invalid to call this function with anything but a CALL_EXPR. */
9577 gcc_assert (TREE_CODE (call) == CALL_EXPR);
/* A null function operand means this is an internal-function call.  */
9579 if (!CALL_EXPR_FN (call))
9580 return as_combined_fn (CALL_EXPR_IFN (call));
9582 tree fndecl = get_callee_fndecl (call);
9583 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9584 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
/* Width of the dashed separator lines in the statistics dump.  */
9589 #define TREE_MEM_USAGE_SPACES 40
9591 /* Print debugging information about tree nodes generated during the compile,
9592 and any language-specific information. */
9595 dump_tree_statistics (void)
9597 if (GATHER_STATISTICS)
9600 int total_nodes, total_bytes;
9601 fprintf (stderr, "\nKind Nodes Bytes\n");
9602 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9603 total_nodes = total_bytes = 0;
/* Per-kind node and byte counts, with running totals.  */
9604 for (i = 0; i < (int) all_kinds; i++)
9606 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9607 tree_node_counts[i], tree_node_sizes[i]);
9608 total_nodes += tree_node_counts[i];
9609 total_bytes += tree_node_sizes[i];
9611 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9612 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9613 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
/* Per-tree-code node counts.  */
9614 fprintf (stderr, "Code Nodes\n");
9615 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9616 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9617 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9618 tree_code_counts[i]);
9619 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9620 fprintf (stderr, "\n");
9621 ssanames_print_statistics ();
9622 fprintf (stderr, "\n");
9623 phinodes_print_statistics ();
9624 fprintf (stderr, "\n");
9627 fprintf (stderr, "(No per-node statistics)\n");
/* These statistics are gathered unconditionally.  */
9629 print_type_hash_statistics ();
9630 print_debug_expr_statistics ();
9631 print_value_expr_statistics ();
9632 lang_hooks.print_statistics ();
/* Format used to build names of file-scope functions such as global
   constructors/destructors (see get_file_function_name).  */
9635 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9637 /* Generate a crc32 of the low BYTES bytes of VALUE. */
9640 crc32_unsigned_n (unsigned chksum, unsigned value, unsigned bytes)
9642 /* This relies on the raw feedback's top 4 bits being zero. */
9643 #define FEEDBACK(X) ((X) * 0x04c11db7)
9644 #define SYNDROME(X) (FEEDBACK ((X) & 1) ^ FEEDBACK ((X) & 2) \
9645 ^ FEEDBACK ((X) & 4) ^ FEEDBACK ((X) & 8))
/* Precomputed table of feedback values for every 4-bit nibble,
   built at compile time from the CRC-32 polynomial 0x04c11db7.  */
9646 static const unsigned syndromes[16] =
9648 SYNDROME(0x0), SYNDROME(0x1), SYNDROME(0x2), SYNDROME(0x3),
9649 SYNDROME(0x4), SYNDROME(0x5), SYNDROME(0x6), SYNDROME(0x7),
9650 SYNDROME(0x8), SYNDROME(0x9), SYNDROME(0xa), SYNDROME(0xb),
9651 SYNDROME(0xc), SYNDROME(0xd), SYNDROME(0xe), SYNDROME(0xf),
/* Left-justify VALUE so its top byte aligns with the checksum, then
   fold it in one nibble (4 bits) per iteration.  */
9656 value <<= (32 - bytes * 8);
9657 for (unsigned ix = bytes * 2; ix--; value <<= 4)
9659 unsigned feedback = syndromes[((value ^ chksum) >> 28) & 0xf];
9661 chksum = (chksum << 4) ^ feedback;
9667 /* Generate a crc32 of a string. */
9670 crc32_string (unsigned chksum, const char *string)
/* Fold each byte of STRING into the running checksum.  */
9673 chksum = crc32_byte (chksum, *string);
9678 /* P is a string that will be used in a symbol. Mask out any characters
9679 that are not valid in that context. */
9682 clean_symbol_name (char *p)
/* '$' and '.' are allowed in symbol names only on targets that
   support them; otherwise they must be replaced.  */
9686 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9689 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9696 /* For anonymous aggregate types, we need some sort of name to
9697 hold on to. In practice, this should not appear, but it should
9698 not be harmful if it does. */
/* Return true if ID_NODE is the compiler-generated name of an
   anonymous aggregate.  The recognized prefix depends on which
   characters the target allows in labels.  */
9700 anon_aggrname_p(const_tree id_node)
9702 #ifndef NO_DOT_IN_LABEL
9703 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9704 && IDENTIFIER_POINTER (id_node)[1] == '_');
9705 #else /* NO_DOT_IN_LABEL */
9706 #ifndef NO_DOLLAR_IN_LABEL
9707 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9708 && IDENTIFIER_POINTER (id_node)[1] == '_');
9709 #else /* NO_DOLLAR_IN_LABEL */
9710 #define ANON_AGGRNAME_PREFIX "__anon_"
9711 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9712 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9713 #endif /* NO_DOLLAR_IN_LABEL */
9714 #endif /* NO_DOT_IN_LABEL */
9717 /* Return a format for an anonymous aggregate name. */
/* NOTE(review): the returned format strings (original lines 9722,
   9725, 9727) are elided in this dump; presumably they mirror the
   prefixes tested in anon_aggrname_p -- confirm against the full
   source.  */
9719 anon_aggrname_format()
9721 #ifndef NO_DOT_IN_LABEL
9723 #else /* NO_DOT_IN_LABEL */
9724 #ifndef NO_DOLLAR_IN_LABEL
9726 #else /* NO_DOLLAR_IN_LABEL */
9728 #endif /* NO_DOLLAR_IN_LABEL */
9729 #endif /* NO_DOT_IN_LABEL */
9732 /* Generate a name for a special-purpose function.
9733 The generated name may need to be unique across the whole link.
9734 Changes to this function may also require corresponding changes to
9735 xstrdup_mask_random.
9736 TYPE is some string to identify the purpose of this function to the
9737 linker or collect2; it must start with an uppercase letter,
9739 I - for constructors
9741 N - for C++ anonymous namespaces
9742 F - for DWARF unwind frame information. */
9745 get_file_function_name (const char *type)
9751 /* If we already have a name we know to be unique, just use that. */
9752 if (first_global_object_name)
9753 p = q = ASTRDUP (first_global_object_name);
9754 /* If the target is handling the constructors/destructors, they
9755 will be local to this file and the name is only necessary for
9757 We also assign sub_I and sub_D suffixes to constructors called from
9758 the global static constructors. These are always local. */
9759 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9760 || (strncmp (type, "sub_", 4) == 0
9761 && (type[4] == 'I' || type[4] == 'D')))
9763 const char *file = main_input_filename;
9765 file = LOCATION_FILE (input_location);
9766 /* Just use the file's basename, because the full pathname
9767 might be quite long. */
9768 p = q = ASTRDUP (lbasename (file));
9772 /* Otherwise, the name must be unique across the entire link.
9773 We don't have anything that we know to be unique to this translation
9774 unit, so use what we do have and throw in some randomness. */
9776 const char *name = weak_global_object_name;
9777 const char *file = main_input_filename;
9782 file = LOCATION_FILE (input_location);
9784 len = strlen (file);
/* 9 = "_%08X_" expansion, 19 = worst-case HOST_WIDE_INT hex.  */
9785 q = (char *) alloca (9 + 19 + len + 1);
9786 memcpy (q, file, len + 1);
9788 snprintf (q + len, 9 + 19 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9789 crc32_string (0, name), get_random_seed (false));
/* Strip characters that are not valid in a symbol before formatting
   the final name.  */
9794 clean_symbol_name (q);
9795 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9798 /* Set up the name of the file-level functions we may need.
9799 Use a global object (which is already required to be unique over
9800 the program) rather than the file name (which imposes extra
9802 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9804 return get_identifier (buf);
9807 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9809 /* Complain that the tree code of NODE does not match the expected 0
9810 terminated list of trailing codes. The trailing code list can be
9811 empty, for a more vague error message. FILE, LINE, and FUNCTION
9812 are of the caller. */
9815 tree_check_failed (const_tree node, const char *file,
9816 int line, const char *function, ...)
9820 unsigned length = 0;
9821 enum tree_code code;
/* First pass over the varargs: compute the buffer size needed
   (4 covers the " or " separator per code name).  */
9823 va_start (args, function);
9824 while ((code = (enum tree_code) va_arg (args, int)))
9825 length += 4 + strlen (get_tree_code_name (code));
/* Second pass: build the "expected A or B ..." message.  */
9830 va_start (args, function);
9831 length += strlen ("expected ");
9832 buffer = tmp = (char *) alloca (length);
9834 while ((code = (enum tree_code) va_arg (args, int)))
9836 const char *prefix = length ? " or " : "expected ";
9838 strcpy (tmp + length, prefix);
9839 length += strlen (prefix);
9840 strcpy (tmp + length, get_tree_code_name (code));
9841 length += strlen (get_tree_code_name (code));
/* Empty code list: fall back to a vague message.  */
9846 buffer = "unexpected node";
9848 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9849 buffer, get_tree_code_name (TREE_CODE (node)),
9850 function, trim_filename (file), line);
9853 /* Complain that the tree code of NODE does match the expected 0
9854 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9858 tree_not_check_failed (const_tree node, const char *file,
9859 int line, const char *function, ...)
9863 unsigned length = 0;
9864 enum tree_code code;
/* Size the buffer, then build the list of forbidden code names,
   mirroring tree_check_failed.  */
9866 va_start (args, function);
9867 while ((code = (enum tree_code) va_arg (args, int)))
9868 length += 4 + strlen (get_tree_code_name (code));
9870 va_start (args, function);
9871 buffer = (char *) alloca (length);
9873 while ((code = (enum tree_code) va_arg (args, int)))
9877 strcpy (buffer + length, " or ");
9880 strcpy (buffer + length, get_tree_code_name (code));
9881 length += strlen (get_tree_code_name (code));
9885 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9886 buffer, get_tree_code_name (TREE_CODE (node)),
9887 function, trim_filename (file), line);
9890 /* Similar to tree_check_failed, except that we check for a class of tree
9891 code, given in CL. */
9894 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9895 const char *file, int line, const char *function)
9898 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9899 TREE_CODE_CLASS_STRING (cl),
9900 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9901 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9904 /* Similar to tree_check_failed, except that instead of specifying a
9905 dozen codes, use the knowledge that they're all sequential. */
9908 tree_range_check_failed (const_tree node, const char *file, int line,
9909 const char *function, enum tree_code c1,
9913 unsigned length = 0;
/* Size the buffer for all code names in the inclusive range C1..C2.  */
9916 for (c = c1; c <= c2; ++c)
9917 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9919 length += strlen ("expected ");
9920 buffer = (char *) alloca (length);
/* Build the "expected A or B ..." message from the range.  */
9923 for (c = c1; c <= c2; ++c)
9925 const char *prefix = length ? " or " : "expected ";
9927 strcpy (buffer + length, prefix);
9928 length += strlen (prefix);
9929 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9930 length += strlen (get_tree_code_name ((enum tree_code) c));
9933 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9934 buffer, get_tree_code_name (TREE_CODE (node)),
9935 function, trim_filename (file), line);
9939 /* Similar to tree_check_failed, except that we check that a tree does
9940 not have the specified code, given in CL. */
9943 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9944 const char *file, int line, const char *function)
9947 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9948 TREE_CODE_CLASS_STRING (cl),
9949 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9950 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9954 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9957 omp_clause_check_failed (const_tree node, const char *file, int line,
9958 const char *function, enum omp_clause_code code)
9960 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9961 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9962 function, trim_filename (file), line);
9966 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9969 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9970 const char *function, enum omp_clause_code c1,
9971 enum omp_clause_code c2)
9974 unsigned length = 0;
/* Size the buffer for all clause names in the inclusive range C1..C2.  */
9977 for (c = c1; c <= c2; ++c)
9978 length += 4 + strlen (omp_clause_code_name[c]);
9980 length += strlen ("expected ");
9981 buffer = (char *) alloca (length);
/* Build the "expected A or B ..." message from the range.  */
9984 for (c = c1; c <= c2; ++c)
9986 const char *prefix = length ? " or " : "expected ";
9988 strcpy (buffer + length, prefix);
9989 length += strlen (prefix);
9990 strcpy (buffer + length, omp_clause_code_name[c]);
9991 length += strlen (omp_clause_code_name[c]);
9994 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9995 buffer, omp_clause_code_name[TREE_CODE (node)],
9996 function, trim_filename (file), line);
/* Table mapping tree_node_structure_enum values to their names,
   generated from treestruct.def.  */
10000 #undef DEFTREESTRUCT
10001 #define DEFTREESTRUCT(VAL, NAME) NAME,
10003 static const char *ts_enum_names[] = {
10004 #include "treestruct.def"
10006 #undef DEFTREESTRUCT
10008 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
10010 /* Similar to tree_class_check_failed, except that we check for
10011 whether CODE contains the tree structure identified by EN. */
10014 tree_contains_struct_check_failed (const_tree node,
10015 const enum tree_node_structure_enum en,
10016 const char *file, int line,
10017 const char *function)
10020 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
10022 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
10026 /* Similar to above, except that the check is for the bounds of a
10027 tree_int_cst's (dynamically sized) element vector. */
10030 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
10031 const char *function)
10034 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
10035 idx + 1, len, function, trim_filename (file), line);
10038 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
10039 (dynamically sized) vector. */
10042 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
10043 const char *function)
10046 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
10047 idx + 1, len, function, trim_filename (file), line);
10050 /* Similar to above, except that the check is for the bounds of the operand
10051 vector of an expression node EXP. */
10054 tree_operand_check_failed (int idx, const_tree exp, const char *file,
10055 int line, const char *function)
10057 enum tree_code code = TREE_CODE (exp);
10059 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
10060 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
10061 function, trim_filename (file), line);
10064 /* Similar to above, except that the check is for the number of
10065 operands of an OMP_CLAUSE node. */
10068 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
10069 int line, const char *function)
10072 ("tree check: accessed operand %d of omp_clause %s with %d operands "
10073 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
10074 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
10075 trim_filename (file), line);
10077 #endif /* ENABLE_TREE_CHECKING */
10079 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
10080 and mapped to the machine mode MODE. Initialize its fields and build
10081 the information necessary for debugging output. */
10084 make_vector_type (tree innertype, int nunits, machine_mode mode)
10087 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
10089 t = make_node (VECTOR_TYPE);
10090 TREE_TYPE (t) = mv_innertype;
10091 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
10092 SET_TYPE_MODE (t, mode);
/* Propagate structural equality; otherwise derive TYPE_CANONICAL
   from the canonical inner type (built recursively with VOIDmode so
   the canonical form is mode-independent).  */
10094 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
10095 SET_TYPE_STRUCTURAL_EQUALITY (t);
10096 else if ((TYPE_CANONICAL (mv_innertype) != innertype
10097 || mode != VOIDmode)
10098 && !VECTOR_BOOLEAN_TYPE_P (t))
10100 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
/* Share an existing identical type node if one was already built.  */
10104 hashval_t hash = type_hash_canon_hash (t);
10105 t = type_hash_canon (hash, t);
10107 /* We have built a main variant, based on the main variant of the
10108 inner type. Use it to build the variant we return. */
10109 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
10110 && TREE_TYPE (t) != innertype)
10111 return build_type_attribute_qual_variant (t,
10112 TYPE_ATTRIBUTES (innertype),
10113 TYPE_QUALS (innertype));
/* Create or reuse an integer type of SIZE bits, signed or unsigned
   per UNSIGNEDP.  Reuses the standard C type nodes when SIZE matches
   one of them; otherwise builds a fresh type node.  */
10119 make_or_reuse_type (unsigned size, int unsignedp)
10123 if (size == INT_TYPE_SIZE)
10124 return unsignedp ? unsigned_type_node : integer_type_node;
10125 if (size == CHAR_TYPE_SIZE)
10126 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
10127 if (size == SHORT_TYPE_SIZE)
10128 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
10129 if (size == LONG_TYPE_SIZE)
10130 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
10131 if (size == LONG_LONG_TYPE_SIZE)
10132 return (unsignedp ? long_long_unsigned_type_node
10133 : long_long_integer_type_node);
/* Also reuse any enabled __intN types (e.g. __int128).  */
10135 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10136 if (size == int_n_data[i].bitsize
10137 && int_n_enabled_p[i])
10138 return (unsignedp ? int_n_trees[i].unsigned_type
10139 : int_n_trees[i].signed_type);
/* No standard node matches; build a new type of the requested size.  */
10142 return make_unsigned_type (size);
10144 return make_signed_type (size);
10147 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
10150 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
/* Saturating fract types.  */
10154 if (size == SHORT_FRACT_TYPE_SIZE)
10155 return unsignedp ? sat_unsigned_short_fract_type_node
10156 : sat_short_fract_type_node;
10157 if (size == FRACT_TYPE_SIZE)
10158 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
10159 if (size == LONG_FRACT_TYPE_SIZE)
10160 return unsignedp ? sat_unsigned_long_fract_type_node
10161 : sat_long_fract_type_node;
10162 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10163 return unsignedp ? sat_unsigned_long_long_fract_type_node
10164 : sat_long_long_fract_type_node;
/* Non-saturating fract types.  */
10168 if (size == SHORT_FRACT_TYPE_SIZE)
10169 return unsignedp ? unsigned_short_fract_type_node
10170 : short_fract_type_node;
10171 if (size == FRACT_TYPE_SIZE)
10172 return unsignedp ? unsigned_fract_type_node : fract_type_node;
10173 if (size == LONG_FRACT_TYPE_SIZE)
10174 return unsignedp ? unsigned_long_fract_type_node
10175 : long_fract_type_node;
10176 if (size == LONG_LONG_FRACT_TYPE_SIZE)
10177 return unsignedp ? unsigned_long_long_fract_type_node
10178 : long_long_fract_type_node;
/* No standard node matches; build a fresh fract type.  */
10181 return make_fract_type (size, unsignedp, satp);
10184 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
10187 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
/* Saturating accum types.  */
10191 if (size == SHORT_ACCUM_TYPE_SIZE)
10192 return unsignedp ? sat_unsigned_short_accum_type_node
10193 : sat_short_accum_type_node;
10194 if (size == ACCUM_TYPE_SIZE)
10195 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10196 if (size == LONG_ACCUM_TYPE_SIZE)
10197 return unsignedp ? sat_unsigned_long_accum_type_node
10198 : sat_long_accum_type_node;
10199 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10200 return unsignedp ? sat_unsigned_long_long_accum_type_node
10201 : sat_long_long_accum_type_node;
/* Non-saturating accum types.  */
10205 if (size == SHORT_ACCUM_TYPE_SIZE)
10206 return unsignedp ? unsigned_short_accum_type_node
10207 : short_accum_type_node;
10208 if (size == ACCUM_TYPE_SIZE)
10209 return unsignedp ? unsigned_accum_type_node : accum_type_node;
10210 if (size == LONG_ACCUM_TYPE_SIZE)
10211 return unsignedp ? unsigned_long_accum_type_node
10212 : long_accum_type_node;
10213 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10214 return unsignedp ? unsigned_long_long_accum_type_node
10215 : long_long_accum_type_node;
/* No standard node matches; build a fresh accum type.  */
10218 return make_accum_type (size, unsignedp, satp);
10222 /* Create an atomic variant node for TYPE. This routine is called
10223 during initialization of data types to create the 5 basic atomic
10224 types. The generic build_variant_type function requires these to
10225 already be set up in order to function properly, so cannot be
10226 called from there. If ALIGN is non-zero, then ensure alignment is
10227 overridden to this value. */
10230 build_atomic_base (tree type, unsigned int align)
10234 /* Make sure it's not already registered. */
10235 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10238 t = build_variant_type_copy (type);
10239 set_type_quals (t, TYPE_QUAL_ATOMIC);
/* Override the alignment when the caller requested one.  */
10242 SET_TYPE_ALIGN (t, align);
10247 /* Information about the _FloatN and _FloatNx types. This must be in
10248 the same order as the corresponding TI_* enum values. */
/* NOTE(review): the initializer entries (original lines 10250-10259)
   are elided in this dump -- consult the full source for the table
   contents.  */
10249 const floatn_type_info floatn_nx_types[NUM_FLOATN_NX_TYPES] =
10261 /* Create nodes for all integer types (and error_mark_node) using the sizes
10262 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10265 build_common_tree_nodes (bool signed_char)
10269 error_mark_node = make_node (ERROR_MARK);
10270 TREE_TYPE (error_mark_node) = error_mark_node;
10272 initialize_sizetypes ();
10274 /* Define both `signed char' and `unsigned char'. */
10275 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10276 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10277 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10278 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10280 /* Define `char', which is like either `signed char' or `unsigned char'
10281 but not the same as either. */
10284 ? make_signed_type (CHAR_TYPE_SIZE)
10285 : make_unsigned_type (CHAR_TYPE_SIZE));
10286 TYPE_STRING_FLAG (char_type_node) = 1;
10288 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10289 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10290 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10291 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10292 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10293 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10294 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10295 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10297 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10299 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10300 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10301 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
10302 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
10304 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
10305 && int_n_enabled_p[i])
10307 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10308 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10312 /* Define a boolean type. This type only represents boolean values but
10313 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10314 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10315 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10316 TYPE_PRECISION (boolean_type_node) = 1;
10317 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10319 /* Define what type to use for size_t. */
10320 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10321 size_type_node = unsigned_type_node;
10322 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10323 size_type_node = long_unsigned_type_node;
10324 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10325 size_type_node = long_long_unsigned_type_node;
10326 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10327 size_type_node = short_unsigned_type_node;
10332 size_type_node = NULL_TREE;
10333 for (i = 0; i < NUM_INT_N_ENTS; i++)
10334 if (int_n_enabled_p[i])
10337 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10339 if (strcmp (name, SIZE_TYPE) == 0)
10341 size_type_node = int_n_trees[i].unsigned_type;
10344 if (size_type_node == NULL_TREE)
10345 gcc_unreachable ();
10348 /* Define what type to use for ptrdiff_t. */
10349 if (strcmp (PTRDIFF_TYPE, "int") == 0)
10350 ptrdiff_type_node = integer_type_node;
10351 else if (strcmp (PTRDIFF_TYPE, "long int") == 0)
10352 ptrdiff_type_node = long_integer_type_node;
10353 else if (strcmp (PTRDIFF_TYPE, "long long int") == 0)
10354 ptrdiff_type_node = long_long_integer_type_node;
10355 else if (strcmp (PTRDIFF_TYPE, "short int") == 0)
10356 ptrdiff_type_node = short_integer_type_node;
10359 ptrdiff_type_node = NULL_TREE;
10360 for (int i = 0; i < NUM_INT_N_ENTS; i++)
10361 if (int_n_enabled_p[i])
10364 sprintf (name, "__int%d", int_n_data[i].bitsize);
10365 if (strcmp (name, PTRDIFF_TYPE) == 0)
10366 ptrdiff_type_node = int_n_trees[i].signed_type;
10368 if (ptrdiff_type_node == NULL_TREE)
10369 gcc_unreachable ();
10372 /* Fill in the rest of the sized types. Reuse existing type nodes
10374 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10375 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10376 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10377 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10378 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10380 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10381 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10382 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10383 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10384 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10386 /* Don't call build_qualified type for atomics. That routine does
10387 special processing for atomics, and until they are initialized
10388 it's better not to make that call.
10390 Check to see if there is a target override for atomic types. */
10392 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10393 targetm.atomic_align_for_mode (QImode));
10394 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10395 targetm.atomic_align_for_mode (HImode));
10396 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10397 targetm.atomic_align_for_mode (SImode));
10398 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10399 targetm.atomic_align_for_mode (DImode));
10400 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10401 targetm.atomic_align_for_mode (TImode));
10403 access_public_node = get_identifier ("public");
10404 access_protected_node = get_identifier ("protected");
10405 access_private_node = get_identifier ("private");
10407 /* Define these next since types below may used them. */
10408 integer_zero_node = build_int_cst (integer_type_node, 0);
10409 integer_one_node = build_int_cst (integer_type_node, 1);
10410 integer_three_node = build_int_cst (integer_type_node, 3);
10411 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10413 size_zero_node = size_int (0);
10414 size_one_node = size_int (1);
10415 bitsize_zero_node = bitsize_int (0);
10416 bitsize_one_node = bitsize_int (1);
10417 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10419 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10420 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10422 void_type_node = make_node (VOID_TYPE);
10423 layout_type (void_type_node);
10425 pointer_bounds_type_node = targetm.chkp_bound_type ();
10427 /* We are not going to have real types in C with less than byte alignment,
10428 so we might as well not have any types that claim to have it. */
10429 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10430 TYPE_USER_ALIGN (void_type_node) = 0;
10432 void_node = make_node (VOID_CST);
10433 TREE_TYPE (void_node) = void_type_node;
10435 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10436 layout_type (TREE_TYPE (null_pointer_node));
10438 ptr_type_node = build_pointer_type (void_type_node);
10439 const_ptr_type_node
10440 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10441 for (unsigned i = 0;
10442 i < sizeof (builtin_structptr_types) / sizeof (builtin_structptr_type);
10444 builtin_structptr_types[i].node = builtin_structptr_types[i].base;
10446 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10448 float_type_node = make_node (REAL_TYPE);
10449 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10450 layout_type (float_type_node);
10452 double_type_node = make_node (REAL_TYPE);
10453 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10454 layout_type (double_type_node);
10456 long_double_type_node = make_node (REAL_TYPE);
10457 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10458 layout_type (long_double_type_node);
10460 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10462 int n = floatn_nx_types[i].n;
10463 bool extended = floatn_nx_types[i].extended;
10464 machine_mode mode = targetm.floatn_mode (n, extended);
10465 if (mode == VOIDmode)
10467 int precision = GET_MODE_PRECISION (mode);
10468 /* Work around the rs6000 KFmode having precision 113 not
10470 const struct real_format *fmt = REAL_MODE_FORMAT (mode);
10471 gcc_assert (fmt->b == 2 && fmt->emin + fmt->emax == 3);
10472 int min_precision = fmt->p + ceil_log2 (fmt->emax - fmt->emin);
10474 gcc_assert (min_precision == n);
10475 if (precision < min_precision)
10476 precision = min_precision;
10477 FLOATN_NX_TYPE_NODE (i) = make_node (REAL_TYPE);
10478 TYPE_PRECISION (FLOATN_NX_TYPE_NODE (i)) = precision;
10479 layout_type (FLOATN_NX_TYPE_NODE (i));
10480 SET_TYPE_MODE (FLOATN_NX_TYPE_NODE (i), mode);
10483 float_ptr_type_node = build_pointer_type (float_type_node);
10484 double_ptr_type_node = build_pointer_type (double_type_node);
10485 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10486 integer_ptr_type_node = build_pointer_type (integer_type_node);
10488 /* Fixed size integer types. */
10489 uint16_type_node = make_or_reuse_type (16, 1);
10490 uint32_type_node = make_or_reuse_type (32, 1);
10491 uint64_type_node = make_or_reuse_type (64, 1);
10493 /* Decimal float types. */
10494 dfloat32_type_node = make_node (REAL_TYPE);
10495 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10496 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10497 layout_type (dfloat32_type_node);
10498 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10500 dfloat64_type_node = make_node (REAL_TYPE);
10501 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10502 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10503 layout_type (dfloat64_type_node);
10504 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10506 dfloat128_type_node = make_node (REAL_TYPE);
10507 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10508 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10509 layout_type (dfloat128_type_node);
10510 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10512 complex_integer_type_node = build_complex_type (integer_type_node, true);
10513 complex_float_type_node = build_complex_type (float_type_node, true);
10514 complex_double_type_node = build_complex_type (double_type_node, true);
10515 complex_long_double_type_node = build_complex_type (long_double_type_node,
10518 for (i = 0; i < NUM_FLOATN_NX_TYPES; i++)
10520 if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE)
10521 COMPLEX_FLOATN_NX_TYPE_NODE (i)
10522 = build_complex_type (FLOATN_NX_TYPE_NODE (i));
10525 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10526 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10527 sat_ ## KIND ## _type_node = \
10528 make_sat_signed_ ## KIND ## _type (SIZE); \
10529 sat_unsigned_ ## KIND ## _type_node = \
10530 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10531 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10532 unsigned_ ## KIND ## _type_node = \
10533 make_unsigned_ ## KIND ## _type (SIZE);
10535 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10536 sat_ ## WIDTH ## KIND ## _type_node = \
10537 make_sat_signed_ ## KIND ## _type (SIZE); \
10538 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10539 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10540 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10541 unsigned_ ## WIDTH ## KIND ## _type_node = \
10542 make_unsigned_ ## KIND ## _type (SIZE);
10544 /* Make fixed-point type nodes based on four different widths. */
10545 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10546 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10547 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10548 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10549 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10551 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10552 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10553 NAME ## _type_node = \
10554 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10555 u ## NAME ## _type_node = \
10556 make_or_reuse_unsigned_ ## KIND ## _type \
10557 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10558 sat_ ## NAME ## _type_node = \
10559 make_or_reuse_sat_signed_ ## KIND ## _type \
10560 (GET_MODE_BITSIZE (MODE ## mode)); \
10561 sat_u ## NAME ## _type_node = \
10562 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10563 (GET_MODE_BITSIZE (U ## MODE ## mode));
10565 /* Fixed-point type and mode nodes. */
10566 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10567 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10568 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10569 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10570 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10571 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10572 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10573 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10574 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10575 MAKE_FIXED_MODE_NODE (accum, da, DA)
10576 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10579 tree t = targetm.build_builtin_va_list ();
10581 /* Many back-ends define record types without setting TYPE_NAME.
10582 If we copied the record type here, we'd keep the original
10583 record type without a name. This breaks name mangling. So,
10584 don't copy record types and let c_common_nodes_and_builtins()
10585 declare the type to be __builtin_va_list. */
10586 if (TREE_CODE (t) != RECORD_TYPE)
10587 t = build_variant_type_copy (t);
10589 va_list_type_node = t;
10593 /* Modify DECL for given flags.
10594 TM_PURE attribute is set only on types, so the function will modify
10595 DECL's type when ECF_TM_PURE is used.  */
10598 set_call_expr_flags (tree decl, int flags)
/* Each ECF_* bit set in FLAGS is translated to the corresponding
   DECL_/TREE_ bit or to a named attribute on DECL; clear bits leave
   DECL untouched.  */
10600 if (flags & ECF_NOTHROW)
10601 TREE_NOTHROW (decl) = 1;
10602 if (flags & ECF_CONST)
10603 TREE_READONLY (decl) = 1;
10604 if (flags & ECF_PURE)
10605 DECL_PURE_P (decl) = 1;
10606 if (flags & ECF_LOOPING_CONST_OR_PURE)
10607 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10608 if (flags & ECF_NOVOPS)
10609 DECL_IS_NOVOPS (decl) = 1;
/* Noreturn is represented as TREE_THIS_VOLATILE on a FUNCTION_DECL.  */
10610 if (flags & ECF_NORETURN)
10611 TREE_THIS_VOLATILE (decl) = 1;
10612 if (flags & ECF_MALLOC)
10613 DECL_IS_MALLOC (decl) = 1;
10614 if (flags & ECF_RETURNS_TWICE)
10615 DECL_IS_RETURNS_TWICE (decl) = 1;
/* "leaf" and "cold" have no dedicated decl bit; they are recorded by
   consing attributes onto DECL_ATTRIBUTES.  */
10616 if (flags & ECF_LEAF)
10617 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10618 NULL, DECL_ATTRIBUTES (decl))
10619 if (flags & ECF_COLD)
10620 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("cold"),
10621 NULL, DECL_ATTRIBUTES (decl));
/* ECF_RET1 (function returns its first argument) is encoded as a
   "fn spec" attribute whose spec string is "1".  */
10622 if (flags & ECF_RET1)
10623 DECL_ATTRIBUTES (decl)
10624 = tree_cons (get_identifier ("fn spec"),
10625 build_tree_list (NULL_TREE, build_string (1, "1")),
10626 DECL_ATTRIBUTES (decl));
/* Only apply transaction_pure when the TM language feature is on.  */
10627 if ((flags & ECF_TM_PURE) && flag_tm)
10628 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10629 /* Looping const or pure is implied by noreturn.
10630 There is currently no way to declare looping const or looping pure alone.  */
10631 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10632 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10636 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10639 local_define_builtin (const char *name, tree type, enum built_in_function code,
10640 const char *library_name, int ecf_flags)
/* Register NAME as a BUILT_IN_NORMAL builtin with function TYPE,
   using LIBRARY_NAME as the fallback library symbol, then mark the
   new decl with the ECF_* bits in ECF_FLAGS.  */
10644 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10645 library_name, NULL_TREE);
10646 set_call_expr_flags (decl, ecf_flags);
/* Make the decl retrievable via builtin_decl_explicit/implicit (the
   'true' records it as implicitly available too).  */
10648 set_builtin_decl (code, decl, true);
10651 /* Call this function after instantiating all builtins that the language
10652 front end cares about. This will build the rest of the builtins
10653 and internal functions that are relied upon by the tree optimizers and
10657 build_common_builtin_nodes (void)
/* Each section below defines one middle-end builtin, guarded where a
   front end may already have registered its own version.  */
10662 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE)
10663 || !builtin_decl_explicit_p (BUILT_IN_ABORT))
10665 ftype = build_function_type (void_type_node, void_list_node);
10666 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10667 local_define_builtin ("__builtin_unreachable", ftype,
10668 BUILT_IN_UNREACHABLE,
10669 "__builtin_unreachable",
10670 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10671 | ECF_CONST | ECF_COLD);
10672 if (!builtin_decl_explicit_p (BUILT_IN_ABORT))
10673 local_define_builtin ("__builtin_abort", ftype, BUILT_IN_ABORT,
10675 ECF_LEAF | ECF_NORETURN | ECF_CONST | ECF_COLD);
/* Memory primitives.  memcpy/memmove/memset carry ECF_RET1 because
   they return their first argument; memcmp is PURE, not CONST, since
   it reads memory.  */
10678 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10679 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10681 ftype = build_function_type_list (ptr_type_node,
10682 ptr_type_node, const_ptr_type_node,
10683 size_type_node, NULL_TREE);
10685 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10686 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10687 "memcpy", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10688 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10689 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10690 "memmove", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10693 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10695 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10696 const_ptr_type_node, size_type_node,
10698 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10699 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10702 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10704 ftype = build_function_type_list (ptr_type_node,
10705 ptr_type_node, integer_type_node,
10706 size_type_node, NULL_TREE);
10707 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10708 "memset", ECF_NOTHROW | ECF_LEAF | ECF_RET1);
10711 /* If we're checking the stack, `alloca' can throw. */
10712 const int alloca_flags
10713 = ECF_MALLOC | ECF_LEAF | (flag_stack_check ? 0 : ECF_NOTHROW);
10715 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10717 ftype = build_function_type_list (ptr_type_node,
10718 size_type_node, NULL_TREE);
10719 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10720 "alloca", alloca_flags);
10723 ftype = build_function_type_list (ptr_type_node, size_type_node,
10724 size_type_node, NULL_TREE);
10725 local_define_builtin ("__builtin_alloca_with_align", ftype,
10726 BUILT_IN_ALLOCA_WITH_ALIGN,
10727 "__builtin_alloca_with_align",
/* Trampoline and descriptor support used for nested functions.  */
10730 ftype = build_function_type_list (void_type_node,
10731 ptr_type_node, ptr_type_node,
10732 ptr_type_node, NULL_TREE);
10733 local_define_builtin ("__builtin_init_trampoline", ftype,
10734 BUILT_IN_INIT_TRAMPOLINE,
10735 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10736 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10737 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10738 "__builtin_init_heap_trampoline",
10739 ECF_NOTHROW | ECF_LEAF);
10740 local_define_builtin ("__builtin_init_descriptor", ftype,
10741 BUILT_IN_INIT_DESCRIPTOR,
10742 "__builtin_init_descriptor", ECF_NOTHROW | ECF_LEAF);
10744 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10745 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10746 BUILT_IN_ADJUST_TRAMPOLINE,
10747 "__builtin_adjust_trampoline",
10748 ECF_CONST | ECF_NOTHROW);
10749 local_define_builtin ("__builtin_adjust_descriptor", ftype,
10750 BUILT_IN_ADJUST_DESCRIPTOR,
10751 "__builtin_adjust_descriptor",
10752 ECF_CONST | ECF_NOTHROW);
/* Non-local goto / setjmp lowering support.  */
10754 ftype = build_function_type_list (void_type_node,
10755 ptr_type_node, ptr_type_node, NULL_TREE);
10756 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10757 BUILT_IN_NONLOCAL_GOTO,
10758 "__builtin_nonlocal_goto",
10759 ECF_NORETURN | ECF_NOTHROW);
10761 ftype = build_function_type_list (void_type_node,
10762 ptr_type_node, ptr_type_node, NULL_TREE);
10763 local_define_builtin ("__builtin_setjmp_setup", ftype,
10764 BUILT_IN_SETJMP_SETUP,
10765 "__builtin_setjmp_setup", ECF_NOTHROW);
10767 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10768 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10769 BUILT_IN_SETJMP_RECEIVER,
10770 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10772 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10773 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10774 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10776 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10777 local_define_builtin ("__builtin_stack_restore", ftype,
10778 BUILT_IN_STACK_RESTORE,
10779 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
/* Equality-only memcmp variant the expander can lower more cheaply.  */
10781 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10782 const_ptr_type_node, size_type_node,
10784 local_define_builtin ("__builtin_memcmp_eq", ftype, BUILT_IN_MEMCMP_EQ,
10785 "__builtin_memcmp_eq",
10786 ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10788 /* If there's a possibility that we might use the ARM EABI, build the
10789 alternate __cxa_end_cleanup node used to resume from C++. */
10790 if (targetm.arm_eabi_unwinder)
10792 ftype = build_function_type_list (void_type_node, NULL_TREE);
10793 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10794 BUILT_IN_CXA_END_CLEANUP,
10795 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
/* The unwinder resume routine; SjLj or DWARF flavor is picked from the
   target's except_unwind_info hook.  */
10798 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10799 local_define_builtin ("__builtin_unwind_resume", ftype,
10800 BUILT_IN_UNWIND_RESUME,
10801 ((targetm_common.except_unwind_info (&global_options)
10803 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10806 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10808 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10810 local_define_builtin ("__builtin_return_address", ftype,
10811 BUILT_IN_RETURN_ADDRESS,
10812 "__builtin_return_address",
/* -finstrument-functions entry/exit hooks; no ECF flags since they
   may do arbitrary work.  */
10816 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10817 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10819 ftype = build_function_type_list (void_type_node, ptr_type_node,
10820 ptr_type_node, NULL_TREE);
10821 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10822 local_define_builtin ("__cyg_profile_func_enter", ftype,
10823 BUILT_IN_PROFILE_FUNC_ENTER,
10824 "__cyg_profile_func_enter", 0);
10825 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10826 local_define_builtin ("__cyg_profile_func_exit", ftype,
10827 BUILT_IN_PROFILE_FUNC_EXIT,
10828 "__cyg_profile_func_exit", 0);
10831 /* The exception object and filter values from the runtime. The argument
10832 must be zero before exception lowering, i.e. from the front end. After
10833 exception lowering, it will be the region number for the exception
10834 landing pad. These functions are PURE instead of CONST to prevent
10835 them from being hoisted past the exception edge that will initialize
10836 its value in the landing pad. */
10837 ftype = build_function_type_list (ptr_type_node,
10838 integer_type_node, NULL_TREE);
10839 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10840 /* Only use TM_PURE if we have TM language support. */
10841 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10842 ecf_flags |= ECF_TM_PURE;
10843 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10844 "__builtin_eh_pointer", ecf_flags);
10846 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10847 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10848 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10849 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10851 ftype = build_function_type_list (void_type_node,
10852 integer_type_node, integer_type_node,
10854 local_define_builtin ("__builtin_eh_copy_values", ftype,
10855 BUILT_IN_EH_COPY_VALUES,
10856 "__builtin_eh_copy_values", ECF_NOTHROW);
10858 /* Complex multiplication and division. These are handled as builtins
10859 rather than optabs because emit_library_call_value doesn't support
10860 complex. Further, we can do slightly better with folding these
10861 beasties if the real and complex parts of the arguments are separate. */
/* One __mul<mode>3/__div<mode>3 pair per complex float mode; the
   builtin code is computed by offsetting from the *_MIN enumerator.  */
10865 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10867 char mode_name_buf[4], *q;
10869 enum built_in_function mcode, dcode;
10870 tree type, inner_type;
10871 const char *prefix = "__";
10873 if (targetm.libfunc_gnu_prefix)
10876 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10879 inner_type = TREE_TYPE (type);
/* Signature: complex result from four scalar parts (re/im of each
   operand passed separately, see comment above).  */
10881 ftype = build_function_type_list (type, inner_type, inner_type,
10882 inner_type, inner_type, NULL_TREE);
10884 mcode = ((enum built_in_function)
10885 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10886 dcode = ((enum built_in_function)
10887 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
/* Lower-case the mode name into mode_name_buf for the libfunc name.  */
10889 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10893 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10895 local_define_builtin (built_in_names[mcode], ftype, mcode,
10896 built_in_names[mcode],
10897 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10899 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10901 local_define_builtin (built_in_names[dcode], ftype, dcode,
10902 built_in_names[dcode],
10903 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
/* Finally, register the internal functions (IFN_*).  */
10907 init_internal_fns ();
10910 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10913 If we requested a pointer to a vector, build up the pointers that
10914 we stripped off while looking for the inner type. Similarly for
10915 return values from functions.
10917 The argument TYPE is the top of the chain, and BOTTOM is the
10918 new type which we will point to. */
10921 reconstruct_complex_type (tree type, tree bottom)
/* Recursively rebuild each wrapper layer of TYPE around BOTTOM,
   preserving per-layer properties (mode, can-alias-all, domain,
   argument lists) and, at the end, TYPE's attributes and qualifiers.  */
10925 if (TREE_CODE (type) == POINTER_TYPE)
10927 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10928 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10929 TYPE_REF_CAN_ALIAS_ALL (type));
10931 else if (TREE_CODE (type) == REFERENCE_TYPE)
10933 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10934 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10935 TYPE_REF_CAN_ALIAS_ALL (type));
10937 else if (TREE_CODE (type) == ARRAY_TYPE)
10939 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10940 outer = build_array_type (inner, TYPE_DOMAIN (type));
/* For FUNCTION_TYPE/METHOD_TYPE the layer being rebuilt is the return
   type; the argument list is carried over unchanged.  */
10942 else if (TREE_CODE (type) == FUNCTION_TYPE)
10944 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10945 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10947 else if (TREE_CODE (type) == METHOD_TYPE)
10949 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10950 /* The build_method_type_directly() routine prepends 'this' to argument list,
10951 so we must compensate by getting rid of it. */
10953 = build_method_type_directly
10954 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10956 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10958 else if (TREE_CODE (type) == OFFSET_TYPE)
10960 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10961 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
/* NOTE(review): the final else arm (the recursion base case, where
   BOTTOM itself becomes 'outer') is elided in this listing.  */
10966 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10967 TYPE_QUALS (type));
10970 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10973 build_vector_type_for_mode (tree innertype, machine_mode mode)
/* Derive the element count from MODE, then build the vector type.
   Vector-class modes report the count directly; other modes (integer
   or BLKmode fallback) divide the mode's bit size by the element
   size, asserting the division is exact.  */
10977 switch (GET_MODE_CLASS (mode))
10979 case MODE_VECTOR_INT:
10980 case MODE_VECTOR_FLOAT:
10981 case MODE_VECTOR_FRACT:
10982 case MODE_VECTOR_UFRACT:
10983 case MODE_VECTOR_ACCUM:
10984 case MODE_VECTOR_UACCUM:
10985 nunits = GET_MODE_NUNITS (mode);
10989 /* Check that there are no leftover bits. */
10990 gcc_assert (GET_MODE_BITSIZE (mode)
10991 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10993 nunits = GET_MODE_BITSIZE (mode)
10994 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
/* Any other mode class is a caller error.  */
10998 gcc_unreachable ();
11001 return make_vector_type (innertype, nunits, mode);
11004 /* Similarly, but takes the inner type and number of units, which must be
11008 build_vector_type (tree innertype, int nunits)
11010 return make_vector_type (innertype, nunits, VOIDmode);
11013 /* Build truth vector with specified length and number of units. */
11016 build_truth_vector_type (unsigned nunits, unsigned vector_size)
/* Ask the target for the mask mode matching NUNITS elements of a
   VECTOR_SIZE-byte vector, then build a vector of boolean elements
   sized so that NUNITS of them exactly fill that mode.  */
11018 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
11021 gcc_assert (mask_mode != VOIDmode);
11023 unsigned HOST_WIDE_INT vsize;
/* BLKmode masks occupy the whole vector; real mask modes know their
   own bit size.  */
11024 if (mask_mode == BLKmode)
11025 vsize = vector_size * BITS_PER_UNIT;
11027 vsize = GET_MODE_BITSIZE (mask_mode);
/* Element size in bits; must divide the total evenly.  */
11029 unsigned HOST_WIDE_INT esize = vsize / nunits;
11030 gcc_assert (esize * nunits == vsize);
11032 tree bool_type = build_nonstandard_boolean_type (esize);
11034 return make_vector_type (bool_type, nunits, mask_mode);
11037 /* Returns a vector type corresponding to a comparison of VECTYPE. */
11040 build_same_sized_truth_vector_type (tree vectype)
/* A boolean vector compares to itself (the early return on that
   branch is elided in this listing); otherwise build a truth vector
   with VECTYPE's element count and byte size.  */
11042 if (VECTOR_BOOLEAN_TYPE_P (vectype))
/* Prefer the mode's size; fall back to TYPE_SIZE_UNIT when the mode
   does not give one.  */
11045 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
11048 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
11050 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
11053 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
11056 build_opaque_vector_type (tree innertype, int nunits)
11058 tree t = make_vector_type (innertype, nunits, VOIDmode);
11060 /* We always build the non-opaque variant before the opaque one,
11061 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
11062 cand = TYPE_NEXT_VARIANT (t);
11064 && TYPE_VECTOR_OPAQUE (cand)
11065 && check_qualified_type (cand, t, TYPE_QUALS (t)))
11067 /* Othewise build a variant type and make sure to queue it after
11068 the non-opaque type. */
11069 cand = build_distinct_type_copy (t);
11070 TYPE_VECTOR_OPAQUE (cand) = true;
/* Share the canonical type and splice the new variant into T's
   variant chain immediately after T.  */
11071 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
11072 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
11073 TYPE_NEXT_VARIANT (t) = cand;
11074 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
11079 /* Given an initializer INIT, return TRUE if INIT is zero or some
11080 aggregate of zeros. Otherwise return FALSE. */
11082 initializer_zerop (const_tree init)
/* Dispatch on the constant's tree code; the case labels themselves
   are elided in this listing, but each arm below handles one constant
   kind (integer, real, fixed-point, complex, vector, constructor,
   string) in that order.  */
11088 switch (TREE_CODE (init))
11091 return integer_zerop (init);
11094 /* ??? Note that this is not correct for C4X float formats. There,
11095 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
11096 negative exponent. */
/* Reject -0.0: it is not an all-zero bit pattern.  */
11097 return real_zerop (init)
11098 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
11101 return fixed_zerop (init);
/* Complex: both parts must be zero and neither real part -0.0.  */
11104 return integer_zerop (init)
11105 || (real_zerop (init)
11106 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
11107 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
/* Vector: every element must be zero.  */
11112 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
11113 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
11120 unsigned HOST_WIDE_INT idx;
/* Clobbers are treated specially; otherwise every constructor value
   must itself be a zero initializer.  */
11122 if (TREE_CLOBBER_P (init))
11124 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
11125 if (!initializer_zerop (elt))
11134 /* We need to loop through all elements to handle cases like
11135 "\0" and "\0foobar". */
11136 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
11137 if (TREE_STRING_POINTER (init)[i] != '\0')
11148 /* Check if vector VEC consists of all the equal elements and
11149 that the number of elements corresponds to the type of VEC.
11150 The function returns first element of the vector
11151 or NULL_TREE if the vector is not uniform. */
11153 uniform_vector_p (const_tree vec)
11158 if (vec == NULL_TREE)
11161 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
/* VECTOR_CST: compare every element against the first.  */
11163 if (TREE_CODE (vec) == VECTOR_CST)
11165 first = VECTOR_CST_ELT (vec, 0);
11166 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
11167 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
/* CONSTRUCTOR: same idea, and additionally require that the number of
   constructor values matches the type's subpart count.  */
11173 else if (TREE_CODE (vec) == CONSTRUCTOR)
11175 first = error_mark_node;
11177 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
11184 if (!operand_equal_p (first, t, 0))
11187 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
11196 /* Build an empty statement at location LOC. */
11199 build_empty_stmt (location_t loc)
/* An empty statement is a void NOP_EXPR over size_zero_node, tagged
   with LOC for diagnostics.  */
11201 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
11202 SET_EXPR_LOCATION (t, loc);
11207 /* Build an OpenMP clause with code CODE. LOC is the location of the
11211 build_omp_clause (location_t loc, enum omp_clause_code code)
/* OMP clauses are variable-sized: the table omp_clause_num_ops gives
   the operand count for CODE, and the struct already embeds one
   operand slot, hence the (length - 1) term.  */
11216 length = omp_clause_num_ops[code];
11217 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
11219 record_node_allocation_statistics (OMP_CLAUSE, size);
/* Zeroed GC allocation, then stamp code, sub-code and location.  */
11221 t = (tree) ggc_internal_alloc (size);
11222 memset (t, 0, size);
11223 TREE_SET_CODE (t, OMP_CLAUSE);
11224 OMP_CLAUSE_SET_CODE (t, code);
11225 OMP_CLAUSE_LOCATION (t) = loc;
11230 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
11231 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
11232 Except for the CODE and operand count field, other storage for the
11233 object is initialized to zeros. */
11236 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
/* struct tree_exp already embeds one operand, so only (len - 1) extra
   slots need to be allocated.  */
11239 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
11241 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
11242 gcc_assert (len >= 1);
11244 record_node_allocation_statistics (code, length);
11246 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
11248 TREE_SET_CODE (t, code);
11250 /* Can't use TREE_OPERAND to store the length because if checking is
11251 enabled, it will try to check the length before we store it. :-P */
11252 t->exp.operands[0] = build_int_cst (sizetype, len);
11257 /* Helper function for build_call_* functions; build a CALL_EXPR with
11258 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
11259 the argument slots. */
11262 build_call_1 (tree return_type, tree fn, int nargs)
/* CALL_EXPR reserves 3 fixed operands (length, fn, static chain)
   ahead of the argument slots, hence nargs + 3.  */
11266 t = build_vl_exp (CALL_EXPR, nargs + 3);
11267 TREE_TYPE (t) = return_type;
11268 CALL_EXPR_FN (t) = fn;
11269 CALL_EXPR_STATIC_CHAIN (t) = NULL;
11274 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11275 FN and a null static chain slot. NARGS is the number of call arguments
11276 which are specified as "..." arguments. */
11279 build_call_nary (tree return_type, tree fn, int nargs, ...)
/* Thin varargs wrapper: gather the "..." into a va_list and delegate
   to build_call_valist.  */
11283 va_start (args, nargs);
11284 ret = build_call_valist (return_type, fn, nargs, args);
11289 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11290 FN and a null static chain slot. NARGS is the number of call arguments
11291 which are specified as a va_list ARGS. */
11294 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
/* Allocate the bare CALL_EXPR, copy the NARGS arguments out of ARGS,
   then derive the call's flags/side-effect bits from its operands.  */
11299 t = build_call_1 (return_type, fn, nargs);
11300 for (i = 0; i < nargs; i++)
11301 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11302 process_call_operands (t);
11306 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11307 FN and a null static chain slot. NARGS is the number of call arguments
11308 which are specified as a tree array ARGS. */
11311 build_call_array_loc (location_t loc, tree return_type, tree fn,
11312 int nargs, const tree *args)
11317 t = build_call_1 (return_type, fn, nargs);
11318 for (i = 0; i < nargs; i++)
11319 CALL_EXPR_ARG (t, i) = args[i];
11320 process_call_operands (t);
/* Unlike build_call_valist, this variant also records LOC on the node.  */
11321 SET_EXPR_LOCATION (t, loc);
11325 /* Like build_call_array, but takes a vec. */
/* ARGS may be NULL; the vec_safe_* accessors treat that as empty.  */
11328 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11333 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11334 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11335 CALL_EXPR_ARG (ret, ix) = t;
11336 process_call_operands (ret);
11340 /* Conveniently construct a function call expression. FNDECL names the
11341 function to be called and N arguments are passed in the array
11345 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11347 tree fntype = TREE_TYPE (fndecl);
/* Take FNDECL's address and build+fold the call in one step.  */
11348 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11350 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11353 /* Conveniently construct a function call expression. FNDECL names the
11354 function to be called and the arguments are passed in the vector
11358 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
/* Hand the vec's backing storage to the array-based variant;
   VEC may be NULL (vec_safe_* treat that as empty).  */
11360 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11361 vec_safe_address (vec));
11365 /* Conveniently construct a function call expression. FNDECL names the
11366 function to be called, N is the number of arguments, and the "..."
11367 parameters are the argument expressions. */
11370 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
/* Gather the "..." arguments into a stack-allocated array, then
   delegate to the array-based variant.  */
11373 tree *argarray = XALLOCAVEC (tree, n);
11377 for (i = 0; i < n; i++)
11378 argarray[i] = va_arg (ap, tree);
11380 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11383 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11384 varargs macros aren't supported by all bootstrap compilers. */
11387 build_call_expr (tree fndecl, int n, ...)
11390 tree *argarray = XALLOCAVEC (tree, n);
11394 for (i = 0; i < n; i++)
11395 argarray[i] = va_arg (ap, tree);
/* Identical to build_call_expr_loc but with no source location.  */
11397 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11400 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11401 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11402 It will get gimplified later into an ordinary internal function. */
11405 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11406 tree type, int n, const tree *args)
/* Internal calls have a NULL_TREE callee; the internal-fn code is
   recorded separately via CALL_EXPR_IFN below.  */
11408 tree t = build_call_1 (type, NULL_TREE, n);
11409 for (int i = 0; i < n; ++i)
11410 CALL_EXPR_ARG (t, i) = args[i];
11411 SET_EXPR_LOCATION (t, loc);
11412 CALL_EXPR_IFN (t) = ifn;
11416 /* Build internal call expression. This is just like CALL_EXPR, except
11417 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
11418 internal function. */
11421 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11422 tree type, int n, ...)
11425 tree *argarray = XALLOCAVEC (tree, n);
11429 for (i = 0; i < n; i++)
11430 argarray[i] = va_arg (ap, tree);
/* Varargs collected into ARGARRAY and handed to the array variant.  */
11432 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11435 /* Return a function call to FN, if the target is guaranteed to support it,
11438 N is the number of arguments, passed in the "...", and TYPE is the
11439 type of the return value. */
11442 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11446 tree *argarray = XALLOCAVEC (tree, n);
11450 for (i = 0; i < n; i++)
11451 argarray[i] = va_arg (ap, tree);
/* Internal functions: only emit a call when the target directly
   supports the ifn for these operand types (per the header comment,
   no call is built otherwise).  */
11453 if (internal_fn_p (fn))
11455 internal_fn ifn = as_internal_fn (fn);
11456 if (direct_internal_fn_p (ifn))
11458 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11459 if (!direct_internal_fn_supported_p (ifn, types,
11460 OPTIMIZE_FOR_BOTH))
11463 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
/* Otherwise fall back to the implicit builtin declaration.  */
11467 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11470 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11474 /* Create a new constant string literal and return a char* pointer to it.
11475 The STRING_CST value is the LEN characters at STR. */
11477 build_string_literal (int len, const char *str)
11479 tree t, elem, index, type;
11481 t = build_string (len, str);
/* Give the STRING_CST the type "const char[LEN]" and mark it
   constant, read-only and static.  */
11482 elem = build_type_variant (char_type_node, 1, 0);
11483 index = build_index_type (size_int (len - 1));
11484 type = build_array_type (elem, index);
11485 TREE_TYPE (t) = type;
11486 TREE_CONSTANT (t) = 1;
11487 TREE_READONLY (t) = 1;
11488 TREE_STATIC (t) = 1;
/* The result is effectively &literal[0], i.e. a const char *.  */
11490 type = build_pointer_type (elem);
11491 t = build1 (ADDR_EXPR, type,
11492 build4 (ARRAY_REF, elem,
11493 t, integer_zero_node, NULL_TREE, NULL_TREE));
/* needs_to_live_in_memory: true when DECL T requires a memory slot --
   it is address-taken, a global, or an aggregate-valued RESULT_DECL
   that is not returned by invisible reference.  */
11499 /* Return true if T (assumed to be a DECL) must be assigned a memory
11503 needs_to_live_in_memory (const_tree t)
11505 return (TREE_ADDRESSABLE (t)
11506 || is_global_var (t)
11507 || (TREE_CODE (t) == RESULT_DECL
11508 && !DECL_BY_REFERENCE (t)
11509 && aggregate_value_p (t, current_function_decl)));
11512 /* Return value of a constant X and sign-extend it. */
11515 int_cst_value (const_tree x)
11517 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11518 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11520 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11521 gcc_assert (cst_and_fits_in_hwi (x));
11523 if (bits < HOST_BITS_PER_WIDE_INT)
/* Sign-extend the BITS-wide value: set the upper bits when the sign
   bit is set, clear them otherwise.  */
11525 bool negative = ((val >> (bits - 1)) & 1) != 0;
11527 val |= HOST_WIDE_INT_M1U << (bits - 1) << 1;
11529 val &= ~(HOST_WIDE_INT_M1U << (bits - 1) << 1);
11535 /* If TYPE is an integral or pointer type, return an integer type with
11536 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11537 if TYPE is already an integer type of signedness UNSIGNEDP. */
11540 signed_or_unsigned_type_for (int unsignedp, tree type)
11542 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
/* Vectors recurse on the element type and rebuild the vector when the
   element changed.  */
11545 if (TREE_CODE (type) == VECTOR_TYPE)
11547 tree inner = TREE_TYPE (type);
11548 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11551 if (inner == inner2)
11553 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
/* Anything that is not integral, pointer or offset cannot be
   converted here.  */
11556 if (!INTEGRAL_TYPE_P (type)
11557 && !POINTER_TYPE_P (type)
11558 && TREE_CODE (type) != OFFSET_TYPE)
11561 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11564 /* If TYPE is an integral or pointer type, return an integer type with
11565 the same precision which is unsigned, or itself if TYPE is already an
11566 unsigned integer type. */
11569 unsigned_type_for (tree type)
/* Thin wrapper: UNSIGNEDP == 1.  */
11571 return signed_or_unsigned_type_for (1, type);
11574 /* If TYPE is an integral or pointer type, return an integer type with
11575 the same precision which is signed, or itself if TYPE is already a
11576 signed integer type. */
11579 signed_type_for (tree type)
/* Thin wrapper: UNSIGNEDP == 0.  */
11581 return signed_or_unsigned_type_for (0, type);
11584 /* If TYPE is a vector type, return a signed integer vector type with the
11585 same width and number of subparts. Otherwise return boolean_type_node. */
11588 truth_type_for (tree type)
11590 if (TREE_CODE (type) == VECTOR_TYPE)
11592 if (VECTOR_BOOLEAN_TYPE_P (type))
/* Build a boolean vector matching TYPE's lane count and total
   mode size.  */
11594 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11595 GET_MODE_SIZE (TYPE_MODE (type)));
11598 return boolean_type_node;
/* upper_bound_in_type: largest value obtainable by converting a value
   of INNER type to OUTER type.  DET below uniquely encodes the
   (precision relation, signedness of OUTER, signedness of INNER)
   triple; the switch bodies selecting PREC are missing from this
   extracted fragment.  */
11601 /* Returns the largest value obtainable by casting something in INNER type to
11605 upper_bound_in_type (tree outer, tree inner)
11607 unsigned int det = 0;
11608 unsigned oprec = TYPE_PRECISION (outer);
11609 unsigned iprec = TYPE_PRECISION (inner);
11612 /* Compute a unique number for every combination. */
11613 det |= (oprec > iprec) ? 4 : 0;
11614 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11615 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11617 /* Determine the exponent to use. */
11622 /* oprec <= iprec, outer: signed, inner: don't care. */
11627 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11631 /* oprec > iprec, outer: signed, inner: signed. */
11635 /* oprec > iprec, outer: signed, inner: unsigned. */
11639 /* oprec > iprec, outer: unsigned, inner: signed. */
11643 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11647 gcc_unreachable ();
11650 return wide_int_to_tree (outer,
11651 wi::mask (prec, false, TYPE_PRECISION (outer)));
/* lower_bound_in_type: smallest value obtainable by converting a value
   of INNER type to OUTER type (zero for the unsigned cases, a negative
   power of two otherwise).  */
11654 /* Returns the smallest value obtainable by casting something in INNER type to
11658 lower_bound_in_type (tree outer, tree inner)
11660 unsigned oprec = TYPE_PRECISION (outer);
11661 unsigned iprec = TYPE_PRECISION (inner);
11663 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11665 if (TYPE_UNSIGNED (outer)
11666 /* If we are widening something of an unsigned type, OUTER type
11667 contains all values of INNER type. In particular, both INNER
11668 and OUTER types have zero in common. */
11669 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11670 return build_int_cst (outer, 0);
11673 /* If we are widening a signed type to another signed type, we
11674 want to obtain -2^^(iprec-1). If we are keeping the
11675 precision or narrowing to a signed type, we want to obtain
11677 unsigned prec = oprec > iprec ? iprec : oprec;
11678 return wide_int_to_tree (outer,
11679 wi::mask (prec - 1, true,
11680 TYPE_PRECISION (outer)));
11684 /* Return nonzero if two operands that are suitable for PHI nodes are
11685 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11686 SSA_NAME or invariant. Note that this is strictly an optimization.
11687 That is, callers of this function can directly call operand_equal_p
11688 and get the same result, only slower. */
11691 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
/* NOTE(review): the body of the SSA_NAME fast-path branch is not
   visible in this fragment; the fall-through is a full structural
   comparison.  */
11695 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11697 return operand_equal_p (arg0, arg1, 0);
11700 /* Returns number of zeros at the end of binary representation of X. */
11703 num_ending_zeros (const_tree x)
/* wi::ctz counts the trailing zero bits of the INTEGER_CST X.  */
11705 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
/* WALK_SUBTREE: recurse into NODE via walk_tree_1 and propagate a
   non-NULL result to the caller.  NOTE(review): part of the macro body
   is missing from this extracted fragment.  */
11709 #define WALK_SUBTREE(NODE) \
11712 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11718 /* This is a subroutine of walk_tree that walks field of TYPE that are to
11719 be walked whenever a type is seen in the tree. Rest of operands and return
11720 value are as for walk_tree. */
11723 walk_type_fields (tree type, walk_tree_fn func, void *data,
11724 hash_set<tree> *pset, walk_tree_lh lh)
11726 tree result = NULL_TREE;
11728 switch (TREE_CODE (type))
11731 case REFERENCE_TYPE:
11733 /* We have to worry about mutually recursive pointers. These can't
11734 be written in C. They can in Ada. It's pathological, but
11735 there's an ACATS test (c38102a) that checks it. Deal with this
11736 by checking if we're pointing to another pointer, that one
11737 points to another pointer, that one does too, and we have no htab.
11738 If so, get a hash table. We check three levels deep to avoid
11739 the cost of the hash table if we don't need one. */
11740 if (POINTER_TYPE_P (TREE_TYPE (type))
11741 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11742 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11745 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11756 WALK_SUBTREE (TREE_TYPE (type));
11760 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11762 /* Fall through. */
11764 case FUNCTION_TYPE:
11765 WALK_SUBTREE (TREE_TYPE (type));
11769 /* We never want to walk into default arguments. */
11770 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11771 WALK_SUBTREE (TREE_VALUE (arg));
11776 /* Don't follow this nodes's type if a pointer for fear that
11777 we'll have infinite recursion. If we have a PSET, then we
11780 || (!POINTER_TYPE_P (TREE_TYPE (type))
11781 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11782 WALK_SUBTREE (TREE_TYPE (type));
11783 WALK_SUBTREE (TYPE_DOMAIN (type));
11787 WALK_SUBTREE (TREE_TYPE (type));
11788 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
/* walk_tree_1: the pre-order traversal engine behind walk_tree.
   NOTE(review): this fragment was extracted with gaps -- several
   original lines (braces, declarations, some case labels and returns)
   are missing; all tokens below are unchanged.  WALK_SUBTREE_TAIL
   implements tail recursion via goto to avoid deep native recursion
   on long chains.  */
11798 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11799 called with the DATA and the address of each sub-tree. If FUNC returns a
11800 non-NULL value, the traversal is stopped, and the value returned by FUNC
11801 is returned. If PSET is non-NULL it is used to record the nodes visited,
11802 and to avoid visiting a node more than once. */
11805 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11806 hash_set<tree> *pset, walk_tree_lh lh)
11808 enum tree_code code;
11812 #define WALK_SUBTREE_TAIL(NODE) \
11816 goto tail_recurse; \
11821 /* Skip empty subtrees. */
11825 /* Don't walk the same tree twice, if the user has requested
11826 that we avoid doing so. */
11827 if (pset && pset->add (*tp))
11830 /* Call the function. */
11832 result = (*func) (tp, &walk_subtrees, data);
11834 /* If we found something, return it. */
11838 code = TREE_CODE (*tp);
11840 /* Even if we didn't, FUNC may have decided that there was nothing
11841 interesting below this point in the tree. */
11842 if (!walk_subtrees)
11844 /* But we still need to check our siblings. */
11845 if (code == TREE_LIST)
11846 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11847 else if (code == OMP_CLAUSE)
11848 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11855 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11856 if (result || !walk_subtrees)
11863 case IDENTIFIER_NODE:
11870 case PLACEHOLDER_EXPR:
11874 /* None of these have subtrees other than those already walked
11879 WALK_SUBTREE (TREE_VALUE (*tp));
11880 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11885 int len = TREE_VEC_LENGTH (*tp);
11890 /* Walk all elements but the first. */
11892 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11894 /* Now walk the first one as a tail call. */
11895 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11899 WALK_SUBTREE (TREE_REALPART (*tp));
11900 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11904 unsigned HOST_WIDE_INT idx;
11905 constructor_elt *ce;
11907 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11909 WALK_SUBTREE (ce->value);
11914 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11919 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11921 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11922 into declarations that are just mentioned, rather than
11923 declared; they don't really belong to this part of the tree.
11924 And, we can see cycles: the initializer for a declaration
11925 can refer to the declaration itself. */
11926 WALK_SUBTREE (DECL_INITIAL (decl));
11927 WALK_SUBTREE (DECL_SIZE (decl));
11928 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11930 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11933 case STATEMENT_LIST:
11935 tree_stmt_iterator i;
11936 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11937 WALK_SUBTREE (*tsi_stmt_ptr (i));
11942 switch (OMP_CLAUSE_CODE (*tp))
11944 case OMP_CLAUSE_GANG:
11945 case OMP_CLAUSE__GRIDDIM_:
11946 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11949 case OMP_CLAUSE_ASYNC:
11950 case OMP_CLAUSE_WAIT:
11951 case OMP_CLAUSE_WORKER:
11952 case OMP_CLAUSE_VECTOR:
11953 case OMP_CLAUSE_NUM_GANGS:
11954 case OMP_CLAUSE_NUM_WORKERS:
11955 case OMP_CLAUSE_VECTOR_LENGTH:
11956 case OMP_CLAUSE_PRIVATE:
11957 case OMP_CLAUSE_SHARED:
11958 case OMP_CLAUSE_FIRSTPRIVATE:
11959 case OMP_CLAUSE_COPYIN:
11960 case OMP_CLAUSE_COPYPRIVATE:
11961 case OMP_CLAUSE_FINAL:
11962 case OMP_CLAUSE_IF:
11963 case OMP_CLAUSE_NUM_THREADS:
11964 case OMP_CLAUSE_SCHEDULE:
11965 case OMP_CLAUSE_UNIFORM:
11966 case OMP_CLAUSE_DEPEND:
11967 case OMP_CLAUSE_NUM_TEAMS:
11968 case OMP_CLAUSE_THREAD_LIMIT:
11969 case OMP_CLAUSE_DEVICE:
11970 case OMP_CLAUSE_DIST_SCHEDULE:
11971 case OMP_CLAUSE_SAFELEN:
11972 case OMP_CLAUSE_SIMDLEN:
11973 case OMP_CLAUSE_ORDERED:
11974 case OMP_CLAUSE_PRIORITY:
11975 case OMP_CLAUSE_GRAINSIZE:
11976 case OMP_CLAUSE_NUM_TASKS:
11977 case OMP_CLAUSE_HINT:
11978 case OMP_CLAUSE_TO_DECLARE:
11979 case OMP_CLAUSE_LINK:
11980 case OMP_CLAUSE_USE_DEVICE_PTR:
11981 case OMP_CLAUSE_IS_DEVICE_PTR:
11982 case OMP_CLAUSE__LOOPTEMP_:
11983 case OMP_CLAUSE__SIMDUID_:
11984 case OMP_CLAUSE__CILK_FOR_COUNT_:
11985 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11988 case OMP_CLAUSE_INDEPENDENT:
11989 case OMP_CLAUSE_NOWAIT:
11990 case OMP_CLAUSE_DEFAULT:
11991 case OMP_CLAUSE_UNTIED:
11992 case OMP_CLAUSE_MERGEABLE:
11993 case OMP_CLAUSE_PROC_BIND:
11994 case OMP_CLAUSE_INBRANCH:
11995 case OMP_CLAUSE_NOTINBRANCH:
11996 case OMP_CLAUSE_FOR:
11997 case OMP_CLAUSE_PARALLEL:
11998 case OMP_CLAUSE_SECTIONS:
11999 case OMP_CLAUSE_TASKGROUP:
12000 case OMP_CLAUSE_NOGROUP:
12001 case OMP_CLAUSE_THREADS:
12002 case OMP_CLAUSE_SIMD:
12003 case OMP_CLAUSE_DEFAULTMAP:
12004 case OMP_CLAUSE_AUTO:
12005 case OMP_CLAUSE_SEQ:
12006 case OMP_CLAUSE_TILE:
12007 case OMP_CLAUSE__SIMT_:
12008 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12010 case OMP_CLAUSE_LASTPRIVATE:
12011 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12012 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
12013 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12015 case OMP_CLAUSE_COLLAPSE:
12018 for (i = 0; i < 3; i++)
12019 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12020 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12023 case OMP_CLAUSE_LINEAR:
12024 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12025 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
12026 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
12027 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12029 case OMP_CLAUSE_ALIGNED:
12030 case OMP_CLAUSE_FROM:
12031 case OMP_CLAUSE_TO:
12032 case OMP_CLAUSE_MAP:
12033 case OMP_CLAUSE__CACHE_:
12034 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
12035 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
12036 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12038 case OMP_CLAUSE_REDUCTION:
12041 for (i = 0; i < 5; i++)
12042 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
12043 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
12047 gcc_unreachable ();
12055 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
12056 But, we only want to walk once. */
12057 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
12058 for (i = 0; i < len; ++i)
12059 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12060 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
12064 /* If this is a TYPE_DECL, walk into the fields of the type that it's
12065 defining. We only want to walk into these fields of a type in this
12066 case and not in the general case of a mere reference to the type.
12068 The criterion is as follows: if the field can be an expression, it
12069 must be walked only here. This should be in keeping with the fields
12070 that are directly gimplified in gimplify_type_sizes in order for the
12071 mark/copy-if-shared/unmark machinery of the gimplifier to work with
12072 variable-sized types.
12074 Note that DECLs get walked as part of processing the BIND_EXPR. */
12075 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
12077 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
12078 if (TREE_CODE (*type_p) == ERROR_MARK)
12081 /* Call the function for the type. See if it returns anything or
12082 doesn't want us to continue. If we are to continue, walk both
12083 the normal fields and those for the declaration case. */
12084 result = (*func) (type_p, &walk_subtrees, data);
12085 if (result || !walk_subtrees)
12088 /* But do not walk a pointed-to type since it may itself need to
12089 be walked in the declaration case if it isn't anonymous. */
12090 if (!POINTER_TYPE_P (*type_p))
12092 result = walk_type_fields (*type_p, func, data, pset, lh);
12097 /* If this is a record type, also walk the fields. */
12098 if (RECORD_OR_UNION_TYPE_P (*type_p))
12102 for (field = TYPE_FIELDS (*type_p); field;
12103 field = DECL_CHAIN (field))
12105 /* We'd like to look at the type of the field, but we can
12106 easily get infinite recursion. So assume it's pointed
12107 to elsewhere in the tree. Also, ignore things that
12109 if (TREE_CODE (field) != FIELD_DECL)
12112 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
12113 WALK_SUBTREE (DECL_SIZE (field));
12114 WALK_SUBTREE (DECL_SIZE_UNIT (field));
12115 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
12116 WALK_SUBTREE (DECL_QUALIFIER (field));
12120 /* Same for scalar types. */
12121 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
12122 || TREE_CODE (*type_p) == ENUMERAL_TYPE
12123 || TREE_CODE (*type_p) == INTEGER_TYPE
12124 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
12125 || TREE_CODE (*type_p) == REAL_TYPE)
12127 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
12128 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
12131 WALK_SUBTREE (TYPE_SIZE (*type_p));
12132 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
12137 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
12141 /* Walk over all the sub-trees of this operand. */
12142 len = TREE_OPERAND_LENGTH (*tp);
12144 /* Go through the subtrees. We need to do this in forward order so
12145 that the scope of a FOR_EXPR is handled properly. */
12148 for (i = 0; i < len - 1; ++i)
12149 WALK_SUBTREE (TREE_OPERAND (*tp, i));
12150 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
12153 /* If this is a type, walk the needed fields in the type. */
12154 else if (TYPE_P (*tp))
12155 return walk_type_fields (*tp, func, data, pset, lh);
12159 /* We didn't find what we were looking for. */
12162 #undef WALK_SUBTREE_TAIL
12164 #undef WALK_SUBTREE
12166 /* Like walk_tree, but does not walk duplicate nodes more than once. */
12169 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
/* A function-local pset guarantees each node is visited at most
   once; the walk's result is returned (return not visible here).  */
12174 hash_set<tree> pset;
12175 result = walk_tree_1 (tp, func, data, &pset, lh);
/* tree_block: return the BLOCK recorded in expression T's combined
   location; non-expression codes are handled by lines missing from
   this fragment before the gcc_unreachable.  */
12181 tree_block (tree t)
12183 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12185 if (IS_EXPR_CODE_CLASS (c))
12186 return LOCATION_BLOCK (t->exp.locus);
12187 gcc_unreachable ();
/* tree_set_block: store block B into expression T's combined
   location via set_block.  */
12192 tree_set_block (tree t, tree b)
12194 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
12196 if (IS_EXPR_CODE_CLASS (c))
12198 t->exp.locus = set_block (t->exp.locus, b);
12201 gcc_unreachable ();
12204 /* Create a nameless artificial label and put it in the current
12205 function context. The label has a location of LOC. Returns the
12206 newly created label. */
12209 create_artificial_label (location_t loc)
12211 tree lab = build_decl (loc,
12212 LABEL_DECL, NULL_TREE, void_type_node);
/* Compiler-generated, hidden from debug output, and owned by the
   current function.  */
12214 DECL_ARTIFICIAL (lab) = 1;
12215 DECL_IGNORED_P (lab) = 1;
12216 DECL_CONTEXT (lab) = current_function_decl;
12220 /* Given a tree, try to return a useful variable name that we can use
12221 to prefix a temporary that is being assigned the value of the tree.
12222 I.E. given <temp> = &A, return A. */
/* NOTE(review): the function's signature line is missing from this
   extracted fragment.  The visible body strips no-op conversions, then
   tries DECL_NAME, then SSA_NAME_IDENTIFIER, and finally recurses on
   operand 0 for certain codes.  */
12227 tree stripped_decl;
12230 STRIP_NOPS (stripped_decl);
12231 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
12232 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
12233 else if (TREE_CODE (stripped_decl) == SSA_NAME)
12235 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
12238 return IDENTIFIER_POINTER (name);
12242 switch (TREE_CODE (stripped_decl))
12245 return get_name (TREE_OPERAND (stripped_decl, 0));
12252 /* Return true if TYPE has a variable argument list. */
12255 stdarg_p (const_tree fntype)
12257 function_args_iterator args_iter;
12258 tree n = NULL_TREE, t;
/* N ends up as the last argument type seen; an argument list that
   does not terminate in void_type_node indicates "...".  */
12263 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
12268 return n != NULL_TREE && n != void_type_node;
12271 /* Return true if TYPE has a prototype. */
12274 prototype_p (const_tree fntype)
/* A non-NULL TYPE_ARG_TYPES list is what distinguishes a prototype.  */
12278 gcc_assert (fntype != NULL_TREE);
12280 t = TYPE_ARG_TYPES (fntype);
12281 return (t != NULL_TREE);
/* block_nonartificial_location: walk up BLOCK's abstract-origin chain;
   whenever an inlined "artificial" inline function is found, remember
   the call-site location and keep climbing.  Returns a pointer to that
   location (function tail is not visible in this fragment).  */
12284 /* If BLOCK is inlined from an __attribute__((__artificial__))
12285 routine, return pointer to location from where it has been
12288 block_nonartificial_location (tree block)
12290 location_t *ret = NULL;
12292 while (block && TREE_CODE (block) == BLOCK
12293 && BLOCK_ABSTRACT_ORIGIN (block))
12295 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12297 while (TREE_CODE (ao) == BLOCK
12298 && BLOCK_ABSTRACT_ORIGIN (ao)
12299 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
12300 ao = BLOCK_ABSTRACT_ORIGIN (ao);
12302 if (TREE_CODE (ao) == FUNCTION_DECL)
12304 /* If AO is an artificial inline, point RET to the
12305 call site locus at which it has been inlined and continue
12306 the loop, in case AO's caller is also an artificial
12308 if (DECL_DECLARED_INLINE_P (ao)
12309 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12310 ret = &BLOCK_SOURCE_LOCATION (block);
12314 else if (TREE_CODE (ao) != BLOCK)
12317 block = BLOCK_SUPERCONTEXT (block);
12323 /* If EXP is inlined from an __attribute__((__artificial__))
12324 function, return the location of the original call expression. */
12327 tree_nonartificial_location (tree exp)
12329 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
/* Fall back to EXP's own location when no artificial inlining was
   found.  */
12334 return EXPR_LOCATION (exp);
/* cl_option_hasher::hash: hash an OPTIMIZATION_NODE by mixing the raw
   bytes of its cl_optimization struct; TARGET_OPTION_NODEs delegate to
   cl_target_option_hash.  */
12338 /* These are the hash table functions for the hash table of OPTIMIZATION_NODEq
12341 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
12344 cl_option_hasher::hash (tree x)
12346 const_tree const t = x;
12350 hashval_t hash = 0;
12352 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12354 p = (const char *)TREE_OPTIMIZATION (t);
12355 len = sizeof (struct cl_optimization);
12358 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12359 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12362 gcc_unreachable ();
12364 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
12366 for (i = 0; i < len; i++)
12368 hash = (hash << 4) ^ ((i << 2) | p[i]);
/* cl_option_hasher::equal: byte-wise comparison for OPTIMIZATION_NODEs;
   TARGET_OPTION_NODEs delegate to cl_target_option_eq.  Nodes of
   differing codes are never equal.  */
12373 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12374 TARGET_OPTION tree node) is the same as that given by *Y, which is the
12378 cl_option_hasher::equal (tree x, tree y)
12380 const_tree const xt = x;
12381 const_tree const yt = y;
12386 if (TREE_CODE (xt) != TREE_CODE (yt))
12389 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12391 xp = (const char *)TREE_OPTIMIZATION (xt);
12392 yp = (const char *)TREE_OPTIMIZATION (yt);
12393 len = sizeof (struct cl_optimization);
12396 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12398 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12399 TREE_TARGET_OPTION (yt));
12403 gcc_unreachable ();
12405 return (memcmp (xp, yp, len) == 0);
12408 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12411 build_optimization_node (struct gcc_options *opts)
12415 /* Use the cache of optimization nodes. */
12417 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
/* The scratch node cl_optimization_node doubles as the lookup key;
   on a cache miss it is inserted and a fresh scratch node is made.  */
12420 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12424 /* Insert this one into the hash table. */
12425 t = cl_optimization_node;
12428 /* Make a new node for next time round. */
12429 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12435 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12438 build_target_option_node (struct gcc_options *opts)
12442 /* Use the cache of optimization nodes. */
12444 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
/* Mirrors build_optimization_node: the scratch node is the lookup
   key, replaced with a fresh one after an insertion.  */
12447 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12451 /* Insert this one into the hash table. */
12452 t = cl_target_option_node;
12455 /* Make a new node for next time round. */
12456 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12462 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12463 so that they aren't saved during PCH writing. */
12466 prepare_target_option_nodes_for_pch (void)
12468 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12469 for (; iter != cl_option_hash_table->end (); ++iter)
12470 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
/* Per the header comment, these pointers must not end up in a PCH.  */
12471 TREE_TARGET_GLOBALS (*iter) = NULL;
12474 /* Determine the "ultimate origin" of a block. The block may be an inlined
12475 instance of an inlined instance of a block which is local to an inline
12476 function, so we have to trace all of the way back through the origin chain
12477 to find out what sort of node actually served as the original seed for the
12481 block_ultimate_origin (const_tree block)
12483 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12485 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12486 we're trying to output the abstract instance of this function. */
12487 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12490 if (immediate_origin == NULL_TREE)
12495 tree lookahead = immediate_origin;
/* Chase the chain of BLOCK origins until it ends or self-cycles.  */
12499 ret_val = lookahead;
12500 lookahead = (TREE_CODE (ret_val) == BLOCK
12501 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12503 while (lookahead != NULL && lookahead != ret_val);
12505 /* The block's abstract origin chain may not be the *ultimate* origin of
12506 the block. It could lead to a DECL that has an abstract origin set.
12507 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12508 will give us if it has one). Note that DECL's abstract origins are
12509 supposed to be the most distant ancestor (or so decl_ultimate_origin
12510 claims), so we don't need to loop following the DECL origins. */
12511 if (DECL_P (ret_val))
12512 return DECL_ORIGIN (ret_val);
/* tree_nop_conversion_p: true when converting INNER_TYPE to OUTER_TYPE
   requires no instructions -- no address-space change, and either
   matching precision (integral/pointer/offset types) or matching
   machine mode (everything else).  */
12518 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12522 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12524 /* Do not strip casts into or out of differing address spaces. */
12525 if (POINTER_TYPE_P (outer_type)
12526 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12528 if (!POINTER_TYPE_P (inner_type)
12529 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12530 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12533 else if (POINTER_TYPE_P (inner_type)
12534 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12536 /* We already know that outer_type is not a pointer with
12537 a non-generic address space. */
12541 /* Use precision rather then machine mode when we can, which gives
12542 the correct answer even for submode (bit-field) types. */
12543 if ((INTEGRAL_TYPE_P (outer_type)
12544 || POINTER_TYPE_P (outer_type)
12545 || TREE_CODE (outer_type) == OFFSET_TYPE)
12546 && (INTEGRAL_TYPE_P (inner_type)
12547 || POINTER_TYPE_P (inner_type)
12548 || TREE_CODE (inner_type) == OFFSET_TYPE))
12549 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12551 /* Otherwise fall back on comparing machine modes (e.g. for
12552 aggregate types, floats). */
12553 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12556 /* Return true iff conversion in EXP generates no instruction. Mark
12557 it inline so that we fully inline into the stripping functions even
12558 though we have two uses of this function. */
12561 tree_nop_conversion (const_tree exp)
12563 tree outer_type, inner_type;
12565 if (!CONVERT_EXPR_P (exp)
12566 && TREE_CODE (exp) != NON_LVALUE_EXPR)
/* Guard against error_mark_node operands left by earlier errors.  */
12568 if (TREE_OPERAND (exp, 0) == error_mark_node)
12571 outer_type = TREE_TYPE (exp);
12572 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12577 return tree_nop_conversion_p (outer_type, inner_type);
12580 /* Return true iff conversion in EXP generates no instruction. Don't
12581 consider conversions changing the signedness. */
12584 tree_sign_nop_conversion (const_tree exp)
12586 tree outer_type, inner_type;
12588 if (!tree_nop_conversion (exp))
12591 outer_type = TREE_TYPE (exp);
12592 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
/* On top of being a nop, require matching signedness and matching
   pointer-ness.  */
12594 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12595 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12598 /* Strip conversions from EXP according to tree_nop_conversion and
12599 return the resulting expression. */
12602 tree_strip_nop_conversions (tree exp)
/* Peel conversion wrappers until a non-nop node is reached.  */
12604 while (tree_nop_conversion (exp))
12605 exp = TREE_OPERAND (exp, 0);
12609 /* Strip conversions from EXP according to tree_sign_nop_conversion
12610 and return the resulting expression. */
12613 tree_strip_sign_nop_conversions (tree exp)
/* Same peeling loop, but sign-changing conversions are kept.  */
12615 while (tree_sign_nop_conversion (exp))
12616 exp = TREE_OPERAND (exp, 0);
12620 /* Avoid any floating point extensions from EXP. */
12622 strip_float_extensions (tree exp)
12624 tree sub, expt, subt;
12626 /* For floating point constant look up the narrowest type that can hold
12627 it properly and handle it like (type)(narrowest_type)constant.
12628 This way we can optimize for instance a=a*2.0 where "a" is float
12629 but 2.0 is double constant. */
12630 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12632 REAL_VALUE_TYPE orig;
12635 orig = TREE_REAL_CST (exp);
/* Prefer float when the value survives truncation to float exactly...  */
12636 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12637 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12638 type = float_type_node;
/* ...otherwise try double.  */
12639 else if (TYPE_PRECISION (TREE_TYPE (exp))
12640 > TYPE_PRECISION (double_type_node)
12641 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12642 type = double_type_node;
12644 return build_real_truncate (type, orig);
/* Not a conversion: nothing to strip.  */
12647 if (!CONVERT_EXPR_P (exp))
12650 sub = TREE_OPERAND (exp, 0);
12651 subt = TREE_TYPE (sub);
12652 expt = TREE_TYPE (exp);
/* Only strip widenings between binary (or between decimal) float types;
   keep conversions from non-float operands and binary<->decimal casts.  */
12654 if (!FLOAT_TYPE_P (subt))
12657 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12660 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
/* Recurse: there may be a chain of extensions.  */
12663 return strip_float_extensions (sub);
12666 /* Strip out all handled components that produce invariant
12670 strip_invariant_refs (const_tree op)
/* Walk down the reference chain while the outermost node is a handled
   component, checking each level for invariance.  */
12672 while (handled_component_p (op))
12674 switch (TREE_CODE (op))
12677 case ARRAY_RANGE_REF:
/* The index must be a gimple constant and the optional operands
   (lower bound, element size) must be absent.  */
12678 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12679 || TREE_OPERAND (op, 2) != NULL_TREE
12680 || TREE_OPERAND (op, 3) != NULL_TREE)
12684 case COMPONENT_REF:
/* A COMPONENT_REF with an explicit offset operand is not invariant.  */
12685 if (TREE_OPERAND (op, 2) != NULL_TREE)
12691 op = TREE_OPERAND (op, 0);
/* Cached EH personality decl; GTY(()) roots it for garbage collection.  */
12697 static GTY(()) tree gcc_eh_personality_decl;
12699 /* Return the GCC personality function decl. */
12702 lhd_gcc_personality (void)
/* Built lazily on first use and cached in the static above.  */
12704 if (!gcc_eh_personality_decl)
12705 gcc_eh_personality_decl = build_personality_function ("gcc");
12706 return gcc_eh_personality_decl;
12709 /* TARGET is a call target of GIMPLE call statement
12710 (obtained by gimple_call_fn). Return true if it is
12711 OBJ_TYPE_REF representing an virtual call of C++ method.
12712 (As opposed to OBJ_TYPE_REF representing objc calls
12713 through a cast where middle-end devirtualization machinery
12717 virtual_method_call_p (const_tree target)
12719 if (TREE_CODE (target) != OBJ_TYPE_REF)
12721 tree t = TREE_TYPE (target);
12722 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
/* A FUNCTION_TYPE here indicates an ObjC-style OBJ_TYPE_REF; C++ virtual
   calls have METHOD_TYPE.  */
12724 if (TREE_CODE (t) == FUNCTION_TYPE)
12726 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12727 /* If we do not have BINFO associated, it means that type was built
12728 without devirtualization enabled. Do not consider this a virtual
12730 if (!TYPE_BINFO (obj_type_ref_class (target)))
12735 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12738 obj_type_ref_class (const_tree ref)
12740 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
/* Strip the pointer to the function/method type off the reference.  */
12741 ref = TREE_TYPE (ref);
12742 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12743 ref = TREE_TYPE (ref);
12744 /* We look for type THIS points to. ObjC also builds
12745 OBJ_TYPE_REF with non-method calls, Their first parameter
12746 ID however also corresponds to class type. */
12747 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12748 || TREE_CODE (ref) == FUNCTION_TYPE);
/* The first argument type is a pointer to the class.  */
12749 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12750 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12751 return TREE_TYPE (ref);
12754 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12757 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12760 tree base_binfo, b;
/* Scan direct bases for an ODR-equal type at POS; otherwise recurse
   into each base.  */
12762 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12763 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12764 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12766 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12771 /* Try to find a base info of BINFO that would have its field decl at offset
12772 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12773 found, return, otherwise return NULL_TREE. */
12776 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12778 tree type = BINFO_TYPE (binfo);
12782 HOST_WIDE_INT pos, size;
/* Found the expected type at this level.  */
12786 if (types_same_for_odr (type, expected_type))
/* Locate the artificial (base) field whose bit range contains OFFSET.  */
12791 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12793 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12796 pos = int_bit_position (fld);
12797 size = tree_to_uhwi (DECL_SIZE (fld));
12798 if (pos <= offset && (pos + size) > offset)
/* Only RECORD_TYPE fields can be (or contain) bases.  */
12801 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12804 /* Offset 0 indicates the primary base, whose vtable contents are
12805 represented in the binfo for the derived class. */
12806 else if (offset != 0)
12808 tree found_binfo = NULL, base_binfo;
12809 /* Offsets in BINFO are in bytes relative to the whole structure
12810 while POS is in bits relative to the containing field. */
12811 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12814 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12815 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12816 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12818 found_binfo = base_binfo;
12822 binfo = found_binfo;
/* No direct base matched; fall back to a recursive lookup.  */
12824 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
/* Descend into the field's type and continue the search.  */
12828 type = TREE_TYPE (fld);
12833 /* Returns true if X is a typedef decl. */
12836 is_typedef_decl (const_tree x)
/* A typedef is a TYPE_DECL whose DECL_ORIGINAL_TYPE records the type it
   renames; other type declarations leave it NULL.  */
12838 return (x && TREE_CODE (x) == TYPE_DECL
12839 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12842 /* Returns true iff TYPE is a type variant created for a typedef. */
12845 typedef_variant_p (const_tree type)
/* A typedef variant's TYPE_NAME is the typedef's TYPE_DECL.  */
12847 return is_typedef_decl (TYPE_NAME (type));
12850 /* Warn about a use of an identifier which was marked deprecated. */
12852 warn_deprecated_use (tree node, tree attr)
/* Nothing to do without a node or with -Wdeprecated-declarations off.  */
12856 if (node == 0 || !warn_deprecated_decl)
12862 attr = DECL_ATTRIBUTES (node);
12863 else if (TYPE_P (node))
12865 tree decl = TYPE_STUB_DECL (node);
12867 attr = lookup_attribute ("deprecated",
12868 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12873 attr = lookup_attribute ("deprecated", attr);
/* The deprecated attribute may carry an optional message string.  */
12876 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12884 w = warning (OPT_Wdeprecated_declarations,
12885 "%qD is deprecated: %s", node, msg);
12887 w = warning (OPT_Wdeprecated_declarations,
12888 "%qD is deprecated", node);
/* Point at the declaration only if the warning itself was emitted.  */
12890 inform (DECL_SOURCE_LOCATION (node), "declared here");
12892 else if (TYPE_P (node))
12894 tree what = NULL_TREE;
12895 tree decl = TYPE_STUB_DECL (node);
/* Prefer naming the type in the diagnostic when a name is available.  */
12897 if (TYPE_NAME (node))
12899 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12900 what = TYPE_NAME (node);
12901 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12902 && DECL_NAME (TYPE_NAME (node)))
12903 what = DECL_NAME (TYPE_NAME (node));
12911 w = warning (OPT_Wdeprecated_declarations,
12912 "%qE is deprecated: %s", what, msg);
12914 w = warning (OPT_Wdeprecated_declarations,
12915 "%qE is deprecated", what);
/* Anonymous type: fall back to a generic wording.  */
12920 w = warning (OPT_Wdeprecated_declarations,
12921 "type is deprecated: %s", msg);
12923 w = warning (OPT_Wdeprecated_declarations,
12924 "type is deprecated");
12927 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12934 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12937 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12942 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12945 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12951 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12952 somewhere in it. */
12955 contains_bitfld_component_ref_p (const_tree ref)
/* Walk the whole reference chain looking for a bit-field access.  */
12957 while (handled_component_p (ref))
12959 if (TREE_CODE (ref) == COMPONENT_REF
12960 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12962 ref = TREE_OPERAND (ref, 0);
12968 /* Try to determine whether a TRY_CATCH expression can fall through.
12969 This is a subroutine of block_may_fallthru. */
12972 try_catch_may_fallthru (const_tree stmt)
12974 tree_stmt_iterator i;
12976 /* If the TRY block can fall through, the whole TRY_CATCH can
12978 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
/* Otherwise the answer depends on the handler sequence in operand 1.  */
12981 i = tsi_start (TREE_OPERAND (stmt, 1));
12982 switch (TREE_CODE (tsi_stmt (i)))
12985 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12986 catch expression and a body. The whole TRY_CATCH may fall
12987 through iff any of the catch bodies falls through. */
12988 for (; !tsi_end_p (i); tsi_next (&i))
12990 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12995 case EH_FILTER_EXPR:
12996 /* The exception filter expression only matters if there is an
12997 exception. If the exception does not match EH_FILTER_TYPES,
12998 we will execute EH_FILTER_FAILURE, and we will fall through
12999 if that falls through. If the exception does match
13000 EH_FILTER_TYPES, the stack unwinder will continue up the
13001 stack, so we will not fall through. We don't know whether we
13002 will throw an exception which matches EH_FILTER_TYPES or not,
13003 so we just ignore EH_FILTER_TYPES and assume that we might
13004 throw an exception which doesn't match. */
13005 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
13008 /* This case represents statements to be executed when an
13009 exception occurs. Those statements are implicitly followed
13010 by a RESX statement to resume execution after the exception.
13011 So in this case the TRY_CATCH never falls through. */
13016 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
13017 need not be 100% accurate; simply be conservative and return true if we
13018 don't know. This is used only to avoid stupidly generating extra code.
13019 If we're wrong, we'll just delete the extra code later. */
13022 block_may_fallthru (const_tree block)
13024 /* This CONST_CAST is okay because expr_last returns its argument
13025 unmodified and we assign it to a const_tree. */
13026 const_tree stmt = expr_last (CONST_CAST_TREE (block));
/* Dispatch on the last statement; ERROR_MARK when the block is empty.  */
13028 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
13032 /* Easy cases. If the last statement of the block implies
13033 control transfer, then we can't fall through. */
13037 /* If SWITCH_LABELS is set, this is lowered, and represents a
13038 branch to a selected label and hence can not fall through.
13039 Otherwise SWITCH_BODY is set, and the switch can fall
13041 return SWITCH_LABELS (stmt) == NULL_TREE;
/* A COND_EXPR falls through if either arm does.  */
13044 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
13046 return block_may_fallthru (COND_EXPR_ELSE (stmt));
13049 return block_may_fallthru (BIND_EXPR_BODY (stmt));
13051 case TRY_CATCH_EXPR:
13052 return try_catch_may_fallthru (stmt);
13054 case TRY_FINALLY_EXPR:
13055 /* The finally clause is always executed after the try clause,
13056 so if it does not fall through, then the try-finally will not
13057 fall through. Otherwise, if the try clause does not fall
13058 through, then when the finally clause falls through it will
13059 resume execution wherever the try clause was going. So the
13060 whole try-finally will only fall through if both the try
13061 clause and the finally clause fall through. */
13062 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
13063 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
/* MODIFY_EXPR of a call: look at the call itself.  */
13066 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
13067 stmt = TREE_OPERAND (stmt, 1);
13073 /* Functions that do not return do not fall through. */
13074 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
13076 case CLEANUP_POINT_EXPR:
13077 return block_may_fallthru (TREE_OPERAND (stmt, 0));
13080 return block_may_fallthru (TREE_OPERAND (stmt, 1));
/* Unknown code: let the front end decide.  */
13086 return lang_hooks.block_may_fallthru (stmt);
13090 /* True if we are using EH to handle cleanups. */
13091 static bool using_eh_for_cleanups_flag = false;
13093 /* This routine is called from front ends to indicate eh should be used for
13096 using_eh_for_cleanups (void)
13098 using_eh_for_cleanups_flag = true;
13101 /* Query whether EH is used for cleanups. */
13103 using_eh_for_cleanups_p (void)
13105 return using_eh_for_cleanups_flag;
13108 /* Wrapper for tree_code_name to ensure that tree code is valid */
13110 get_tree_code_name (enum tree_code code)
13112 const char *invalid = "<invalid tree code>";
/* Out-of-range codes get the placeholder instead of indexing past the
   tree_code_name table.  */
13114 if (code >= MAX_TREE_CODES)
13117 return tree_code_name[code];
13120 /* Drops the TREE_OVERFLOW flag from T. */
13123 drop_tree_overflow (tree t)
/* Callers must only pass nodes that actually have the flag set.  */
13125 gcc_checking_assert (TREE_OVERFLOW (t));
13127 /* For tree codes with a sharing machinery re-build the result. */
13128 if (TREE_CODE (t) == INTEGER_CST)
13129 return wide_int_to_tree (TREE_TYPE (t), t);
13131 /* Otherwise, as all tcc_constants are possibly shared, copy the node
13132 and drop the flag. */
13134 TREE_OVERFLOW (t) = 0;
13136 /* For constants that contain nested constants, drop the flag
13137 from those as well. */
13138 if (TREE_CODE (t) == COMPLEX_CST)
13140 if (TREE_OVERFLOW (TREE_REALPART (t)))
13141 TREE_REALPART (t) = drop_tree_overflow (TREE_REALPART (t));
13142 if (TREE_OVERFLOW (TREE_IMAGPART (t)))
13143 TREE_IMAGPART (t) = drop_tree_overflow (TREE_IMAGPART (t));
13145 if (TREE_CODE (t) == VECTOR_CST)
13147 for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
/* Note: elt is a reference so the element is rewritten in place.  */
13149 tree& elt = VECTOR_CST_ELT (t, i);
13150 if (TREE_OVERFLOW (elt))
13151 elt = drop_tree_overflow (elt);
13157 /* Given a memory reference expression T, return its base address.
13158 The base address of a memory reference expression is the main
13159 object being referenced. For instance, the base address for
13160 'array[i].fld[j]' is 'array'. You can think of this as stripping
13161 away the offset part from a memory address.
13163 This function calls handled_component_p to strip away all the inner
13164 parts of the memory reference until it reaches the base object. */
13167 get_base_address (tree t)
/* Strip all handled components (COMPONENT_REF, ARRAY_REF, ...).  */
13169 while (handled_component_p (t))
13170 t = TREE_OPERAND (t, 0);
/* Look through a (TARGET_)MEM_REF of an ADDR_EXPR to the object itself.  */
13172 if ((TREE_CODE (t) == MEM_REF
13173 || TREE_CODE (t) == TARGET_MEM_REF)
13174 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
13175 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
13177 /* ??? Either the alias oracle or all callers need to properly deal
13178 with WITH_SIZE_EXPRs before we can look through those. */
13179 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13185 /* Return a tree of sizetype representing the size, in bytes, of the element
13186 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13189 array_ref_element_size (tree exp)
13191 tree aligned_size = TREE_OPERAND (exp, 3);
13192 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
13193 location_t loc = EXPR_LOCATION (exp);
13195 /* If a size was specified in the ARRAY_REF, it's the size measured
13196 in alignment units of the element type. So multiply by that value. */
13199 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13200 sizetype from another type of the same width and signedness. */
13201 if (TREE_TYPE (aligned_size) != sizetype)
13202 aligned_size = fold_convert_loc (loc, sizetype, aligned_size)
13203 return size_binop_loc (loc, MULT_EXPR, aligned_size,
13204 size_int (TYPE_ALIGN_UNIT (elmt_type)));
13207 /* Otherwise, take the size from that of the element type. Substitute
13208 any PLACEHOLDER_EXPR that we have. */
13210 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
13213 /* Return a tree representing the lower bound of the array mentioned in
13214 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13217 array_ref_low_bound (tree exp)
13219 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13221 /* If a lower bound is specified in EXP, use it. */
13222 if (TREE_OPERAND (exp, 2))
13223 return TREE_OPERAND (exp, 2);
13225 /* Otherwise, if there is a domain type and it has a lower bound, use it,
13226 substituting for a PLACEHOLDER_EXPR as needed. */
13227 if (domain_type && TYPE_MIN_VALUE (domain_type))
13228 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
13230 /* Otherwise, return a zero of the appropriate type. */
13231 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
13234 /* Return a tree representing the upper bound of the array mentioned in
13235 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
13238 array_ref_up_bound (tree exp)
13240 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
13242 /* If there is a domain type and it has an upper bound, use it, substituting
13243 for a PLACEHOLDER_EXPR as needed. */
13244 if (domain_type && TYPE_MAX_VALUE (domain_type))
13245 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
13247 /* Otherwise fail. */
13251 /* Returns true if REF is an array reference or a component reference
13252 to an array at the end of a structure.
13253 If this is the case, the array may be allocated larger
13254 than its upper bound implies. */
13257 array_at_struct_end_p (tree ref)
/* Remember the array type and step to the containing object.  */
13261 if (TREE_CODE (ref) == ARRAY_REF
13262 || TREE_CODE (ref) == ARRAY_RANGE_REF)
13264 atype = TREE_TYPE (TREE_OPERAND (ref, 0));
13265 ref = TREE_OPERAND (ref, 0);
13267 else if (TREE_CODE (ref) == COMPONENT_REF
13268 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE)
13269 atype = TREE_TYPE (TREE_OPERAND (ref, 1));
/* Verify the array is the trailing member at every enclosing level.  */
13273 while (handled_component_p (ref))
13275 /* If the reference chain contains a component reference to a
13276 non-union type and there follows another field the reference
13277 is not at the end of a structure. */
13278 if (TREE_CODE (ref) == COMPONENT_REF)
13280 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
13282 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
13283 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
13284 nextf = DECL_CHAIN (nextf);
13289 /* If we have a multi-dimensional array we do not consider
13290 a non-innermost dimension as flex array if the whole
13291 multi-dimensional array is at struct end.
13292 Same for an array of aggregates with a trailing array
13294 else if (TREE_CODE (ref) == ARRAY_REF)
13296 else if (TREE_CODE (ref) == ARRAY_RANGE_REF)
13298 /* If we view an underlying object as sth else then what we
13299 gathered up to now is what we have to rely on. */
13300 else if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
/* handled_component_p guarantees one of the cases above.  */
13303 gcc_unreachable ();
13305 ref = TREE_OPERAND (ref, 0);
13308 /* The array now is at struct end. Treat flexible arrays as
13309 always subject to extend, even into just padding constrained by
13310 an underlying decl. */
13311 if (! TYPE_SIZE (atype))
/* Look through a MEM_REF of an ADDR_EXPR to the underlying object,
   remembering the access type size for the check below.  */
13316 if (TREE_CODE (ref) == MEM_REF
13317 && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
13319 size = TYPE_SIZE (TREE_TYPE (ref));
13320 ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
13323 /* If the reference is based on a declared entity, the size of the array
13324 is constrained by its given domain. (Do not trust commons PR/69368). */
13326 /* Be sure the size of MEM_REF target match. For example:
13329 struct foo *str = (struct foo *)&buf;
13331 str->trailin_array[2] = 1;
13333 is valid because BUF allocate enough space. */
13335 && (!size || (DECL_SIZE (ref) != NULL
13336 && operand_equal_p (DECL_SIZE (ref), size, 0)))
13337 && !(flag_unconstrained_commons
13338 && VAR_P (ref) && DECL_COMMON (ref)))
13344 /* Return a tree representing the offset, in bytes, of the field referenced
13345 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
13348 component_ref_field_offset (tree exp)
13350 tree aligned_offset = TREE_OPERAND (exp, 2);
13351 tree field = TREE_OPERAND (exp, 1);
13352 location_t loc = EXPR_LOCATION (exp);
13354 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
13355 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
13357 if (aligned_offset)
13359 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
13360 sizetype from another type of the same width and signedness. */
13361 if (TREE_TYPE (aligned_offset) != sizetype)
13362 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
13363 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
13364 size_int (DECL_OFFSET_ALIGN (field)
13368 /* Otherwise, take the offset from that of the field. Substitute
13369 any PLACEHOLDER_EXPR that we have. */
13371 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13374 /* Return the machine mode of T. For vectors, returns the mode of the
13375 inner type. The main use case is to feed the result to HONOR_NANS,
13376 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13379 element_mode (const_tree t)
/* For vector/complex types, drill down to the element type's mode.  */
13383 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13385 return TYPE_MODE (t);
13389 /* Verify that basic properties of T match TV and thus T can be a variant of
13390 TV. TV should be the more specified variant (i.e. the main variant). */
13393 verify_type_variant (const_tree t, tree tv)
13395 /* Type variant can differ by:
13397 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13398 ENCODE_QUAL_ADDR_SPACE.
13399 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13400 in this case some values may not be set in the variant types
13401 (see TYPE_COMPLETE_P checks).
13402 - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
13403 - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
13404 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13405 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13406 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13407 this is necessary to make it possible to merge types form different TUs
13408 - arrays, pointers and references may have TREE_TYPE that is a variant
13409 of TREE_TYPE of their main variants.
13410 - aggregates may have new TYPE_FIELDS list that list variants of
13411 the main variant TYPE_FIELDS.
13412 - vector types may differ by TYPE_VECTOR_OPAQUE
13413 - TYPE_METHODS is always NULL for variant types and maintained for
13417 /* Convenience macro for matching individual fields. */
13418 #define verify_variant_match(flag) \
13420 if (flag (tv) != flag (t)) \
13422 error ("type variant differs by " #flag "."); \
13428 /* tree_base checks. */
13430 verify_variant_match (TREE_CODE);
13431 /* FIXME: Ada builds non-artificial variants of artificial types. */
13432 if (TYPE_ARTIFICIAL (tv) && 0)
13433 verify_variant_match (TYPE_ARTIFICIAL);
13434 if (POINTER_TYPE_P (tv))
13435 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13436 /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build. */
13437 verify_variant_match (TYPE_UNSIGNED);
13438 verify_variant_match (TYPE_PACKED);
13439 if (TREE_CODE (t) == REFERENCE_TYPE)
13440 verify_variant_match (TYPE_REF_IS_RVALUE);
13441 if (AGGREGATE_TYPE_P (t))
13442 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13444 verify_variant_match (TYPE_SATURATING);
13445 /* FIXME: This check trigger during libstdc++ build. */
13446 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13447 verify_variant_match (TYPE_FINAL_P);
13449 /* tree_type_common checks. */
13451 if (COMPLETE_TYPE_P (t))
13453 verify_variant_match (TYPE_MODE);
/* PLACEHOLDER_EXPR sizes (self-referential types) are excluded from the
   identity check.  */
13454 if (TREE_CODE (TYPE_SIZE (t)) != PLACEHOLDER_EXPR
13455 && TREE_CODE (TYPE_SIZE (tv)) != PLACEHOLDER_EXPR)
13456 verify_variant_match (TYPE_SIZE);
13457 if (TREE_CODE (TYPE_SIZE_UNIT (t)) != PLACEHOLDER_EXPR
13458 && TREE_CODE (TYPE_SIZE_UNIT (tv)) != PLACEHOLDER_EXPR
13459 && TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv))
13461 gcc_assert (!operand_equal_p (TYPE_SIZE_UNIT (t),
13462 TYPE_SIZE_UNIT (tv), 0));
13463 error ("type variant has different TYPE_SIZE_UNIT");
13465 error ("type variant's TYPE_SIZE_UNIT");
13466 debug_tree (TYPE_SIZE_UNIT (tv));
13467 error ("type's TYPE_SIZE_UNIT");
13468 debug_tree (TYPE_SIZE_UNIT (t));
13472 verify_variant_match (TYPE_PRECISION);
13473 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13474 if (RECORD_OR_UNION_TYPE_P (t))
13475 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13476 else if (TREE_CODE (t) == ARRAY_TYPE)
13477 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13478 /* During LTO we merge variant lists from diferent translation units
13479 that may differ BY TYPE_CONTEXT that in turn may point
13480 to TRANSLATION_UNIT_DECL.
13481 Ada also builds variants of types with different TYPE_CONTEXT. */
13482 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13483 verify_variant_match (TYPE_CONTEXT);
13484 verify_variant_match (TYPE_STRING_FLAG);
13485 if (TYPE_ALIAS_SET_KNOWN_P (t))
13487 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13492 /* tree_type_non_common checks. */
13494 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13495 and dangle the pointer from time to time. */
13496 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13497 && (in_lto_p || !TYPE_VFIELD (tv)
13498 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13500 error ("type variant has different TYPE_VFIELD");
/* Scalar-like types must agree on their value range.  */
13504 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13505 || TREE_CODE (t) == INTEGER_TYPE
13506 || TREE_CODE (t) == BOOLEAN_TYPE
13507 || TREE_CODE (t) == REAL_TYPE
13508 || TREE_CODE (t) == FIXED_POINT_TYPE)
13510 verify_variant_match (TYPE_MAX_VALUE);
13511 verify_variant_match (TYPE_MIN_VALUE);
13513 if (TREE_CODE (t) == METHOD_TYPE)
13514 verify_variant_match (TYPE_METHOD_BASETYPE);
13515 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
13517 error ("type variant has TYPE_METHODS");
13521 if (TREE_CODE (t) == OFFSET_TYPE)
13522 verify_variant_match (TYPE_OFFSET_BASETYPE);
13523 if (TREE_CODE (t) == ARRAY_TYPE)
13524 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13525 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13526 or even type's main variant. This is needed to make bootstrap pass
13527 and the bug seems new in GCC 5.
13528 C++ FE should be updated to make this consistent and we should check
13529 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13530 is a match with main variant.
13532 Also disable the check for Java for now because of parser hack that builds
13533 first an dummy BINFO and then sometimes replace it by real BINFO in some
13535 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13536 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13537 /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
13538 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
13539 at LTO time only. */
13540 && (in_lto_p && odr_type_p (t)))
13542 error ("type variant has different TYPE_BINFO");
13544 error ("type variant's TYPE_BINFO");
13545 debug_tree (TYPE_BINFO (tv));
13546 error ("type's TYPE_BINFO");
13547 debug_tree (TYPE_BINFO (t));
13551 /* Check various uses of TYPE_VALUES_RAW. */
13552 if (TREE_CODE (t) == ENUMERAL_TYPE)
13553 verify_variant_match (TYPE_VALUES);
13554 else if (TREE_CODE (t) == ARRAY_TYPE)
13555 verify_variant_match (TYPE_DOMAIN);
13556 /* Permit incomplete variants of complete type. While FEs may complete
13557 all variants, this does not happen for C++ templates in all cases. */
13558 else if (RECORD_OR_UNION_TYPE_P (t)
13559 && COMPLETE_TYPE_P (t)
13560 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13564 /* Fortran builds qualified variants as new records with items of
13565 qualified type. Verify that they looks same. */
13566 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13568 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13569 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13570 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13571 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13572 /* FIXME: gfc_nonrestricted_type builds all types as variants
13573 with exception of pointer types. It deeply copies the type
13574 which means that we may end up with a variant type
13575 referring non-variant pointer. We may change it to
13576 produce types as variants, too, like
13577 objc_get_protocol_qualified_type does. */
13578 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13579 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13580 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13584 error ("type variant has different TYPE_FIELDS");
13586 error ("first mismatch is field");
13588 error ("and field");
13593 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13594 verify_variant_match (TYPE_ARG_TYPES);
13595 /* For C++ the qualified variant of array type is really an array type
13596 of qualified TREE_TYPE.
13597 objc builds variants of pointer where pointer to type is a variant, too
13598 in objc_get_protocol_qualified_type. */
13599 if (TREE_TYPE (t) != TREE_TYPE (tv)
13600 && ((TREE_CODE (t) != ARRAY_TYPE
13601 && !POINTER_TYPE_P (t))
13602 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13603 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13605 error ("type variant has different TREE_TYPE");
13607 error ("type variant's TREE_TYPE");
13608 debug_tree (TREE_TYPE (tv));
13609 error ("type's TREE_TYPE");
13610 debug_tree (TREE_TYPE (t));
/* Finally, the variant must be TBAA-compatible with its main variant.  */
13613 if (type_with_alias_set_p (t)
13614 && !gimple_canonical_types_compatible_p (t, tv, false))
13616 error ("type is not compatible with its variant");
13618 error ("type variant's TREE_TYPE");
13619 debug_tree (TREE_TYPE (tv));
13620 error ("type's TREE_TYPE");
13621 debug_tree (TREE_TYPE (t));
13625 #undef verify_variant_match
13629 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13630 the middle-end types_compatible_p function. It needs to avoid
13631 claiming types are different for types that should be treated
13632 the same with respect to TBAA. Canonical types are also used
13633 for IL consistency checks via the useless_type_conversion_p
13634 predicate which does not handle all type kinds itself but falls
13635 back to pointer-comparison of TYPE_CANONICAL for aggregates
13638 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13639 type calculation because we need to allow inter-operability between signed
13640 and unsigned variants. */
13643 type_with_interoperable_signedness (const_tree type)
13645 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13646 signed char and unsigned char. Similarly fortran FE builds
13647 C_SIZE_T as signed type, while C defines it unsigned. */
/* Restrict the exemption to integer-like codes of char or size_t width.  */
13649 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13651 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13652 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13655 /* Return true iff T1 and T2 are structurally identical for what
13657 This function is used both by lto.c canonical type merging and by the
13658 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13659 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13660 only for LTO because only in these cases TYPE_CANONICAL equivalence
13661 correspond to one defined by gimple_canonical_types_compatible_p. */
13664 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13665 bool trust_type_canonical)
13667 /* Type variants should be same as the main variant. When not doing sanity
13668 checking to verify this fact, go to main variants and save some work. */
13669 if (trust_type_canonical)
13671 t1 = TYPE_MAIN_VARIANT (t1);
13672 t2 = TYPE_MAIN_VARIANT (t2);
13675 /* Check first for the obvious case of pointer identity. */
13679 /* Check that we have two types to compare. */
13680 if (t1 == NULL_TREE || t2 == NULL_TREE)
13683 /* We consider complete types always compatible with incomplete type.
13684 This does not make sense for canonical type calculation and thus we
13685 need to ensure that we are never called on it.
13687 FIXME: For more correctness the function probably should have three modes
13688 1) mode assuming that types are complete mathcing their structure
13689 2) mode allowing incomplete types but producing equivalence classes
13690 and thus ignoring all info from complete types
13691 3) mode allowing incomplete types to match complete but checking
13692 compatibility between complete types.
13694 1 and 2 can be used for canonical type calculation. 3 is the real
13695 definition of type compatibility that can be used i.e. for warnings during
13696 declaration merging. */
13698 gcc_assert (!trust_type_canonical
13699 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13700 /* If the types have been previously registered and found equal
13703 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13704 && trust_type_canonical)
13706 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13707 they are always NULL, but they are set to non-NULL for types
13708 constructed by build_pointer_type and variants. In this case the
13709 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
13710 all pointers are considered equal. Be sure to not return false
13712 gcc_checking_assert (canonical_type_used_p (t1)
13713 && canonical_type_used_p (t2));
13714 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13717 /* Can't be the same type if the types don't have the same code. */
13718 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13719 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13722 /* Qualifiers do not matter for canonical type comparison purposes. */
13724 /* Void types and nullptr types are always the same. */
13725 if (TREE_CODE (t1) == VOID_TYPE
13726 || TREE_CODE (t1) == NULLPTR_TYPE)
13729 /* Can't be the same type if they have different mode. */
13730 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13733 /* Non-aggregate types can be handled cheaply. */
13734 if (INTEGRAL_TYPE_P (t1)
13735 || SCALAR_FLOAT_TYPE_P (t1)
13736 || FIXED_POINT_TYPE_P (t1)
13737 || TREE_CODE (t1) == VECTOR_TYPE
13738 || TREE_CODE (t1) == COMPLEX_TYPE
13739 || TREE_CODE (t1) == OFFSET_TYPE
13740 || POINTER_TYPE_P (t1))
13742 /* Can't be the same type if they have different recision. */
13743 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13746 /* In some cases the signed and unsigned types are required to be
13748 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13749 && !type_with_interoperable_signedness (t1))
13752 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13753 interoperable with "signed char". Unless all frontends are revisited
13754 to agree on these types, we must ignore the flag completely. */
13756 /* Fortran standard define C_PTR type that is compatible with every
13757 C pointer. For this reason we need to glob all pointers into one.
13758 Still pointers in different address spaces are not compatible. */
13759 if (POINTER_TYPE_P (t1))
13761 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13762 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13766 /* Tail-recurse to components. */
13767 if (TREE_CODE (t1) == VECTOR_TYPE
13768 || TREE_CODE (t1) == COMPLEX_TYPE)
13769 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13771 trust_type_canonical);
13776 /* Do type-specific comparisons. */
13777 switch (TREE_CODE (t1))
13780 /* Array types are the same if the element types are the same and
13781 the number of elements are the same. */
13782 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13783 trust_type_canonical)
13784 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13785 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13786 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13790 tree i1 = TYPE_DOMAIN (t1);
13791 tree i2 = TYPE_DOMAIN (t2);
13793 /* For an incomplete external array, the type domain can be
13794 NULL_TREE. Check this condition also. */
13795 if (i1 == NULL_TREE && i2 == NULL_TREE)
13797 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13801 tree min1 = TYPE_MIN_VALUE (i1);
13802 tree min2 = TYPE_MIN_VALUE (i2);
13803 tree max1 = TYPE_MAX_VALUE (i1);
13804 tree max2 = TYPE_MAX_VALUE (i2);
13806 /* The minimum/maximum values have to be the same. */
13809 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13810 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13811 || operand_equal_p (min1, min2, 0))))
13814 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13815 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13816 || operand_equal_p (max1, max2, 0)))))
13824 case FUNCTION_TYPE:
13825 /* Function types are the same if the return type and arguments types
13827 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13828 trust_type_canonical))
13831 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13835 tree parms1, parms2;
13837 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13839 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13841 if (!gimple_canonical_types_compatible_p
13842 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13843 trust_type_canonical))
13847 if (parms1 || parms2)
13855 case QUAL_UNION_TYPE:
13859 /* Don't try to compare variants of an incomplete type, before
13860 TYPE_FIELDS has been copied around. */
13861 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13865 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13868 /* For aggregate types, all the fields must be the same. */
13869 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13871 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13873 /* Skip non-fields and zero-sized fields. */
13874 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13876 && integer_zerop (DECL_SIZE (f1)))))
13877 f1 = TREE_CHAIN (f1);
13878 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13880 && integer_zerop (DECL_SIZE (f2)))))
13881 f2 = TREE_CHAIN (f2);
13884 /* The fields must have the same name, offset and type. */
13885 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13886 || !gimple_compare_field_offset (f1, f2)
13887 || !gimple_canonical_types_compatible_p
13888 (TREE_TYPE (f1), TREE_TYPE (f2),
13889 trust_type_canonical))
13893 /* If one aggregate has more fields than the other, they
13894 are not the same. */
13902 /* Consider all types with language specific trees in them mutually
13903 compatible. This is executed only from verify_type and false
13904 positives can be tolerated. */
13905 gcc_assert (!in_lto_p);
13910 /* Verify type T. */
/* NOTE(review): sampled listing -- braces, "if (!mv)" guards and several
   return/early-exit lines between the numbered lines are absent.  Checks
   TYPE_MAIN_VARIANT/TYPE_CANONICAL sanity first, then the overloaded
   min/max/binfo/values slots per tree code; on any failure it dumps the
   tree and calls internal_error at the bottom.  Comments repaired only.  */
13913 verify_type (const_tree t)
13915 bool error_found = false;
13916 tree mv = TYPE_MAIN_VARIANT (t);
13919 error ("Main variant is not defined");
13920 error_found = true;
13922 else if (mv != TYPE_MAIN_VARIANT (mv))
13924 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13926 error_found = true;
13928 else if (t != mv && !verify_type_variant (t, mv))
13929 error_found = true;
13931 tree ct = TYPE_CANONICAL (t);
13934 else if (TYPE_CANONICAL (t) != ct)
13936 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13938 error_found = true;
13940 /* Method and function types can not be used to address memory and thus
13941 TYPE_CANONICAL really matters only for determining useless conversions.
13943 FIXME: C++ FE produces declarations of builtin functions that are not
13944 compatible with main variants. */
13945 else if (TREE_CODE (t) == FUNCTION_TYPE)
13948 /* FIXME: gimple_canonical_types_compatible_p can not compare types
13949 with variably sized arrays because their sizes possibly
13950 gimplified to different variables. */
13951 && !variably_modified_type_p (ct, NULL)
13952 && !gimple_canonical_types_compatible_p (t, ct, false))
13954 error ("TYPE_CANONICAL is not compatible");
13956 error_found = true;
13959 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13960 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13962 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13964 error_found = true;
13966 /* FIXME: this is violated by the C++ FE as discussed in PR70029, when
13967 FUNCTION_*_QUALIFIED flags are set. */
13968 if (0 && TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13970 error ("TYPE_CANONICAL of main variant is not main variant");
13972 debug_tree (TYPE_MAIN_VARIANT (ct));
13973 error_found = true;
13977 /* Check various uses of TYPE_MINVAL. */
13978 if (RECORD_OR_UNION_TYPE_P (t))
13980 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13981 and dangle the pointer from time to time. */
13982 if (TYPE_VFIELD (t)
13983 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13984 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13986 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13987 debug_tree (TYPE_VFIELD (t));
13988 error_found = true;
13991 else if (TREE_CODE (t) == POINTER_TYPE)
13993 if (TYPE_NEXT_PTR_TO (t)
13994 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13996 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13997 debug_tree (TYPE_NEXT_PTR_TO (t));
13998 error_found = true;
14001 else if (TREE_CODE (t) == REFERENCE_TYPE)
14003 if (TYPE_NEXT_REF_TO (t)
14004 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
14006 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
14007 debug_tree (TYPE_NEXT_REF_TO (t));
14008 error_found = true;
14011 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14012 || TREE_CODE (t) == FIXED_POINT_TYPE)
14014 /* FIXME: The following check should pass:
14015 useless_type_conversion_p (const_cast <tree> (t),
14016 TREE_TYPE (TYPE_MIN_VALUE (t))
14017 but does not for C sizetypes in LTO. */
14020 /* Check various uses of TYPE_MAXVAL. */
14021 if (RECORD_OR_UNION_TYPE_P (t))
14023 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
14024 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
14025 && TYPE_METHODS (t) != error_mark_node)
14027 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
14028 debug_tree (TYPE_METHODS (t));
14029 error_found = true;
14032 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14034 if (TYPE_METHOD_BASETYPE (t)
14035 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
14036 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
14038 error ("TYPE_METHOD_BASETYPE is not record nor union");
14039 debug_tree (TYPE_METHOD_BASETYPE (t));
14040 error_found = true;
14043 else if (TREE_CODE (t) == OFFSET_TYPE)
14045 if (TYPE_OFFSET_BASETYPE (t)
14046 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
14047 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
14049 error ("TYPE_OFFSET_BASETYPE is not record nor union");
14050 debug_tree (TYPE_OFFSET_BASETYPE (t));
14051 error_found = true;
14054 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
14055 || TREE_CODE (t) == FIXED_POINT_TYPE)
14057 /* FIXME: The following check should pass:
14058 useless_type_conversion_p (const_cast <tree> (t),
14059 TREE_TYPE (TYPE_MAX_VALUE (t))
14060 but does not for C sizetypes in LTO. */
14062 else if (TREE_CODE (t) == ARRAY_TYPE)
14064 if (TYPE_ARRAY_MAX_SIZE (t)
14065 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
14067 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
14068 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
14069 error_found = true;
14072 else if (TYPE_MAXVAL (t))
14074 error ("TYPE_MAXVAL non-NULL");
14075 debug_tree (TYPE_MAXVAL (t));
14076 error_found = true;
14079 /* Check various uses of TYPE_BINFO. */
14080 if (RECORD_OR_UNION_TYPE_P (t))
14082 if (!TYPE_BINFO (t))
14084 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
14086 error ("TYPE_BINFO is not TREE_BINFO");
14087 debug_tree (TYPE_BINFO (t));
14088 error_found = true;
14091 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
14093 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
14094 debug_tree (TYPE_LANG_SLOT_1 (t));
14095 error_found = true;
14098 /* Check various uses of TYPE_VALUES_RAW. */
14099 if (TREE_CODE (t) == ENUMERAL_TYPE)
14100 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
14102 tree value = TREE_VALUE (l);
14103 tree name = TREE_PURPOSE (l);
14105 /* C FE produces INTEGER_CST of INTEGER_TYPE, while C++ FE uses
14106 CONST_DECL of ENUMERAL TYPE. */
14107 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
14109 error ("Enum value is not CONST_DECL or INTEGER_CST")
14110 debug_tree (value);
14112 error_found = true;
14114 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
14115 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
14117 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
14118 debug_tree (value);
14120 error_found = true;
14122 if (TREE_CODE (name) != IDENTIFIER_NODE)
14124 error ("Enum value name is not IDENTIFIER_NODE");
14125 debug_tree (value);
14127 error_found = true;
14130 else if (TREE_CODE (t) == ARRAY_TYPE)
14132 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
14134 error ("Array TYPE_DOMAIN is not integer type");
14135 debug_tree (TYPE_DOMAIN (t));
14136 error_found = true;
14139 else if (RECORD_OR_UNION_TYPE_P (t))
14141 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
14143 error ("TYPE_FIELDS defined in incomplete type");
14144 error_found = true;
14146 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
14148 /* TODO: verify properties of decls. */
14149 if (TREE_CODE (fld) == FIELD_DECL)
14151 else if (TREE_CODE (fld) == TYPE_DECL)
14153 else if (TREE_CODE (fld) == CONST_DECL)
14155 else if (VAR_P (fld))
14157 else if (TREE_CODE (fld) == TEMPLATE_DECL)
14159 else if (TREE_CODE (fld) == USING_DECL)
14163 error ("Wrong tree in TYPE_FIELDS list");
14165 error_found = true;
14169 else if (TREE_CODE (t) == INTEGER_TYPE
14170 || TREE_CODE (t) == BOOLEAN_TYPE
14171 || TREE_CODE (t) == OFFSET_TYPE
14172 || TREE_CODE (t) == REFERENCE_TYPE
14173 || TREE_CODE (t) == NULLPTR_TYPE
14174 || TREE_CODE (t) == POINTER_TYPE
14176 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
14178 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
14179 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
14180 error_found = true;
14182 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
14184 error ("TYPE_CACHED_VALUES is not TREE_VEC");
14185 debug_tree (TYPE_CACHED_VALUES (t));
14186 error_found = true;
14188 /* Verify just enough of cache to ensure that no one copied it to new type.
14189 All copying should go by copy_node that should clear it. */
14190 else if (TYPE_CACHED_VALUES_P (t))
14193 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
14194 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
14195 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
14197 error ("wrong TYPE_CACHED_VALUES entry");
14198 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
14199 error_found = true;
14204 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
14205 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
14207 /* C++ FE uses TREE_PURPOSE to store initial values. */
14208 if (TREE_PURPOSE (l) && in_lto_p)
14210 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
14212 error_found = true;
14214 if (!TYPE_P (TREE_VALUE (l)))
14216 error ("Wrong entry in TYPE_ARG_TYPES list");
14218 error_found = true;
14221 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
14223 error ("TYPE_VALUES_RAW field is non-NULL");
14224 debug_tree (TYPE_VALUES_RAW (t));
14225 error_found = true;
14227 if (TREE_CODE (t) != INTEGER_TYPE
14228 && TREE_CODE (t) != BOOLEAN_TYPE
14229 && TREE_CODE (t) != OFFSET_TYPE
14230 && TREE_CODE (t) != REFERENCE_TYPE
14231 && TREE_CODE (t) != NULLPTR_TYPE
14232 && TREE_CODE (t) != POINTER_TYPE
14233 && TYPE_CACHED_VALUES_P (t))
14235 error ("TYPE_CACHED_VALUES_P is set while it should not")
14236 error_found = true;
14238 if (TYPE_STRING_FLAG (t)
14239 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
14241 error ("TYPE_STRING_FLAG is set on wrong type code");
14242 error_found = true;
14245 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
14246 TYPE_MAIN_VARIANT and it would be odd to add methods only to variants
14248 if (TREE_CODE (t) == METHOD_TYPE
14249 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
14251 error ("TYPE_METHOD_BASETYPE is not main variant");
14252 error_found = true;
14257 debug_tree (const_cast <tree> (t));
14258 internal_error ("verify_type failed");
14263 /* Return 1 if ARG interpreted as signed in its precision is known to be
14264 always positive or 2 if ARG is known to be always negative, or 3 if
14265 ARG may be positive or negative. */
/* NOTE(review): sampled listing -- the concrete "return 1/2/3" statements
   and several braces between the numbered lines are absent.  Strips
   widening conversions off ARG (zero-extensions from a narrower unsigned
   type are known non-negative), then consults SSA range info; for
   constants it tests the sign-extended value directly.  */
14268 get_range_pos_neg (tree arg)
14270 if (arg == error_mark_node)
14273 int prec = TYPE_PRECISION (TREE_TYPE (arg));
14275 if (TREE_CODE (arg) == INTEGER_CST)
14277 wide_int w = wi::sext (arg, prec);
14283 while (CONVERT_EXPR_P (arg)
14284 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
14285 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg, 0))) <= prec)
14287 arg = TREE_OPERAND (arg, 0);
14288 /* Narrower value zero extended into wider type
14289 will always result in positive values. */
14290 if (TYPE_UNSIGNED (TREE_TYPE (arg))
14291 && TYPE_PRECISION (TREE_TYPE (arg)) < prec)
14293 prec = TYPE_PRECISION (TREE_TYPE (arg));
14298 if (TREE_CODE (arg) != SSA_NAME)
14300 wide_int arg_min, arg_max;
14301 while (get_range_info (arg, &arg_min, &arg_max) != VR_RANGE)
14303 gimple *g = SSA_NAME_DEF_STMT (arg);
14304 if (is_gimple_assign (g)
14305 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (g)))
14307 tree t = gimple_assign_rhs1 (g);
14308 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
14309 && TYPE_PRECISION (TREE_TYPE (t)) <= prec)
14311 if (TYPE_UNSIGNED (TREE_TYPE (t))
14312 && TYPE_PRECISION (TREE_TYPE (t)) < prec)
14314 prec = TYPE_PRECISION (TREE_TYPE (t));
14323 if (TYPE_UNSIGNED (TREE_TYPE (arg)))
14325 /* For unsigned values, the "positive" range comes
14326 below the "negative" range. */
14327 if (!wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14329 if (wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14334 if (!wi::neg_p (wi::sext (arg_min, prec), SIGNED))
14336 if (wi::neg_p (wi::sext (arg_max, prec), SIGNED))
14345 /* Return true if ARG is marked with the nonnull attribute in the
14346 current function signature. */
/* NOTE(review): sampled listing -- the "return true/false" lines between
   the numbered statements are absent.  ARG must be a pointer-ish
   PARM_DECL of the current function (cfun); the static chain, the C++
   `this' parameter and reference parameters are non-null by construction
   (the latter two only under -fdelete-null-pointer-checks), otherwise the
   function's "nonnull" attribute lists are searched for ARG's 1-based
   position.  */
14349 nonnull_arg_p (const_tree arg)
14351 tree t, attrs, fntype;
14352 unsigned HOST_WIDE_INT arg_num;
14354 gcc_assert (TREE_CODE (arg) == PARM_DECL
14355 && (POINTER_TYPE_P (TREE_TYPE (arg))
14356 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
14358 /* The static chain decl is always non null. */
14359 if (arg == cfun->static_chain_decl)
14362 /* THIS argument of method is always non-NULL. */
14363 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
14364 && arg == DECL_ARGUMENTS (cfun->decl)
14365 && flag_delete_null_pointer_checks)
14368 /* Values passed by reference are always non-NULL. */
14369 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
14370 && flag_delete_null_pointer_checks)
14373 fntype = TREE_TYPE (cfun->decl);
14374 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
14376 attrs = lookup_attribute ("nonnull", attrs);
14378 /* If "nonnull" wasn't specified, we know nothing about the argument. */
14379 if (attrs == NULL_TREE)
14382 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
14383 if (TREE_VALUE (attrs) == NULL_TREE)
14386 /* Get the position number for ARG in the function signature. */
14387 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
14389 t = DECL_CHAIN (t), arg_num++)
14395 gcc_assert (t == arg);
14397 /* Now see if ARG_NUM is mentioned in the nonnull list. */
14398 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
14400 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
14408 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
   information carried by LOC (the original comment continuation and the
   function signature lines are elided in this listing).  Returns the new
   ad-hoc location_t. */
14412 set_block (location_t loc, tree block)
14414 location_t pure_loc = get_pure_location (loc);
14415 source_range src_range = get_range_from_loc (line_table, loc);
14416 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
/* Set the source range of EXPR to run from START to FINISH; convenience
   wrapper that packs the two location_t endpoints into a source_range and
   delegates to the source_range overload below.  (Return type line is
   elided in this listing -- presumably location_t, matching the callee.)  */
14420 set_source_range (tree expr, location_t start, location_t finish)
14422 source_range src_range;
14423 src_range.m_start = start;
14424 src_range.m_finish = finish;
14425 return set_source_range (expr, src_range);
/* Attach SRC_RANGE to EXPR's location as an ad-hoc location and store it
   back via SET_EXPR_LOCATION.  Non-expression trees cannot carry a range
   and yield UNKNOWN_LOCATION.  (Lines between 14435 and 14439 -- the
   remaining COMBINE_LOCATION_DATA arguments and the return -- are elided
   in this listing.)  */
14429 set_source_range (tree expr, source_range src_range)
14431 if (!EXPR_P (expr))
14432 return UNKNOWN_LOCATION;
14434 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14435 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14439 SET_EXPR_LOCATION (expr, adhoc);
14443 /* Return the name of combined function FN, for debugging purposes.
   For a built-in, the identifier of its explicit declaration; otherwise
   the internal-function name table entry.  */
14446 combined_fn_name (combined_fn fn)
14448 if (builtin_fn_p (fn))
14450 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14451 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14454 return internal_fn_name (as_internal_fn (fn));
14457 /* Return a bitmap with a bit set corresponding to each argument in
14458 a function call type FNTYPE declared with attribute nonnull,
14459 or null if none of the function's argument are nonnull. The caller
14460 must free the bitmap. */
/* NOTE(review): sampled listing -- early "return NULL" lines, loop braces
   and the final "return argmap" are elided.  Bits are 0-based (attribute
   indices are 1-based, hence the "- 1").  An argument-less nonnull clears
   the map: empty-but-non-null means "all arguments nonnull".  */
14463 get_nonnull_args (const_tree fntype)
14465 if (fntype == NULL_TREE)
14468 tree attrs = TYPE_ATTRIBUTES (fntype);
14472 bitmap argmap = NULL;
14474 /* A function declaration can specify multiple attribute nonnull,
14475 each with zero or more arguments. The loop below creates a bitmap
14476 representing a union of all the arguments. An empty (but non-null)
14477 bitmap means that all arguments have been declared nonnull. */
14478 for ( ; attrs; attrs = TREE_CHAIN (attrs))
14480 attrs = lookup_attribute ("nonnull", attrs);
14485 argmap = BITMAP_ALLOC (NULL);
14487 if (!TREE_VALUE (attrs))
14489 /* Clear the bitmap in case a previous attribute nonnull
14490 set it and this one overrides it for all arguments. */
14491 bitmap_clear (argmap);
14495 /* Iterate over the indices of the format arguments declared nonnull
14496 and set a bit for each. */
14497 for (tree idx = TREE_VALUE (attrs); idx; idx = TREE_CHAIN (idx))
14499 unsigned int val = TREE_INT_CST_LOW (TREE_VALUE (idx)) - 1;
14500 bitmap_set_bit (argmap, val);
14507 /* List of pointer types used to declare builtins before we have seen their
   real declaration (comment continuation elided in this listing).
   Each entry pairs the lazily-created node with the generic pointer node
   used as a stand-in, plus the struct tag name.
14510 Keep the size up to date in tree.h ! */
/* NOTE(review): the opening "{" of the initializer and the closing "};"
   are elided lines in this sampled listing.  */
14511 const builtin_structptr_type builtin_structptr_types[6] =
14513 { fileptr_type_node, ptr_type_node, "FILE" },
14514 { const_tm_ptr_type_node, const_ptr_type_node, "tm" },
14515 { fenv_t_ptr_type_node, ptr_type_node, "fenv_t" },
14516 { const_fenv_t_ptr_type_node, const_ptr_type_node, "fenv_t" },
14517 { fexcept_t_ptr_type_node, ptr_type_node, "fexcept_t" },
14518 { const_fexcept_t_ptr_type_node, const_ptr_type_node, "fexcept_t" }
14523 namespace selftest {
14525 /* Selftests for tree. */
14527 /* Verify that integer constants are sane. */
/* (The "static void" line preceding the function name is elided in this
   sampled listing.)  */
14530 test_integer_constants ()
14532 ASSERT_TRUE (integer_type_node != NULL);
14533 ASSERT_TRUE (build_int_cst (integer_type_node, 0) != NULL);
14535 tree type = integer_type_node;
14537 tree zero = build_zero_cst (type);
14538 ASSERT_EQ (INTEGER_CST, TREE_CODE (zero));
14539 ASSERT_EQ (type, TREE_TYPE (zero));
14541 tree one = build_int_cst (type, 1);
14542 ASSERT_EQ (INTEGER_CST, TREE_CODE (one));
/* NOTE(review): the next assertion re-checks TREE_TYPE (zero); given the
   preceding two lines exercise `one', TREE_TYPE (one) was presumably
   intended -- confirm before changing (the check is vacuously true as
   written).  */
14543 ASSERT_EQ (type, TREE_TYPE (zero));
14546 /* Verify identifiers. */
/* Checks that interning "foo" records both the length and the NUL-terminated
   spelling.  (The "static void" line is elided in this sampled listing.)  */
14549 test_identifiers ()
14551 tree identifier = get_identifier ("foo");
14552 ASSERT_EQ (3, IDENTIFIER_LENGTH (identifier));
14553 ASSERT_STREQ ("foo", IDENTIFIER_POINTER (identifier));
14556 /* Verify LABEL_DECL. */
/* NOTE(review): the function signature lines (presumably
   "static void test_labels ()") at original lines 14558-14560 are elided
   in this sampled listing; only the body statements remain.  A freshly
   built LABEL_DECL starts with UID -1 and FORCED_LABEL unset.  */
14561 tree identifier = get_identifier ("err");
14562 tree label_decl = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
14563 identifier, void_type_node);
14564 ASSERT_EQ (-1, LABEL_DECL_UID (label_decl));
14565 ASSERT_FALSE (FORCED_LABEL (label_decl));
14568 /* Run all of the selftests within this file. */
/* NOTE(review): the runner's signature (presumably "void tree_c_tests ()")
   and any further test invocations (e.g. the LABEL_DECL test above) are
   elided lines in this sampled listing; only two calls are visible.  */
14573 test_integer_constants ();
14574 test_identifiers ();
14578 } // namespace selftest
14580 #endif /* CHECKING_P */
14582 #include "gt-tree.h"