1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
27 It is intended to be language-independent but can occasionally
28 calls language-dependent routines. */
32 #include "coretypes.h"
37 #include "tree-pass.h"
40 #include "diagnostic.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
47 #include "toplev.h" /* get_random_seed */
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "internal-fn.h"
54 #include "gimple-iterator.h"
58 #include "langhooks-def.h"
59 #include "tree-diagnostic.h"
62 #include "print-tree.h"
63 #include "ipa-utils.h"
65 /* Tree code classes. */
67 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
68 #define END_OF_BASE_TREE_CODES tcc_exceptional,
70 const enum tree_code_class tree_code_type[] = {
71 #include "all-tree.def"
75 #undef END_OF_BASE_TREE_CODES
77 /* Table indexed by tree code giving number of expression
78 operands beyond the fixed part of the node structure.
79 Not used for types or decls. */
81 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
82 #define END_OF_BASE_TREE_CODES 0,
84 const unsigned char tree_code_length[] = {
85 #include "all-tree.def"
89 #undef END_OF_BASE_TREE_CODES
91 /* Names of tree components.
92 Used for printing out the tree and error messages. */
93 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
94 #define END_OF_BASE_TREE_CODES "@dummy",
96 static const char *const tree_code_name[] = {
97 #include "all-tree.def"
101 #undef END_OF_BASE_TREE_CODES
103 /* Each tree code class has an associated string representation.
104 These must correspond to the tree_code_class entries. */
106 const char *const tree_code_class_strings[] =
121 /* obstack.[ch] explicitly declined to prototype this. */
122 extern int _obstack_allocated_p (struct obstack *h, void *obj);
124 /* Statistics-gathering stuff. */
126 static int tree_code_counts[MAX_TREE_CODES];
127 int tree_node_counts[(int) all_kinds];
128 int tree_node_sizes[(int) all_kinds];
130 /* Keep in sync with tree.h:enum tree_node_kind. */
131 static const char * const tree_node_kind_names[] = {
150 /* Unique id for next decl created. */
151 static GTY(()) int next_decl_uid;
152 /* Unique id for next type created. */
153 static GTY(()) int next_type_uid = 1;
154 /* Unique id for next debug decl created. Use negative numbers,
155 to catch erroneous uses. */
156 static GTY(()) int next_debug_decl_uid;
158 /* Since we cannot rehash a type after it is in the table, we have to
159 keep the hash code. */
161 struct GTY((for_user)) type_hash {
166 /* Initial size of the hash table (rounded to next prime). */
167 #define TYPE_HASH_INITIAL_SIZE 1000
169 struct type_cache_hasher : ggc_cache_ptr_hash<type_hash>
171 static hashval_t hash (type_hash *t) { return t->hash; }
172 static bool equal (type_hash *a, type_hash *b);
175 keep_cache_entry (type_hash *&t)
177 return ggc_marked_p (t->type);
181 /* Now here is the hash table. When recording a type, it is added to
182 the slot whose index is the hash code. Note that the hash table is
183 used for several kinds of types (function types, array types and
184 array index range types, for now). While all these live in the
185 same table, they are completely independent, and the hash code is
186 computed differently for each of these. */
188 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
190 /* Hash table and temporary node for larger integer const values. */
191 static GTY (()) tree int_cst_node;
193 struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
195 static hashval_t hash (tree t);
196 static bool equal (tree x, tree y);
199 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
201 /* Hash table for optimization flags and target option flags. Use the same
202 hash table for both sets of options. Nodes for building the current
203 optimization and target option nodes. The assumption is most of the time
204 the options created will already be in the hash table, so we avoid
205 allocating and freeing up a node repeatably. */
206 static GTY (()) tree cl_optimization_node;
207 static GTY (()) tree cl_target_option_node;
209 struct cl_option_hasher : ggc_cache_ptr_hash<tree_node>
211 static hashval_t hash (tree t);
212 static bool equal (tree x, tree y);
215 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
217 /* General tree->tree mapping structure for use in hash tables. */
221 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
224 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
226 struct tree_vec_map_cache_hasher : ggc_cache_ptr_hash<tree_vec_map>
228 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
231 equal (tree_vec_map *a, tree_vec_map *b)
233 return a->base.from == b->base.from;
237 keep_cache_entry (tree_vec_map *&m)
239 return ggc_marked_p (m->base.from);
244 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
246 static void set_type_quals (tree, int);
247 static void print_type_hash_statistics (void);
248 static void print_debug_expr_statistics (void);
249 static void print_value_expr_statistics (void);
250 static void type_hash_list (const_tree, inchash::hash &);
251 static void attribute_hash_list (const_tree, inchash::hash &);
253 tree global_trees[TI_MAX];
254 tree integer_types[itk_none];
256 bool int_n_enabled_p[NUM_INT_N_ENTS];
257 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
259 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
261 /* Number of operands for each OpenMP clause. */
262 unsigned const char omp_clause_num_ops[] =
264 0, /* OMP_CLAUSE_ERROR */
265 1, /* OMP_CLAUSE_PRIVATE */
266 1, /* OMP_CLAUSE_SHARED */
267 1, /* OMP_CLAUSE_FIRSTPRIVATE */
268 2, /* OMP_CLAUSE_LASTPRIVATE */
269 5, /* OMP_CLAUSE_REDUCTION */
270 1, /* OMP_CLAUSE_COPYIN */
271 1, /* OMP_CLAUSE_COPYPRIVATE */
272 3, /* OMP_CLAUSE_LINEAR */
273 2, /* OMP_CLAUSE_ALIGNED */
274 1, /* OMP_CLAUSE_DEPEND */
275 1, /* OMP_CLAUSE_UNIFORM */
276 1, /* OMP_CLAUSE_TO_DECLARE */
277 1, /* OMP_CLAUSE_LINK */
278 2, /* OMP_CLAUSE_FROM */
279 2, /* OMP_CLAUSE_TO */
280 2, /* OMP_CLAUSE_MAP */
281 1, /* OMP_CLAUSE_USE_DEVICE_PTR */
282 1, /* OMP_CLAUSE_IS_DEVICE_PTR */
283 2, /* OMP_CLAUSE__CACHE_ */
284 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
285 2, /* OMP_CLAUSE_GANG */
286 1, /* OMP_CLAUSE_ASYNC */
287 1, /* OMP_CLAUSE_WAIT */
288 0, /* OMP_CLAUSE_AUTO */
289 0, /* OMP_CLAUSE_SEQ */
290 1, /* OMP_CLAUSE__LOOPTEMP_ */
291 1, /* OMP_CLAUSE_IF */
292 1, /* OMP_CLAUSE_NUM_THREADS */
293 1, /* OMP_CLAUSE_SCHEDULE */
294 0, /* OMP_CLAUSE_NOWAIT */
295 1, /* OMP_CLAUSE_ORDERED */
296 0, /* OMP_CLAUSE_DEFAULT */
297 3, /* OMP_CLAUSE_COLLAPSE */
298 0, /* OMP_CLAUSE_UNTIED */
299 1, /* OMP_CLAUSE_FINAL */
300 0, /* OMP_CLAUSE_MERGEABLE */
301 1, /* OMP_CLAUSE_DEVICE */
302 1, /* OMP_CLAUSE_DIST_SCHEDULE */
303 0, /* OMP_CLAUSE_INBRANCH */
304 0, /* OMP_CLAUSE_NOTINBRANCH */
305 1, /* OMP_CLAUSE_NUM_TEAMS */
306 1, /* OMP_CLAUSE_THREAD_LIMIT */
307 0, /* OMP_CLAUSE_PROC_BIND */
308 1, /* OMP_CLAUSE_SAFELEN */
309 1, /* OMP_CLAUSE_SIMDLEN */
310 0, /* OMP_CLAUSE_FOR */
311 0, /* OMP_CLAUSE_PARALLEL */
312 0, /* OMP_CLAUSE_SECTIONS */
313 0, /* OMP_CLAUSE_TASKGROUP */
314 1, /* OMP_CLAUSE_PRIORITY */
315 1, /* OMP_CLAUSE_GRAINSIZE */
316 1, /* OMP_CLAUSE_NUM_TASKS */
317 0, /* OMP_CLAUSE_NOGROUP */
318 0, /* OMP_CLAUSE_THREADS */
319 0, /* OMP_CLAUSE_SIMD */
320 1, /* OMP_CLAUSE_HINT */
321 0, /* OMP_CLAUSE_DEFALTMAP */
322 1, /* OMP_CLAUSE__SIMDUID_ */
323 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
324 0, /* OMP_CLAUSE_INDEPENDENT */
325 1, /* OMP_CLAUSE_WORKER */
326 1, /* OMP_CLAUSE_VECTOR */
327 1, /* OMP_CLAUSE_NUM_GANGS */
328 1, /* OMP_CLAUSE_NUM_WORKERS */
329 1, /* OMP_CLAUSE_VECTOR_LENGTH */
330 1, /* OMP_CLAUSE_TILE */
331 2, /* OMP_CLAUSE__GRIDDIM_ */
334 const char * const omp_clause_code_name[] =
407 /* Return the tree node structure used by tree code CODE. */
409 static inline enum tree_node_structure_enum
410 tree_node_structure_for_code (enum tree_code code)
412 switch (TREE_CODE_CLASS (code))
414 case tcc_declaration:
419 return TS_FIELD_DECL;
425 return TS_LABEL_DECL;
427 return TS_RESULT_DECL;
428 case DEBUG_EXPR_DECL:
431 return TS_CONST_DECL;
435 return TS_FUNCTION_DECL;
436 case TRANSLATION_UNIT_DECL:
437 return TS_TRANSLATION_UNIT_DECL;
439 return TS_DECL_NON_COMMON;
443 return TS_TYPE_NON_COMMON;
452 default: /* tcc_constant and tcc_exceptional */
457 /* tcc_constant cases. */
458 case VOID_CST: return TS_TYPED;
459 case INTEGER_CST: return TS_INT_CST;
460 case REAL_CST: return TS_REAL_CST;
461 case FIXED_CST: return TS_FIXED_CST;
462 case COMPLEX_CST: return TS_COMPLEX;
463 case VECTOR_CST: return TS_VECTOR;
464 case STRING_CST: return TS_STRING;
465 /* tcc_exceptional cases. */
466 case ERROR_MARK: return TS_COMMON;
467 case IDENTIFIER_NODE: return TS_IDENTIFIER;
468 case TREE_LIST: return TS_LIST;
469 case TREE_VEC: return TS_VEC;
470 case SSA_NAME: return TS_SSA_NAME;
471 case PLACEHOLDER_EXPR: return TS_COMMON;
472 case STATEMENT_LIST: return TS_STATEMENT_LIST;
473 case STATEMENT_LIST_END: return TS_COMMON;
474 case BLOCK: return TS_BLOCK;
475 case CONSTRUCTOR: return TS_CONSTRUCTOR;
476 case TREE_BINFO: return TS_BINFO;
477 case OMP_CLAUSE: return TS_OMP_CLAUSE;
478 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
479 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
487 /* Initialize tree_contains_struct to describe the hierarchy of tree
491 initialize_tree_contains_struct (void)
495 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
498 enum tree_node_structure_enum ts_code;
500 code = (enum tree_code) i;
501 ts_code = tree_node_structure_for_code (code);
503 /* Mark the TS structure itself. */
504 tree_contains_struct[code][ts_code] = 1;
506 /* Mark all the structures that TS is derived from. */
524 case TS_STATEMENT_LIST:
525 MARK_TS_TYPED (code);
529 case TS_DECL_MINIMAL:
535 case TS_OPTIMIZATION:
536 case TS_TARGET_OPTION:
537 MARK_TS_COMMON (code);
540 case TS_TYPE_WITH_LANG_SPECIFIC:
541 MARK_TS_TYPE_COMMON (code);
544 case TS_TYPE_NON_COMMON:
545 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
549 MARK_TS_DECL_MINIMAL (code);
554 MARK_TS_DECL_COMMON (code);
557 case TS_DECL_NON_COMMON:
558 MARK_TS_DECL_WITH_VIS (code);
561 case TS_DECL_WITH_VIS:
565 MARK_TS_DECL_WRTL (code);
569 MARK_TS_DECL_COMMON (code);
573 MARK_TS_DECL_WITH_VIS (code);
577 case TS_FUNCTION_DECL:
578 MARK_TS_DECL_NON_COMMON (code);
581 case TS_TRANSLATION_UNIT_DECL:
582 MARK_TS_DECL_COMMON (code);
590 /* Basic consistency checks for attributes used in fold. */
591 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
592 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
593 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
594 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
595 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
596 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
597 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
598 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
599 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
600 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
601 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
602 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
603 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
604 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
605 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
606 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
607 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
608 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
609 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
610 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
611 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
612 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
613 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
614 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
615 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
616 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
617 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
618 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
619 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
620 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
621 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
622 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
623 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
624 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
625 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
626 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
627 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
628 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
629 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
630 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
639 /* Initialize the hash table of types. */
641 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
644 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
647 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
649 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
651 int_cst_node = make_int_cst (1, 1);
653 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
655 cl_optimization_node = make_node (OPTIMIZATION_NODE);
656 cl_target_option_node = make_node (TARGET_OPTION_NODE);
658 /* Initialize the tree_contains_struct array. */
659 initialize_tree_contains_struct ();
660 lang_hooks.init_ts ();
664 /* The name of the object as the assembler will see it (but before any
665 translations made by ASM_OUTPUT_LABELREF). Often this is the same
666 as DECL_NAME. It is an IDENTIFIER_NODE. */
668 decl_assembler_name (tree decl)
670 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
671 lang_hooks.set_decl_assembler_name (decl);
672 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
675 /* When the target supports COMDAT groups, this indicates which group the
676 DECL is associated with. This can be either an IDENTIFIER_NODE or a
677 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
679 decl_comdat_group (const_tree node)
681 struct symtab_node *snode = symtab_node::get (node);
684 return snode->get_comdat_group ();
687 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
689 decl_comdat_group_id (const_tree node)
691 struct symtab_node *snode = symtab_node::get (node);
694 return snode->get_comdat_group_id ();
697 /* When the target supports named section, return its name as IDENTIFIER_NODE
698 or NULL if it is in no section. */
700 decl_section_name (const_tree node)
702 struct symtab_node *snode = symtab_node::get (node);
705 return snode->get_section ();
708 /* Set section name of NODE to VALUE (that is expected to be
711 set_decl_section_name (tree node, const char *value)
713 struct symtab_node *snode;
717 snode = symtab_node::get (node);
721 else if (TREE_CODE (node) == VAR_DECL)
722 snode = varpool_node::get_create (node);
724 snode = cgraph_node::get_create (node);
725 snode->set_section (value);
728 /* Return TLS model of a variable NODE. */
730 decl_tls_model (const_tree node)
732 struct varpool_node *snode = varpool_node::get (node);
734 return TLS_MODEL_NONE;
735 return snode->tls_model;
738 /* Set TLS model of variable NODE to MODEL. */
740 set_decl_tls_model (tree node, enum tls_model model)
742 struct varpool_node *vnode;
744 if (model == TLS_MODEL_NONE)
746 vnode = varpool_node::get (node);
751 vnode = varpool_node::get_create (node);
752 vnode->tls_model = model;
755 /* Compute the number of bytes occupied by a tree with code CODE.
756 This function cannot be used for nodes that have variable sizes,
757 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
759 tree_code_size (enum tree_code code)
761 switch (TREE_CODE_CLASS (code))
763 case tcc_declaration: /* A decl node */
768 return sizeof (struct tree_field_decl);
770 return sizeof (struct tree_parm_decl);
772 return sizeof (struct tree_var_decl);
774 return sizeof (struct tree_label_decl);
776 return sizeof (struct tree_result_decl);
778 return sizeof (struct tree_const_decl);
780 return sizeof (struct tree_type_decl);
782 return sizeof (struct tree_function_decl);
783 case DEBUG_EXPR_DECL:
784 return sizeof (struct tree_decl_with_rtl);
785 case TRANSLATION_UNIT_DECL:
786 return sizeof (struct tree_translation_unit_decl);
790 return sizeof (struct tree_decl_non_common);
792 return lang_hooks.tree_size (code);
796 case tcc_type: /* a type node */
797 return sizeof (struct tree_type_non_common);
799 case tcc_reference: /* a reference */
800 case tcc_expression: /* an expression */
801 case tcc_statement: /* an expression with side effects */
802 case tcc_comparison: /* a comparison expression */
803 case tcc_unary: /* a unary arithmetic expression */
804 case tcc_binary: /* a binary arithmetic expression */
805 return (sizeof (struct tree_exp)
806 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
808 case tcc_constant: /* a constant */
811 case VOID_CST: return sizeof (struct tree_typed);
812 case INTEGER_CST: gcc_unreachable ();
813 case REAL_CST: return sizeof (struct tree_real_cst);
814 case FIXED_CST: return sizeof (struct tree_fixed_cst);
815 case COMPLEX_CST: return sizeof (struct tree_complex);
816 case VECTOR_CST: return sizeof (struct tree_vector);
817 case STRING_CST: gcc_unreachable ();
819 return lang_hooks.tree_size (code);
822 case tcc_exceptional: /* something random, like an identifier. */
825 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
826 case TREE_LIST: return sizeof (struct tree_list);
829 case STATEMENT_LIST_END:
830 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
833 case OMP_CLAUSE: gcc_unreachable ();
835 case SSA_NAME: return sizeof (struct tree_ssa_name);
837 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
838 case BLOCK: return sizeof (struct tree_block);
839 case CONSTRUCTOR: return sizeof (struct tree_constructor);
840 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
841 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
844 return lang_hooks.tree_size (code);
852 /* Compute the number of bytes occupied by NODE. This routine only
853 looks at TREE_CODE, except for those nodes that have variable sizes. */
855 tree_size (const_tree node)
857 const enum tree_code code = TREE_CODE (node);
861 return (sizeof (struct tree_int_cst)
862 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
865 return (offsetof (struct tree_binfo, base_binfos)
867 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
870 return (sizeof (struct tree_vec)
871 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
874 return (sizeof (struct tree_vector)
875 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
878 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
881 return (sizeof (struct tree_omp_clause)
882 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
886 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
887 return (sizeof (struct tree_exp)
888 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
890 return tree_code_size (code);
894 /* Record interesting allocation statistics for a tree node with CODE
898 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
899 size_t length ATTRIBUTE_UNUSED)
901 enum tree_code_class type = TREE_CODE_CLASS (code);
904 if (!GATHER_STATISTICS)
909 case tcc_declaration: /* A decl node */
913 case tcc_type: /* a type node */
917 case tcc_statement: /* an expression with side effects */
921 case tcc_reference: /* a reference */
925 case tcc_expression: /* an expression */
926 case tcc_comparison: /* a comparison expression */
927 case tcc_unary: /* a unary arithmetic expression */
928 case tcc_binary: /* a binary arithmetic expression */
932 case tcc_constant: /* a constant */
936 case tcc_exceptional: /* something random, like an identifier. */
939 case IDENTIFIER_NODE:
952 kind = ssa_name_kind;
964 kind = omp_clause_kind;
981 tree_code_counts[(int) code]++;
982 tree_node_counts[(int) kind]++;
983 tree_node_sizes[(int) kind] += length;
986 /* Allocate and return a new UID from the DECL_UID namespace. */
989 allocate_decl_uid (void)
991 return next_decl_uid++;
994 /* Return a newly allocated node of code CODE. For decl and type
995 nodes, some other fields are initialized. The rest of the node is
996 initialized to zero. This function cannot be used for TREE_VEC,
997 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1000 Achoo! I got a code in the node. */
1003 make_node_stat (enum tree_code code MEM_STAT_DECL)
1006 enum tree_code_class type = TREE_CODE_CLASS (code);
1007 size_t length = tree_code_size (code);
1009 record_node_allocation_statistics (code, length);
1011 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1012 TREE_SET_CODE (t, code);
1017 TREE_SIDE_EFFECTS (t) = 1;
1020 case tcc_declaration:
1021 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1023 if (code == FUNCTION_DECL)
1025 SET_DECL_ALIGN (t, FUNCTION_BOUNDARY);
1026 DECL_MODE (t) = FUNCTION_MODE;
1029 SET_DECL_ALIGN (t, 1);
1031 DECL_SOURCE_LOCATION (t) = input_location;
1032 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1033 DECL_UID (t) = --next_debug_decl_uid;
1036 DECL_UID (t) = allocate_decl_uid ();
1037 SET_DECL_PT_UID (t, -1);
1039 if (TREE_CODE (t) == LABEL_DECL)
1040 LABEL_DECL_UID (t) = -1;
1045 TYPE_UID (t) = next_type_uid++;
1046 SET_TYPE_ALIGN (t, BITS_PER_UNIT);
1047 TYPE_USER_ALIGN (t) = 0;
1048 TYPE_MAIN_VARIANT (t) = t;
1049 TYPE_CANONICAL (t) = t;
1051 /* Default to no attributes for type, but let target change that. */
1052 TYPE_ATTRIBUTES (t) = NULL_TREE;
1053 targetm.set_default_type_attributes (t);
1055 /* We have not yet computed the alias set for this type. */
1056 TYPE_ALIAS_SET (t) = -1;
1060 TREE_CONSTANT (t) = 1;
1063 case tcc_expression:
1069 case PREDECREMENT_EXPR:
1070 case PREINCREMENT_EXPR:
1071 case POSTDECREMENT_EXPR:
1072 case POSTINCREMENT_EXPR:
1073 /* All of these have side-effects, no matter what their
1075 TREE_SIDE_EFFECTS (t) = 1;
1083 case tcc_exceptional:
1086 case TARGET_OPTION_NODE:
1087 TREE_TARGET_OPTION(t)
1088 = ggc_cleared_alloc<struct cl_target_option> ();
1091 case OPTIMIZATION_NODE:
1092 TREE_OPTIMIZATION (t)
1093 = ggc_cleared_alloc<struct cl_optimization> ();
1102 /* Other classes need no special treatment. */
1109 /* Free tree node. */
1112 free_node (tree node)
1114 enum tree_code code = TREE_CODE (node);
1115 if (GATHER_STATISTICS)
1117 tree_code_counts[(int) TREE_CODE (node)]--;
1118 tree_node_counts[(int) t_kind]--;
1119 tree_node_sizes[(int) t_kind] -= tree_size (node);
1121 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1122 vec_free (CONSTRUCTOR_ELTS (node));
1123 else if (code == BLOCK)
1124 vec_free (BLOCK_NONLOCALIZED_VARS (node));
1125 else if (code == TREE_BINFO)
1126 vec_free (BINFO_BASE_ACCESSES (node));
1130 /* Return a new node with the same contents as NODE except that its
1131 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1134 copy_node_stat (tree node MEM_STAT_DECL)
1137 enum tree_code code = TREE_CODE (node);
1140 gcc_assert (code != STATEMENT_LIST);
1142 length = tree_size (node);
1143 record_node_allocation_statistics (code, length);
1144 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1145 memcpy (t, node, length);
1147 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1149 TREE_ASM_WRITTEN (t) = 0;
1150 TREE_VISITED (t) = 0;
1152 if (TREE_CODE_CLASS (code) == tcc_declaration)
1154 if (code == DEBUG_EXPR_DECL)
1155 DECL_UID (t) = --next_debug_decl_uid;
1158 DECL_UID (t) = allocate_decl_uid ();
1159 if (DECL_PT_UID_SET_P (node))
1160 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1162 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1163 && DECL_HAS_VALUE_EXPR_P (node))
1165 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1166 DECL_HAS_VALUE_EXPR_P (t) = 1;
1168 /* DECL_DEBUG_EXPR is copied explicitely by callers. */
1169 if (TREE_CODE (node) == VAR_DECL)
1171 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1172 t->decl_with_vis.symtab_node = NULL;
1174 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1176 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1177 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1179 if (TREE_CODE (node) == FUNCTION_DECL)
1181 DECL_STRUCT_FUNCTION (t) = NULL;
1182 t->decl_with_vis.symtab_node = NULL;
1185 else if (TREE_CODE_CLASS (code) == tcc_type)
1187 TYPE_UID (t) = next_type_uid++;
1188 /* The following is so that the debug code for
1189 the copy is different from the original type.
1190 The two statements usually duplicate each other
1191 (because they clear fields of the same union),
1192 but the optimizer should catch that. */
1193 TYPE_SYMTAB_POINTER (t) = 0;
1194 TYPE_SYMTAB_ADDRESS (t) = 0;
1196 /* Do not copy the values cache. */
1197 if (TYPE_CACHED_VALUES_P (t))
1199 TYPE_CACHED_VALUES_P (t) = 0;
1200 TYPE_CACHED_VALUES (t) = NULL_TREE;
1203 else if (code == TARGET_OPTION_NODE)
1205 TREE_TARGET_OPTION (t) = ggc_alloc<struct cl_target_option>();
1206 memcpy (TREE_TARGET_OPTION (t), TREE_TARGET_OPTION (node),
1207 sizeof (struct cl_target_option));
1209 else if (code == OPTIMIZATION_NODE)
1211 TREE_OPTIMIZATION (t) = ggc_alloc<struct cl_optimization>();
1212 memcpy (TREE_OPTIMIZATION (t), TREE_OPTIMIZATION (node),
1213 sizeof (struct cl_optimization));
1219 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1220 For example, this can copy a list made of TREE_LIST nodes. */
1223 copy_list (tree list)
1231 head = prev = copy_node (list);
1232 next = TREE_CHAIN (list);
1235 TREE_CHAIN (prev) = copy_node (next);
1236 prev = TREE_CHAIN (prev);
1237 next = TREE_CHAIN (next);
1243 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1244 INTEGER_CST with value CST and type TYPE. */
1247 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1249 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1250 /* We need extra HWIs if CST is an unsigned integer with its
1252 if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
1253 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1254 return cst.get_len ();
1257 /* Return a new INTEGER_CST with value CST and type TYPE. */
1260 build_new_int_cst (tree type, const wide_int &cst)
1262 unsigned int len = cst.get_len ();
1263 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1264 tree nt = make_int_cst (len, ext_len);
1269 TREE_INT_CST_ELT (nt, ext_len)
1270 = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1271 for (unsigned int i = len; i < ext_len; ++i)
1272 TREE_INT_CST_ELT (nt, i) = -1;
1274 else if (TYPE_UNSIGNED (type)
1275 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1278 TREE_INT_CST_ELT (nt, len)
1279 = zext_hwi (cst.elt (len),
1280 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1283 for (unsigned int i = 0; i < len; i++)
1284 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1285 TREE_TYPE (nt) = type;
1289 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1292 build_int_cst (tree type, HOST_WIDE_INT low)
1294 /* Support legacy code. */
1296 type = integer_type_node;
1298 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1302 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1304 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1307 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1310 build_int_cst_type (tree type, HOST_WIDE_INT low)
1313 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1316 /* Constructs tree in type TYPE from with value given by CST. Signedness
1317 of CST is assumed to be the same as the signedness of TYPE. */
1320 double_int_to_tree (tree type, double_int cst)
1322 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1325 /* We force the wide_int CST to the range of the type TYPE by sign or
1326 zero extending it. OVERFLOWABLE indicates if we are interested in
1327 overflow of the value, when >0 we are only interested in signed
1328 overflow, for <0 we are interested in any overflow. OVERFLOWED
1329 indicates whether overflow has already occurred. CONST_OVERFLOWED
1330 indicates whether constant overflow has already occurred. We force
1331 T's value to be within range of T's type (by setting to 0 or 1 all
1332 the bits outside the type's range). We set TREE_OVERFLOWED if,
1333 OVERFLOWED is nonzero,
1334 or OVERFLOWABLE is >0 and signed overflow occurs
1335 or OVERFLOWABLE is <0 and any overflow occurs
1336 We return a new tree node for the extended wide_int. The node
1337 is shared if no overflow flags are set. */
1341 force_fit_type (tree type, const wide_int_ref &cst,
1342 int overflowable, bool overflowed)
1344 signop sign = TYPE_SIGN (type);
1346 /* If we need to set overflow flags, return a new unshared node. */
1347 if (overflowed || !wi::fits_to_tree_p (cst, type))
1351 || (overflowable > 0 && sign == SIGNED))
1353 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1354 tree t = build_new_int_cst (type, tmp);
1355 TREE_OVERFLOW (t) = 1;
1360 /* Else build a shared node. */
1361 return wide_int_to_tree (type, cst);
1364 /* These are the hash table functions for the hash table of INTEGER_CST
1365 nodes of a sizetype. */
1367 /* Return the hash code X, an INTEGER_CST. */
1370 int_cst_hasher::hash (tree x)
1372 const_tree const t = x;
1373 hashval_t code = TYPE_UID (TREE_TYPE (t));
1376 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1377 code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);
1382 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1383 is the same as that given by *Y, which is the same. */
1386 int_cst_hasher::equal (tree x, tree y)
1388 const_tree const xt = x;
1389 const_tree const yt = y;
1391 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1392 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1393 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1396 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1397 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1403 /* Create an INT_CST node of TYPE and value CST.
1404 The returned node is always shared. For small integers we use a
1405 per-type vector cache, for larger ones we use a single hash table.
1406 The value is extended from its precision according to the sign of
1407 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1408 the upper bits and ensures that hashing and value equality based
1409 upon the underlying HOST_WIDE_INTs works without masking. */
1412 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1419 unsigned int prec = TYPE_PRECISION (type);
1420 signop sgn = TYPE_SIGN (type);
1422 /* Verify that everything is canonical. */
1423 int l = pcst.get_len ();
1426 if (pcst.elt (l - 1) == 0)
1427 gcc_checking_assert (pcst.elt (l - 2) < 0);
1428 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1429 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1432 wide_int cst = wide_int::from (pcst, prec, sgn);
1433 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1437 /* We just need to store a single HOST_WIDE_INT. */
1439 if (TYPE_UNSIGNED (type))
1440 hwi = cst.to_uhwi ();
1442 hwi = cst.to_shwi ();
1444 switch (TREE_CODE (type))
1447 gcc_assert (hwi == 0);
1451 case REFERENCE_TYPE:
1452 case POINTER_BOUNDS_TYPE:
1453 /* Cache NULL pointer and zero bounds. */
1462 /* Cache false or true. */
1464 if (IN_RANGE (hwi, 0, 1))
1470 if (TYPE_SIGN (type) == UNSIGNED)
1473 limit = INTEGER_SHARE_LIMIT;
1474 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1479 /* Cache [-1, N). */
1480 limit = INTEGER_SHARE_LIMIT + 1;
1481 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1495 /* Look for it in the type's vector of small shared ints. */
1496 if (!TYPE_CACHED_VALUES_P (type))
1498 TYPE_CACHED_VALUES_P (type) = 1;
1499 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1502 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1504 /* Make sure no one is clobbering the shared constant. */
1505 gcc_checking_assert (TREE_TYPE (t) == type
1506 && TREE_INT_CST_NUNITS (t) == 1
1507 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1508 && TREE_INT_CST_EXT_NUNITS (t) == 1
1509 && TREE_INT_CST_ELT (t, 0) == hwi);
1512 /* Create a new shared int. */
1513 t = build_new_int_cst (type, cst);
1514 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1519 /* Use the cache of larger shared ints, using int_cst_node as
1522 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1523 TREE_TYPE (int_cst_node) = type;
1525 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1529 /* Insert this one into the hash table. */
1532 /* Make a new node for next time round. */
1533 int_cst_node = make_int_cst (1, 1);
1539 /* The value either hashes properly or we drop it on the floor
1540 for the gc to take care of. There will not be enough of them
1543 tree nt = build_new_int_cst (type, cst);
1544 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1548 /* Insert this one into the hash table. */
1558 cache_integer_cst (tree t)
1560 tree type = TREE_TYPE (t);
1563 int prec = TYPE_PRECISION (type);
1565 gcc_assert (!TREE_OVERFLOW (t));
1567 switch (TREE_CODE (type))
1570 gcc_assert (integer_zerop (t));
1574 case REFERENCE_TYPE:
1575 /* Cache NULL pointer. */
1576 if (integer_zerop (t))
1584 /* Cache false or true. */
1586 if (wi::ltu_p (t, 2))
1587 ix = TREE_INT_CST_ELT (t, 0);
1592 if (TYPE_UNSIGNED (type))
1595 limit = INTEGER_SHARE_LIMIT;
1597 /* This is a little hokie, but if the prec is smaller than
1598 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1599 obvious test will not get the correct answer. */
1600 if (prec < HOST_BITS_PER_WIDE_INT)
1602 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1603 ix = tree_to_uhwi (t);
1605 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1606 ix = tree_to_uhwi (t);
1611 limit = INTEGER_SHARE_LIMIT + 1;
1613 if (integer_minus_onep (t))
1615 else if (!wi::neg_p (t))
1617 if (prec < HOST_BITS_PER_WIDE_INT)
1619 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1620 ix = tree_to_shwi (t) + 1;
1622 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1623 ix = tree_to_shwi (t) + 1;
1637 /* Look for it in the type's vector of small shared ints. */
1638 if (!TYPE_CACHED_VALUES_P (type))
1640 TYPE_CACHED_VALUES_P (type) = 1;
1641 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1644 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1645 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1649 /* Use the cache of larger shared ints. */
1650 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1651 /* If there is already an entry for the number verify it's the
1654 gcc_assert (wi::eq_p (tree (*slot), t));
1656 /* Otherwise insert this one into the hash table. */
1662 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1663 and the rest are zeros. */
1666 build_low_bits_mask (tree type, unsigned bits)
1668 gcc_assert (bits <= TYPE_PRECISION (type));
1670 return wide_int_to_tree (type, wi::mask (bits, false,
1671 TYPE_PRECISION (type)));
1674 /* Checks that X is integer constant that can be expressed in (unsigned)
1675 HOST_WIDE_INT without loss of precision. */
1678 cst_and_fits_in_hwi (const_tree x)
1680 return (TREE_CODE (x) == INTEGER_CST
1681 && (tree_fits_shwi_p (x) || tree_fits_uhwi_p (x)));
1684 /* Build a newly constructed VECTOR_CST node of length LEN. */
1687 make_vector_stat (unsigned len MEM_STAT_DECL)
1690 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1692 record_node_allocation_statistics (VECTOR_CST, length);
1694 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1696 TREE_SET_CODE (t, VECTOR_CST);
1697 TREE_CONSTANT (t) = 1;
1702 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1703 are in a list pointed to by VALS. */
1706 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1710 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1711 TREE_TYPE (v) = type;
1713 /* Iterate through elements and check for overflow. */
1714 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1716 tree value = vals[cnt];
1718 VECTOR_CST_ELT (v, cnt) = value;
1720 /* Don't crash if we get an address constant. */
1721 if (!CONSTANT_CLASS_P (value))
1724 over |= TREE_OVERFLOW (value);
1727 TREE_OVERFLOW (v) = over;
1731 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1732 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1735 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1737 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1738 unsigned HOST_WIDE_INT idx, pos = 0;
1741 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1743 if (TREE_CODE (value) == VECTOR_CST)
1744 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
1745 vec[pos++] = VECTOR_CST_ELT (value, i);
1749 while (pos < TYPE_VECTOR_SUBPARTS (type))
1750 vec[pos++] = build_zero_cst (TREE_TYPE (type));
1752 return build_vector (type, vec);
1755 /* Build a vector of type VECTYPE where all the elements are SCs. */
1757 build_vector_from_val (tree vectype, tree sc)
1759 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1761 if (sc == error_mark_node)
1764 /* Verify that the vector type is suitable for SC. Note that there
1765 is some inconsistency in the type-system with respect to restrict
1766 qualifications of pointers. Vector types always have a main-variant
1767 element type and the qualification is applied to the vector-type.
1768 So TREE_TYPE (vector-type) does not return a properly qualified
1769 vector element-type. */
1770 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1771 TREE_TYPE (vectype)));
1773 if (CONSTANT_CLASS_P (sc))
1775 tree *v = XALLOCAVEC (tree, nunits);
1776 for (i = 0; i < nunits; ++i)
1778 return build_vector (vectype, v);
1782 vec<constructor_elt, va_gc> *v;
1783 vec_alloc (v, nunits);
1784 for (i = 0; i < nunits; ++i)
1785 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1786 return build_constructor (vectype, v);
1790 /* Something has messed with the elements of CONSTRUCTOR C after it was built;
1791 calculate TREE_CONSTANT and TREE_SIDE_EFFECTS. */
1794 recompute_constructor_flags (tree c)
1798 bool constant_p = true;
1799 bool side_effects_p = false;
1800 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1802 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1804 /* Mostly ctors will have elts that don't have side-effects, so
1805 the usual case is to scan all the elements. Hence a single
1806 loop for both const and side effects, rather than one loop
1807 each (with early outs). */
1808 if (!TREE_CONSTANT (val))
1810 if (TREE_SIDE_EFFECTS (val))
1811 side_effects_p = true;
1814 TREE_SIDE_EFFECTS (c) = side_effects_p;
1815 TREE_CONSTANT (c) = constant_p;
1818 /* Make sure that TREE_CONSTANT and TREE_SIDE_EFFECTS are correct for
1822 verify_constructor_flags (tree c)
1826 bool constant_p = TREE_CONSTANT (c);
1827 bool side_effects_p = TREE_SIDE_EFFECTS (c);
1828 vec<constructor_elt, va_gc> *vals = CONSTRUCTOR_ELTS (c);
1830 FOR_EACH_CONSTRUCTOR_VALUE (vals, i, val)
1832 if (constant_p && !TREE_CONSTANT (val))
1833 internal_error ("non-constant element in constant CONSTRUCTOR");
1834 if (!side_effects_p && TREE_SIDE_EFFECTS (val))
1835 internal_error ("side-effects element in no-side-effects CONSTRUCTOR");
1839 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1840 are in the vec pointed to by VALS. */
1842 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1844 tree c = make_node (CONSTRUCTOR);
1846 TREE_TYPE (c) = type;
1847 CONSTRUCTOR_ELTS (c) = vals;
1849 recompute_constructor_flags (c);
1854 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1857 build_constructor_single (tree type, tree index, tree value)
1859 vec<constructor_elt, va_gc> *v;
1860 constructor_elt elt = {index, value};
1863 v->quick_push (elt);
1865 return build_constructor (type, v);
1869 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1870 are in a list pointed to by VALS. */
1872 build_constructor_from_list (tree type, tree vals)
1875 vec<constructor_elt, va_gc> *v = NULL;
1879 vec_alloc (v, list_length (vals));
1880 for (t = vals; t; t = TREE_CHAIN (t))
1881 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1884 return build_constructor (type, v);
1887 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1888 of elements, provided as index/value pairs. */
1891 build_constructor_va (tree type, int nelts, ...)
1893 vec<constructor_elt, va_gc> *v = NULL;
1896 va_start (p, nelts);
1897 vec_alloc (v, nelts);
1900 tree index = va_arg (p, tree);
1901 tree value = va_arg (p, tree);
1902 CONSTRUCTOR_APPEND_ELT (v, index, value);
1905 return build_constructor (type, v);
1908 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1911 build_fixed (tree type, FIXED_VALUE_TYPE f)
1914 FIXED_VALUE_TYPE *fp;
1916 v = make_node (FIXED_CST);
1917 fp = ggc_alloc<fixed_value> ();
1918 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1920 TREE_TYPE (v) = type;
1921 TREE_FIXED_CST_PTR (v) = fp;
1925 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1928 build_real (tree type, REAL_VALUE_TYPE d)
1931 REAL_VALUE_TYPE *dp;
1934 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1935 Consider doing it via real_convert now. */
1937 v = make_node (REAL_CST);
1938 dp = ggc_alloc<real_value> ();
1939 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1941 TREE_TYPE (v) = type;
1942 TREE_REAL_CST_PTR (v) = dp;
1943 TREE_OVERFLOW (v) = overflow;
1947 /* Like build_real, but first truncate D to the type. */
1950 build_real_truncate (tree type, REAL_VALUE_TYPE d)
1952 return build_real (type, real_value_truncate (TYPE_MODE (type), d));
1955 /* Return a new REAL_CST node whose type is TYPE
1956 and whose value is the integer value of the INTEGER_CST node I. */
1959 real_value_from_int_cst (const_tree type, const_tree i)
1963 /* Clear all bits of the real value type so that we can later do
1964 bitwise comparisons to see if two values are the same. */
1965 memset (&d, 0, sizeof d);
1967 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1968 TYPE_SIGN (TREE_TYPE (i)));
1972 /* Given a tree representing an integer constant I, return a tree
1973 representing the same value as a floating-point constant of type TYPE. */
1976 build_real_from_int_cst (tree type, const_tree i)
1979 int overflow = TREE_OVERFLOW (i);
1981 v = build_real (type, real_value_from_int_cst (type, i));
1983 TREE_OVERFLOW (v) |= overflow;
1987 /* Return a newly constructed STRING_CST node whose value is
1988 the LEN characters at STR.
1989 Note that for a C string literal, LEN should include the trailing NUL.
1990 The TREE_TYPE is not initialized. */
1993 build_string (int len, const char *str)
1998 /* Do not waste bytes provided by padding of struct tree_string. */
1999 length = len + offsetof (struct tree_string, str) + 1;
2001 record_node_allocation_statistics (STRING_CST, length);
2003 s = (tree) ggc_internal_alloc (length);
2005 memset (s, 0, sizeof (struct tree_typed));
2006 TREE_SET_CODE (s, STRING_CST);
2007 TREE_CONSTANT (s) = 1;
2008 TREE_STRING_LENGTH (s) = len;
2009 memcpy (s->string.str, str, len);
2010 s->string.str[len] = '\0';
2015 /* Return a newly constructed COMPLEX_CST node whose value is
2016 specified by the real and imaginary parts REAL and IMAG.
2017 Both REAL and IMAG should be constant nodes. TYPE, if specified,
2018 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
2021 build_complex (tree type, tree real, tree imag)
2023 tree t = make_node (COMPLEX_CST);
2025 TREE_REALPART (t) = real;
2026 TREE_IMAGPART (t) = imag;
2027 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
2028 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
2032 /* Build a complex (inf +- 0i), such as for the result of cproj.
2033 TYPE is the complex tree type of the result. If NEG is true, the
2034 imaginary zero is negative. */
2037 build_complex_inf (tree type, bool neg)
2039 REAL_VALUE_TYPE rinf, rzero = dconst0;
2043 return build_complex (type, build_real (TREE_TYPE (type), rinf),
2044 build_real (TREE_TYPE (type), rzero));
2047 /* Return the constant 1 in type TYPE. If TYPE has several elements, each
2048 element is set to 1. In particular, this is 1 + i for complex types. */
2051 build_each_one_cst (tree type)
2053 if (TREE_CODE (type) == COMPLEX_TYPE)
2055 tree scalar = build_one_cst (TREE_TYPE (type));
2056 return build_complex (type, scalar, scalar);
2059 return build_one_cst (type);
2062 /* Return a constant of arithmetic type TYPE which is the
2063 multiplicative identity of the set TYPE. */
2066 build_one_cst (tree type)
2068 switch (TREE_CODE (type))
2070 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2071 case POINTER_TYPE: case REFERENCE_TYPE:
2073 return build_int_cst (type, 1);
2076 return build_real (type, dconst1);
2078 case FIXED_POINT_TYPE:
2079 /* We can only generate 1 for accum types. */
2080 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2081 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
2085 tree scalar = build_one_cst (TREE_TYPE (type));
2087 return build_vector_from_val (type, scalar);
2091 return build_complex (type,
2092 build_one_cst (TREE_TYPE (type)),
2093 build_zero_cst (TREE_TYPE (type)));
2100 /* Return an integer of type TYPE containing all 1's in as much precision as
2101 it contains, or a complex or vector whose subparts are such integers. */
2104 build_all_ones_cst (tree type)
2106 if (TREE_CODE (type) == COMPLEX_TYPE)
2108 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2109 return build_complex (type, scalar, scalar);
2112 return build_minus_one_cst (type);
2115 /* Return a constant of arithmetic type TYPE which is the
2116 opposite of the multiplicative identity of the set TYPE. */
2119 build_minus_one_cst (tree type)
2121 switch (TREE_CODE (type))
2123 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2124 case POINTER_TYPE: case REFERENCE_TYPE:
2126 return build_int_cst (type, -1);
2129 return build_real (type, dconstm1);
2131 case FIXED_POINT_TYPE:
2132 /* We can only generate 1 for accum types. */
2133 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2134 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2139 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2141 return build_vector_from_val (type, scalar);
2145 return build_complex (type,
2146 build_minus_one_cst (TREE_TYPE (type)),
2147 build_zero_cst (TREE_TYPE (type)));
2154 /* Build 0 constant of type TYPE. This is used by constructor folding
2155 and thus the constant should be represented in memory by
2159 build_zero_cst (tree type)
2161 switch (TREE_CODE (type))
2163 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2164 case POINTER_TYPE: case REFERENCE_TYPE:
2165 case OFFSET_TYPE: case NULLPTR_TYPE:
2166 return build_int_cst (type, 0);
2169 return build_real (type, dconst0);
2171 case FIXED_POINT_TYPE:
2172 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2176 tree scalar = build_zero_cst (TREE_TYPE (type));
2178 return build_vector_from_val (type, scalar);
2183 tree zero = build_zero_cst (TREE_TYPE (type));
2185 return build_complex (type, zero, zero);
2189 if (!AGGREGATE_TYPE_P (type))
2190 return fold_convert (type, integer_zero_node);
2191 return build_constructor (type, NULL);
2196 /* Build a BINFO with LEN language slots. */
2199 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2202 size_t length = (offsetof (struct tree_binfo, base_binfos)
2203 + vec<tree, va_gc>::embedded_size (base_binfos));
2205 record_node_allocation_statistics (TREE_BINFO, length);
2207 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2209 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2211 TREE_SET_CODE (t, TREE_BINFO);
2213 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2218 /* Create a CASE_LABEL_EXPR tree node and return it. */
2221 build_case_label (tree low_value, tree high_value, tree label_decl)
2223 tree t = make_node (CASE_LABEL_EXPR);
2225 TREE_TYPE (t) = void_type_node;
2226 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2228 CASE_LOW (t) = low_value;
2229 CASE_HIGH (t) = high_value;
2230 CASE_LABEL (t) = label_decl;
2231 CASE_CHAIN (t) = NULL_TREE;
2236 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2237 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2238 The latter determines the length of the HOST_WIDE_INT vector. */
2241 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2244 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2245 + sizeof (struct tree_int_cst));
2248 record_node_allocation_statistics (INTEGER_CST, length);
2250 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2252 TREE_SET_CODE (t, INTEGER_CST);
2253 TREE_INT_CST_NUNITS (t) = len;
2254 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2255 /* to_offset can only be applied to trees that are offset_int-sized
2256 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2257 must be exactly the precision of offset_int and so LEN is correct. */
2258 if (ext_len <= OFFSET_INT_ELTS)
2259 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2261 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2263 TREE_CONSTANT (t) = 1;
2268 /* Build a newly constructed TREE_VEC node of length LEN. */
2271 make_tree_vec_stat (int len MEM_STAT_DECL)
2274 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2276 record_node_allocation_statistics (TREE_VEC, length);
2278 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2280 TREE_SET_CODE (t, TREE_VEC);
2281 TREE_VEC_LENGTH (t) = len;
2286 /* Grow a TREE_VEC node to new length LEN. */
2289 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2291 gcc_assert (TREE_CODE (v) == TREE_VEC);
2293 int oldlen = TREE_VEC_LENGTH (v);
2294 gcc_assert (len > oldlen);
2296 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2297 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2299 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2301 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2303 TREE_VEC_LENGTH (v) = len;
2308 /* Return 1 if EXPR is the constant zero, whether it is integral, float or
2309 fixed, and scalar, complex or vector. */
2312 zerop (const_tree expr)
2314 return (integer_zerop (expr)
2315 || real_zerop (expr)
2316 || fixed_zerop (expr));
2319 /* Return 1 if EXPR is the integer constant zero or a complex constant
2323 integer_zerop (const_tree expr)
2325 switch (TREE_CODE (expr))
2328 return wi::eq_p (expr, 0);
2330 return (integer_zerop (TREE_REALPART (expr))
2331 && integer_zerop (TREE_IMAGPART (expr)));
2335 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2336 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2345 /* Return 1 if EXPR is the integer constant one or the corresponding
2346 complex constant. */
2349 integer_onep (const_tree expr)
2351 switch (TREE_CODE (expr))
2354 return wi::eq_p (wi::to_widest (expr), 1);
2356 return (integer_onep (TREE_REALPART (expr))
2357 && integer_zerop (TREE_IMAGPART (expr)));
2361 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2362 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2371 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2372 return 1 if every piece is the integer constant one. */
2375 integer_each_onep (const_tree expr)
2377 if (TREE_CODE (expr) == COMPLEX_CST)
2378 return (integer_onep (TREE_REALPART (expr))
2379 && integer_onep (TREE_IMAGPART (expr)));
2381 return integer_onep (expr);
2384 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2385 it contains, or a complex or vector whose subparts are such integers. */
2388 integer_all_onesp (const_tree expr)
2390 if (TREE_CODE (expr) == COMPLEX_CST
2391 && integer_all_onesp (TREE_REALPART (expr))
2392 && integer_all_onesp (TREE_IMAGPART (expr)))
2395 else if (TREE_CODE (expr) == VECTOR_CST)
2398 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2399 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2404 else if (TREE_CODE (expr) != INTEGER_CST)
2407 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2410 /* Return 1 if EXPR is the integer constant minus one. */
2413 integer_minus_onep (const_tree expr)
2415 if (TREE_CODE (expr) == COMPLEX_CST)
2416 return (integer_all_onesp (TREE_REALPART (expr))
2417 && integer_zerop (TREE_IMAGPART (expr)));
2419 return integer_all_onesp (expr);
2422 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2426 integer_pow2p (const_tree expr)
2428 if (TREE_CODE (expr) == COMPLEX_CST
2429 && integer_pow2p (TREE_REALPART (expr))
2430 && integer_zerop (TREE_IMAGPART (expr)))
2433 if (TREE_CODE (expr) != INTEGER_CST)
2436 return wi::popcount (expr) == 1;
2439 /* Return 1 if EXPR is an integer constant other than zero or a
2440 complex constant other than zero. */
2443 integer_nonzerop (const_tree expr)
2445 return ((TREE_CODE (expr) == INTEGER_CST
2446 && !wi::eq_p (expr, 0))
2447 || (TREE_CODE (expr) == COMPLEX_CST
2448 && (integer_nonzerop (TREE_REALPART (expr))
2449 || integer_nonzerop (TREE_IMAGPART (expr)))));
2452 /* Return 1 if EXPR is the integer constant one. For vector,
2453 return 1 if every piece is the integer constant minus one
2454 (representing the value TRUE). */
2457 integer_truep (const_tree expr)
2459 if (TREE_CODE (expr) == VECTOR_CST)
2460 return integer_all_onesp (expr);
2461 return integer_onep (expr);
2464 /* Return 1 if EXPR is the fixed-point constant zero. */
2467 fixed_zerop (const_tree expr)
2469 return (TREE_CODE (expr) == FIXED_CST
2470 && TREE_FIXED_CST (expr).data.is_zero ());
2473 /* Return the power of two represented by a tree node known to be a
2477 tree_log2 (const_tree expr)
2479 if (TREE_CODE (expr) == COMPLEX_CST)
2480 return tree_log2 (TREE_REALPART (expr));
2482 return wi::exact_log2 (expr);
2485 /* Similar, but return the largest integer Y such that 2 ** Y is less
2486 than or equal to EXPR. */
2489 tree_floor_log2 (const_tree expr)
2491 if (TREE_CODE (expr) == COMPLEX_CST)
2492 return tree_log2 (TREE_REALPART (expr));
2494 return wi::floor_log2 (expr);
2497 /* Return number of known trailing zero bits in EXPR, or, if the value of
2498 EXPR is known to be zero, the precision of it's type. */
2501 tree_ctz (const_tree expr)
2503 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2504 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2507 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2508 switch (TREE_CODE (expr))
2511 ret1 = wi::ctz (expr);
2512 return MIN (ret1, prec);
2514 ret1 = wi::ctz (get_nonzero_bits (expr));
2515 return MIN (ret1, prec);
2522 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2525 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2526 return MIN (ret1, ret2);
2527 case POINTER_PLUS_EXPR:
2528 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2529 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2530 /* Second operand is sizetype, which could be in theory
2531 wider than pointer's precision. Make sure we never
2532 return more than prec. */
2533 ret2 = MIN (ret2, prec);
2534 return MIN (ret1, ret2);
2536 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2537 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2538 return MAX (ret1, ret2);
2540 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2541 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2542 return MIN (ret1 + ret2, prec);
2544 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2545 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2546 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2548 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2549 return MIN (ret1 + ret2, prec);
2553 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2554 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2556 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2557 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2562 case TRUNC_DIV_EXPR:
2564 case FLOOR_DIV_EXPR:
2565 case ROUND_DIV_EXPR:
2566 case EXACT_DIV_EXPR:
2567 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2568 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2570 int l = tree_log2 (TREE_OPERAND (expr, 1));
2573 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2581 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2582 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2584 return MIN (ret1, prec);
2586 return tree_ctz (TREE_OPERAND (expr, 0));
2588 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2591 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2592 return MIN (ret1, ret2);
2594 return tree_ctz (TREE_OPERAND (expr, 1));
2596 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2597 if (ret1 > BITS_PER_UNIT)
2599 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2600 return MIN (ret1, prec);
2608 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2609 decimal float constants, so don't return 1 for them. */
2612 real_zerop (const_tree expr)
2614 switch (TREE_CODE (expr))
2617 return real_equal (&TREE_REAL_CST (expr), &dconst0)
2618 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2620 return real_zerop (TREE_REALPART (expr))
2621 && real_zerop (TREE_IMAGPART (expr));
2625 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2626 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2635 /* Return 1 if EXPR is the real constant one in real or complex form.
2636 Trailing zeroes matter for decimal float constants, so don't return
2640 real_onep (const_tree expr)
2642 switch (TREE_CODE (expr))
2645 return real_equal (&TREE_REAL_CST (expr), &dconst1)
2646 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2648 return real_onep (TREE_REALPART (expr))
2649 && real_zerop (TREE_IMAGPART (expr));
2653 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2654 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2663 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2664 matter for decimal float constants, so don't return 1 for them. */
2667 real_minus_onep (const_tree expr)
2669 switch (TREE_CODE (expr))
2672 return real_equal (&TREE_REAL_CST (expr), &dconstm1)
2673 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2675 return real_minus_onep (TREE_REALPART (expr))
2676 && real_zerop (TREE_IMAGPART (expr));
2680 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2681 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2690 /* Nonzero if EXP is a constant or a cast of a constant. */
2693 really_constant_p (const_tree exp)
2695 /* This is not quite the same as STRIP_NOPS. It does more. */
2696 while (CONVERT_EXPR_P (exp)
2697 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2698 exp = TREE_OPERAND (exp, 0);
2699 return TREE_CONSTANT (exp);
2702 /* Return first list element whose TREE_VALUE is ELEM.
2703 Return 0 if ELEM is not in LIST. */
2706 value_member (tree elem, tree list)
2710 if (elem == TREE_VALUE (list))
2712 list = TREE_CHAIN (list);
2717 /* Return first list element whose TREE_PURPOSE is ELEM.
2718 Return 0 if ELEM is not in LIST. */
2721 purpose_member (const_tree elem, tree list)
2725 if (elem == TREE_PURPOSE (list))
2727 list = TREE_CHAIN (list);
2732 /* Return true if ELEM is in V. */
2735 vec_member (const_tree elem, vec<tree, va_gc> *v)
2739 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2745 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2749 chain_index (int idx, tree chain)
2751 for (; chain && idx > 0; --idx)
2752 chain = TREE_CHAIN (chain);
2756 /* Return nonzero if ELEM is part of the chain CHAIN. */
2759 chain_member (const_tree elem, const_tree chain)
2765 chain = DECL_CHAIN (chain);
2771 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2772 We expect a null pointer to mark the end of the chain.
2773 This is the Lisp primitive `length'. */
2776 list_length (const_tree t)
2779 #ifdef ENABLE_TREE_CHECKING
2787 #ifdef ENABLE_TREE_CHECKING
2790 gcc_assert (p != q);
2798 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2799 UNION_TYPE TYPE, or NULL_TREE if none. */
2802 first_field (const_tree type)
2804 tree t = TYPE_FIELDS (type);
2805 while (t && TREE_CODE (t) != FIELD_DECL)
2810 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2811 by modifying the last node in chain 1 to point to chain 2.
2812 This is the Lisp primitive `nconc'. */
2815 chainon (tree op1, tree op2)
2824 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2826 TREE_CHAIN (t1) = op2;
2828 #ifdef ENABLE_TREE_CHECKING
2831 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2832 gcc_assert (t2 != t1);
2839 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2842 tree_last (tree chain)
2846 while ((next = TREE_CHAIN (chain)))
2851 /* Reverse the order of elements in the chain T,
2852 and return the new head of the chain (old last element). */
2857 tree prev = 0, decl, next;
2858 for (decl = t; decl; decl = next)
2860 /* We shouldn't be using this function to reverse BLOCK chains; we
2861 have blocks_nreverse for that. */
2862 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2863 next = TREE_CHAIN (decl);
2864 TREE_CHAIN (decl) = prev;
2870 /* Return a newly created TREE_LIST node whose
2871 purpose and value fields are PARM and VALUE. */
2874 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2876 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2877 TREE_PURPOSE (t) = parm;
2878 TREE_VALUE (t) = value;
2882 /* Build a chain of TREE_LIST nodes from a vector. */
2885 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2887 tree ret = NULL_TREE;
2891 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2893 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2894 pp = &TREE_CHAIN (*pp);
2899 /* Return a newly created TREE_LIST node whose
2900 purpose and value fields are PURPOSE and VALUE
2901 and whose TREE_CHAIN is CHAIN. */
2904 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2908 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2909 memset (node, 0, sizeof (struct tree_common));
2911 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2913 TREE_SET_CODE (node, TREE_LIST);
2914 TREE_CHAIN (node) = chain;
2915 TREE_PURPOSE (node) = purpose;
2916 TREE_VALUE (node) = value;
2920 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2924 ctor_to_vec (tree ctor)
2926 vec<tree, va_gc> *vec;
2927 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2931 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2932 vec->quick_push (val);
2937 /* Return the size nominally occupied by an object of type TYPE
2938 when it resides in memory. The value is measured in units of bytes,
2939 and its data type is that normally used for type sizes
2940 (which is the first type created by make_signed_type or
2941 make_unsigned_type). */
2944 size_in_bytes (const_tree type)
2948 if (type == error_mark_node)
2949 return integer_zero_node;
2951 type = TYPE_MAIN_VARIANT (type);
2952 t = TYPE_SIZE_UNIT (type);
2956 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2957 return size_zero_node;
2963 /* Return the size of TYPE (in bytes) as a wide integer
2964 or return -1 if the size can vary or is larger than an integer. */
2967 int_size_in_bytes (const_tree type)
2971 if (type == error_mark_node)
2974 type = TYPE_MAIN_VARIANT (type);
2975 t = TYPE_SIZE_UNIT (type);
2977 if (t && tree_fits_uhwi_p (t))
2978 return TREE_INT_CST_LOW (t);
2983 /* Return the maximum size of TYPE (in bytes) as a wide integer
2984 or return -1 if the size can vary or is larger than an integer. */
2987 max_int_size_in_bytes (const_tree type)
2989 HOST_WIDE_INT size = -1;
2992 /* If this is an array type, check for a possible MAX_SIZE attached. */
2994 if (TREE_CODE (type) == ARRAY_TYPE)
2996 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2998 if (size_tree && tree_fits_uhwi_p (size_tree))
2999 size = tree_to_uhwi (size_tree);
3002 /* If we still haven't been able to get a size, see if the language
3003 can compute a maximum size. */
3007 size_tree = lang_hooks.types.max_size (type);
3009 if (size_tree && tree_fits_uhwi_p (size_tree))
3010 size = tree_to_uhwi (size_tree);
3016 /* Return the bit position of FIELD, in bits from the start of the record.
3017 This is a tree of type bitsizetype. */
3020 bit_position (const_tree field)
3022 return bit_from_pos (DECL_FIELD_OFFSET (field),
3023 DECL_FIELD_BIT_OFFSET (field));
3026 /* Return the byte position of FIELD, in bytes from the start of the record.
3027 This is a tree of type sizetype. */
3030 byte_position (const_tree field)
3032 return byte_from_pos (DECL_FIELD_OFFSET (field),
3033 DECL_FIELD_BIT_OFFSET (field));
3036 /* Likewise, but return as an integer. It must be representable in
3037 that way (since it could be a signed value, we don't have the
3038 option of returning -1 like int_size_in_byte can. */
3041 int_byte_position (const_tree field)
3043 return tree_to_shwi (byte_position (field));
3046 /* Return the strictest alignment, in bits, that T is known to have. */
3049 expr_align (const_tree t)
3051 unsigned int align0, align1;
3053 switch (TREE_CODE (t))
3055 CASE_CONVERT: case NON_LVALUE_EXPR:
3056 /* If we have conversions, we know that the alignment of the
3057 object must meet each of the alignments of the types. */
3058 align0 = expr_align (TREE_OPERAND (t, 0));
3059 align1 = TYPE_ALIGN (TREE_TYPE (t));
3060 return MAX (align0, align1);
3062 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
3063 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
3064 case CLEANUP_POINT_EXPR:
3065 /* These don't change the alignment of an object. */
3066 return expr_align (TREE_OPERAND (t, 0));
3069 /* The best we can do is say that the alignment is the least aligned
3071 align0 = expr_align (TREE_OPERAND (t, 1));
3072 align1 = expr_align (TREE_OPERAND (t, 2));
3073 return MIN (align0, align1);
3075 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
3076 meaningfully, it's always 1. */
3077 case LABEL_DECL: case CONST_DECL:
3078 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
3080 gcc_assert (DECL_ALIGN (t) != 0);
3081 return DECL_ALIGN (t);
3087 /* Otherwise take the alignment from that of the type. */
3088 return TYPE_ALIGN (TREE_TYPE (t));
3091 /* Return, as a tree node, the number of elements for TYPE (which is an
3092 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3095 array_type_nelts (const_tree type)
3097 tree index_type, min, max;
3099 /* If they did it with unspecified bounds, then we should have already
3100 given an error about it before we got here. */
3101 if (! TYPE_DOMAIN (type))
3102 return error_mark_node;
3104 index_type = TYPE_DOMAIN (type);
3105 min = TYPE_MIN_VALUE (index_type);
3106 max = TYPE_MAX_VALUE (index_type);
3108 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3110 return error_mark_node;
3112 return (integer_zerop (min)
3114 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3117 /* If arg is static -- a reference to an object in static storage -- then
3118 return the object. This is not the same as the C meaning of `static'.
3119 If arg isn't static, return NULL. */
3124 switch (TREE_CODE (arg))
3127 /* Nested functions are static, even though taking their address will
3128 involve a trampoline as we unnest the nested function and create
3129 the trampoline on the tree level. */
3133 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3134 && ! DECL_THREAD_LOCAL_P (arg)
3135 && ! DECL_DLLIMPORT_P (arg)
3139 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3143 return TREE_STATIC (arg) ? arg : NULL;
3150 /* If the thing being referenced is not a field, then it is
3151 something language specific. */
3152 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3154 /* If we are referencing a bitfield, we can't evaluate an
3155 ADDR_EXPR at compile time and so it isn't a constant. */
3156 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3159 return staticp (TREE_OPERAND (arg, 0));
3165 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3168 case ARRAY_RANGE_REF:
3169 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3170 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3171 return staticp (TREE_OPERAND (arg, 0));
3175 case COMPOUND_LITERAL_EXPR:
3176 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3186 /* Return whether OP is a DECL whose address is function-invariant. */
3189 decl_address_invariant_p (const_tree op)
3191 /* The conditions below are slightly less strict than the one in
3194 switch (TREE_CODE (op))
3203 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3204 || DECL_THREAD_LOCAL_P (op)
3205 || DECL_CONTEXT (op) == current_function_decl
3206 || decl_function_context (op) == current_function_decl)
3211 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3212 || decl_function_context (op) == current_function_decl)
3223 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3226 decl_address_ip_invariant_p (const_tree op)
3228 /* The conditions below are slightly less strict than the one in
3231 switch (TREE_CODE (op))
3239 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3240 && !DECL_DLLIMPORT_P (op))
3241 || DECL_THREAD_LOCAL_P (op))
3246 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3258 /* Return true if T is function-invariant (internal function, does
3259 not handle arithmetic; that's handled in skip_simple_arithmetic and
3260 tree_invariant_p). */
3263 tree_invariant_p_1 (tree t)
3267 if (TREE_CONSTANT (t)
3268 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3271 switch (TREE_CODE (t))
3277 op = TREE_OPERAND (t, 0);
3278 while (handled_component_p (op))
3280 switch (TREE_CODE (op))
3283 case ARRAY_RANGE_REF:
3284 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3285 || TREE_OPERAND (op, 2) != NULL_TREE
3286 || TREE_OPERAND (op, 3) != NULL_TREE)
3291 if (TREE_OPERAND (op, 2) != NULL_TREE)
3297 op = TREE_OPERAND (op, 0);
3300 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3309 /* Return true if T is function-invariant. */
3312 tree_invariant_p (tree t)
3314 tree inner = skip_simple_arithmetic (t);
3315 return tree_invariant_p_1 (inner);
3318 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3319 Do this to any expression which may be used in more than one place,
3320 but must be evaluated only once.
3322 Normally, expand_expr would reevaluate the expression each time.
3323 Calling save_expr produces something that is evaluated and recorded
3324 the first time expand_expr is called on it. Subsequent calls to
3325 expand_expr just reuse the recorded value.
3327 The call to expand_expr that generates code that actually computes
3328 the value is the first call *at compile time*. Subsequent calls
3329 *at compile time* generate code to use the saved value.
3330 This produces correct result provided that *at run time* control
3331 always flows through the insns made by the first expand_expr
3332 before reaching the other places where the save_expr was evaluated.
3333 You, the caller of save_expr, must make sure this is so.
3335 Constants, and certain read-only nodes, are returned with no
3336 SAVE_EXPR because that is safe. Expressions containing placeholders
3337 are not touched; see tree.def for an explanation of what these
3341 save_expr (tree expr)
3343 tree t = fold (expr);
3346 /* If the tree evaluates to a constant, then we don't want to hide that
3347 fact (i.e. this allows further folding, and direct checks for constants).
3348 However, a read-only object that has side effects cannot be bypassed.
3349 Since it is no problem to reevaluate literals, we just return the
3351 inner = skip_simple_arithmetic (t);
3352 if (TREE_CODE (inner) == ERROR_MARK)
3355 if (tree_invariant_p_1 (inner))
3358 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3359 it means that the size or offset of some field of an object depends on
3360 the value within another field.
3362 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3363 and some variable since it would then need to be both evaluated once and
3364 evaluated more than once. Front-ends must assure this case cannot
3365 happen by surrounding any such subexpressions in their own SAVE_EXPR
3366 and forcing evaluation at the proper time. */
3367 if (contains_placeholder_p (inner))
3370 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3371 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3373 /* This expression might be placed ahead of a jump to ensure that the
3374 value was computed on both sides of the jump. So make sure it isn't
3375 eliminated as dead. */
3376 TREE_SIDE_EFFECTS (t) = 1;
3380 /* Look inside EXPR into any simple arithmetic operations. Return the
3381 outermost non-arithmetic or non-invariant node. */
3384 skip_simple_arithmetic (tree expr)
3386 /* We don't care about whether this can be used as an lvalue in this
3388 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3389 expr = TREE_OPERAND (expr, 0);
3391 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3392 a constant, it will be more efficient to not make another SAVE_EXPR since
3393 it will allow better simplification and GCSE will be able to merge the
3394 computations if they actually occur. */
3397 if (UNARY_CLASS_P (expr))
3398 expr = TREE_OPERAND (expr, 0);
3399 else if (BINARY_CLASS_P (expr))
3401 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3402 expr = TREE_OPERAND (expr, 0);
3403 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3404 expr = TREE_OPERAND (expr, 1);
3415 /* Look inside EXPR into simple arithmetic operations involving constants.
3416 Return the outermost non-arithmetic or non-constant node. */
3419 skip_simple_constant_arithmetic (tree expr)
3421 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3422 expr = TREE_OPERAND (expr, 0);
3426 if (UNARY_CLASS_P (expr))
3427 expr = TREE_OPERAND (expr, 0);
3428 else if (BINARY_CLASS_P (expr))
3430 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3431 expr = TREE_OPERAND (expr, 0);
3432 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3433 expr = TREE_OPERAND (expr, 1);
3444 /* Return which tree structure is used by T. */
3446 enum tree_node_structure_enum
3447 tree_node_structure (const_tree t)
3449 const enum tree_code code = TREE_CODE (t);
3450 return tree_node_structure_for_code (code);
3453 /* Set various status flags when building a CALL_EXPR object T. */
3456 process_call_operands (tree t)
3458 bool side_effects = TREE_SIDE_EFFECTS (t);
3459 bool read_only = false;
3460 int i = call_expr_flags (t);
3462 /* Calls have side-effects, except those to const or pure functions. */
3463 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3464 side_effects = true;
3465 /* Propagate TREE_READONLY of arguments for const functions. */
3469 if (!side_effects || read_only)
3470 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3472 tree op = TREE_OPERAND (t, i);
3473 if (op && TREE_SIDE_EFFECTS (op))
3474 side_effects = true;
3475 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3479 TREE_SIDE_EFFECTS (t) = side_effects;
3480 TREE_READONLY (t) = read_only;
3483 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3484 size or offset that depends on a field within a record. */
3487 contains_placeholder_p (const_tree exp)
3489 enum tree_code code;
3494 code = TREE_CODE (exp);
3495 if (code == PLACEHOLDER_EXPR)
3498 switch (TREE_CODE_CLASS (code))
3501 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3502 position computations since they will be converted into a
3503 WITH_RECORD_EXPR involving the reference, which will assume
3504 here will be valid. */
3505 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3507 case tcc_exceptional:
3508 if (code == TREE_LIST)
3509 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3510 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3515 case tcc_comparison:
3516 case tcc_expression:
3520 /* Ignoring the first operand isn't quite right, but works best. */
3521 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3524 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3525 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3526 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3529 /* The save_expr function never wraps anything containing
3530 a PLACEHOLDER_EXPR. */
3537 switch (TREE_CODE_LENGTH (code))
3540 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3542 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3543 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3554 const_call_expr_arg_iterator iter;
3555 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3556 if (CONTAINS_PLACEHOLDER_P (arg))
3570 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3571 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3575 type_contains_placeholder_1 (const_tree type)
3577 /* If the size contains a placeholder or the parent type (component type in
3578 the case of arrays) type involves a placeholder, this type does. */
3579 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3580 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3581 || (!POINTER_TYPE_P (type)
3583 && type_contains_placeholder_p (TREE_TYPE (type))))
3586 /* Now do type-specific checks. Note that the last part of the check above
3587 greatly limits what we have to do below. */
3588 switch (TREE_CODE (type))
3591 case POINTER_BOUNDS_TYPE:
3597 case REFERENCE_TYPE:
3606 case FIXED_POINT_TYPE:
3607 /* Here we just check the bounds. */
3608 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3609 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3612 /* We have already checked the component type above, so just check
3613 the domain type. Flexible array members have a null domain. */
3614 return TYPE_DOMAIN (type) ?
3615 type_contains_placeholder_p (TYPE_DOMAIN (type)) : false;
3619 case QUAL_UNION_TYPE:
3623 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3624 if (TREE_CODE (field) == FIELD_DECL
3625 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3626 || (TREE_CODE (type) == QUAL_UNION_TYPE
3627 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3628 || type_contains_placeholder_p (TREE_TYPE (field))))
3639 /* Wrapper around above function used to cache its result. */
3642 type_contains_placeholder_p (tree type)
3646 /* If the contains_placeholder_bits field has been initialized,
3647 then we know the answer. */
3648 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3649 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3651 /* Indicate that we've seen this type node, and the answer is false.
3652 This is what we want to return if we run into recursion via fields. */
3653 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3655 /* Compute the real value. */
3656 result = type_contains_placeholder_1 (type);
3658 /* Store the real value. */
3659 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3664 /* Push tree EXP onto vector QUEUE if it is not already present. */
3667 push_without_duplicates (tree exp, vec<tree> *queue)
3672 FOR_EACH_VEC_ELT (*queue, i, iter)
3673 if (simple_cst_equal (iter, exp) == 1)
3677 queue->safe_push (exp);
3680 /* Given a tree EXP, find all occurrences of references to fields
3681 in a PLACEHOLDER_EXPR and place them in vector REFS without
3682 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3683 we assume here that EXP contains only arithmetic expressions
3684 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3688 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3690 enum tree_code code = TREE_CODE (exp);
3694 /* We handle TREE_LIST and COMPONENT_REF separately. */
3695 if (code == TREE_LIST)
3697 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3698 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3700 else if (code == COMPONENT_REF)
3702 for (inner = TREE_OPERAND (exp, 0);
3703 REFERENCE_CLASS_P (inner);
3704 inner = TREE_OPERAND (inner, 0))
3707 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3708 push_without_duplicates (exp, refs);
3710 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3713 switch (TREE_CODE_CLASS (code))
3718 case tcc_declaration:
3719 /* Variables allocated to static storage can stay. */
3720 if (!TREE_STATIC (exp))
3721 push_without_duplicates (exp, refs);
3724 case tcc_expression:
3725 /* This is the pattern built in ada/make_aligning_type. */
3726 if (code == ADDR_EXPR
3727 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3729 push_without_duplicates (exp, refs);
3735 case tcc_exceptional:
3738 case tcc_comparison:
3740 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3741 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3745 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3746 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3754 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3755 return a tree with all occurrences of references to F in a
3756 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3757 CONST_DECLs. Note that we assume here that EXP contains only
3758 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3759 occurring only in their argument list. */
3762 substitute_in_expr (tree exp, tree f, tree r)
3764 enum tree_code code = TREE_CODE (exp);
3765 tree op0, op1, op2, op3;
3768 /* We handle TREE_LIST and COMPONENT_REF separately. */
3769 if (code == TREE_LIST)
3771 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3772 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3773 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3776 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3778 else if (code == COMPONENT_REF)
3782 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3783 and it is the right field, replace it with R. */
3784 for (inner = TREE_OPERAND (exp, 0);
3785 REFERENCE_CLASS_P (inner);
3786 inner = TREE_OPERAND (inner, 0))
3790 op1 = TREE_OPERAND (exp, 1);
3792 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3795 /* If this expression hasn't been completed let, leave it alone. */
3796 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3799 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3800 if (op0 == TREE_OPERAND (exp, 0))
3804 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3807 switch (TREE_CODE_CLASS (code))
3812 case tcc_declaration:
3818 case tcc_expression:
3824 case tcc_exceptional:
3827 case tcc_comparison:
3829 switch (TREE_CODE_LENGTH (code))
3835 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3836 if (op0 == TREE_OPERAND (exp, 0))
3839 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3843 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3844 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3846 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3849 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3853 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3854 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3855 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3857 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3858 && op2 == TREE_OPERAND (exp, 2))
3861 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3865 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3866 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3867 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3868 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3870 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3871 && op2 == TREE_OPERAND (exp, 2)
3872 && op3 == TREE_OPERAND (exp, 3))
3876 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3888 new_tree = NULL_TREE;
3890 /* If we are trying to replace F with a constant, inline back
3891 functions which do nothing else than computing a value from
3892 the arguments they are passed. This makes it possible to
3893 fold partially or entirely the replacement expression. */
3894 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3896 tree t = maybe_inline_call_in_expr (exp);
3898 return SUBSTITUTE_IN_EXPR (t, f, r);
3901 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3903 tree op = TREE_OPERAND (exp, i);
3904 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3908 new_tree = copy_node (exp);
3909 TREE_OPERAND (new_tree, i) = new_op;
3915 new_tree = fold (new_tree);
3916 if (TREE_CODE (new_tree) == CALL_EXPR)
3917 process_call_operands (new_tree);
3928 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3930 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3931 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3936 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3937 for it within OBJ, a tree that is an object or a chain of references. */
3940 substitute_placeholder_in_expr (tree exp, tree obj)
3942 enum tree_code code = TREE_CODE (exp);
3943 tree op0, op1, op2, op3;
3946 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3947 in the chain of OBJ. */
3948 if (code == PLACEHOLDER_EXPR)
3950 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3953 for (elt = obj; elt != 0;
3954 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3955 || TREE_CODE (elt) == COND_EXPR)
3956 ? TREE_OPERAND (elt, 1)
3957 : (REFERENCE_CLASS_P (elt)
3958 || UNARY_CLASS_P (elt)
3959 || BINARY_CLASS_P (elt)
3960 || VL_EXP_CLASS_P (elt)
3961 || EXPRESSION_CLASS_P (elt))
3962 ? TREE_OPERAND (elt, 0) : 0))
3963 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3966 for (elt = obj; elt != 0;
3967 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3968 || TREE_CODE (elt) == COND_EXPR)
3969 ? TREE_OPERAND (elt, 1)
3970 : (REFERENCE_CLASS_P (elt)
3971 || UNARY_CLASS_P (elt)
3972 || BINARY_CLASS_P (elt)
3973 || VL_EXP_CLASS_P (elt)
3974 || EXPRESSION_CLASS_P (elt))
3975 ? TREE_OPERAND (elt, 0) : 0))
3976 if (POINTER_TYPE_P (TREE_TYPE (elt))
3977 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3979 return fold_build1 (INDIRECT_REF, need_type, elt);
3981 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3982 survives until RTL generation, there will be an error. */
3986 /* TREE_LIST is special because we need to look at TREE_VALUE
3987 and TREE_CHAIN, not TREE_OPERANDS. */
3988 else if (code == TREE_LIST)
3990 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3991 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3992 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3995 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3998 switch (TREE_CODE_CLASS (code))
4001 case tcc_declaration:
4004 case tcc_exceptional:
4007 case tcc_comparison:
4008 case tcc_expression:
4011 switch (TREE_CODE_LENGTH (code))
4017 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4018 if (op0 == TREE_OPERAND (exp, 0))
4021 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
4025 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4026 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4028 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
4031 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
4035 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4036 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4037 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4039 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4040 && op2 == TREE_OPERAND (exp, 2))
4043 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
4047 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
4048 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
4049 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
4050 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
4052 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
4053 && op2 == TREE_OPERAND (exp, 2)
4054 && op3 == TREE_OPERAND (exp, 3))
4058 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
4070 new_tree = NULL_TREE;
4072 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
4074 tree op = TREE_OPERAND (exp, i);
4075 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
4079 new_tree = copy_node (exp);
4080 TREE_OPERAND (new_tree, i) = new_op;
4086 new_tree = fold (new_tree);
4087 if (TREE_CODE (new_tree) == CALL_EXPR)
4088 process_call_operands (new_tree);
4099 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4101 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4102 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4108 /* Subroutine of stabilize_reference; this is called for subtrees of
4109 references. Any expression with side-effects must be put in a SAVE_EXPR
4110 to ensure that it is only evaluated once.
4112 We don't put SAVE_EXPR nodes around everything, because assigning very
4113 simple expressions to temporaries causes us to miss good opportunities
4114 for optimizations. Among other things, the opportunity to fold in the
4115 addition of a constant into an addressing mode often gets lost, e.g.
4116 "y[i+1] += x;". In general, we take the approach that we should not make
4117 an assignment unless we are forced into it - i.e., that any non-side effect
4118 operator should be allowed, and that cse should take care of coalescing
4119 multiple utterances of the same expression should that prove fruitful. */
4122 stabilize_reference_1 (tree e)
4125 enum tree_code code = TREE_CODE (e);
4127 /* We cannot ignore const expressions because it might be a reference
4128 to a const array but whose index contains side-effects. But we can
4129 ignore things that are actual constant or that already have been
4130 handled by this function. */
4132 if (tree_invariant_p (e))
4135 switch (TREE_CODE_CLASS (code))
4137 case tcc_exceptional:
4139 case tcc_declaration:
4140 case tcc_comparison:
4142 case tcc_expression:
4145 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4146 so that it will only be evaluated once. */
4147 /* The reference (r) and comparison (<) classes could be handled as
4148 below, but it is generally faster to only evaluate them once. */
4149 if (TREE_SIDE_EFFECTS (e))
4150 return save_expr (e);
4154 /* Constants need no processing. In fact, we should never reach
4159 /* Division is slow and tends to be compiled with jumps,
4160 especially the division by powers of 2 that is often
4161 found inside of an array reference. So do it just once. */
4162 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4163 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4164 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4165 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4166 return save_expr (e);
4167 /* Recursively stabilize each operand. */
4168 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4169 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4173 /* Recursively stabilize each operand. */
4174 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4181 TREE_TYPE (result) = TREE_TYPE (e);
4182 TREE_READONLY (result) = TREE_READONLY (e);
4183 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4184 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4189 /* Stabilize a reference so that we can use it any number of times
4190 without causing its operands to be evaluated more than once.
4191 Returns the stabilized reference. This works by means of save_expr,
4192 so see the caveats in the comments about save_expr.
4194 Also allows conversion expressions whose operands are references.
4195 Any other kind of expression is returned unchanged. */
4198 stabilize_reference (tree ref)
4201 enum tree_code code = TREE_CODE (ref);
4208 /* No action is needed in this case. */
4213 case FIX_TRUNC_EXPR:
4214 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4218 result = build_nt (INDIRECT_REF,
4219 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4223 result = build_nt (COMPONENT_REF,
4224 stabilize_reference (TREE_OPERAND (ref, 0)),
4225 TREE_OPERAND (ref, 1), NULL_TREE);
4229 result = build_nt (BIT_FIELD_REF,
4230 stabilize_reference (TREE_OPERAND (ref, 0)),
4231 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4232 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
4236 result = build_nt (ARRAY_REF,
4237 stabilize_reference (TREE_OPERAND (ref, 0)),
4238 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4239 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4242 case ARRAY_RANGE_REF:
4243 result = build_nt (ARRAY_RANGE_REF,
4244 stabilize_reference (TREE_OPERAND (ref, 0)),
4245 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4246 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4250 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4251 it wouldn't be ignored. This matters when dealing with
4253 return stabilize_reference_1 (ref);
4255 /* If arg isn't a kind of lvalue we recognize, make no change.
4256 Caller should recognize the error for an invalid lvalue. */
4261 return error_mark_node;
4264 TREE_TYPE (result) = TREE_TYPE (ref);
4265 TREE_READONLY (result) = TREE_READONLY (ref);
4266 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4267 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4272 /* Low-level constructors for expressions. */
4274 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4275 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4278 recompute_tree_invariant_for_addr_expr (tree t)
4281 bool tc = true, se = false;
4283 gcc_assert (TREE_CODE (t) == ADDR_EXPR);
4285 /* We started out assuming this address is both invariant and constant, but
4286 does not have side effects. Now go down any handled components and see if
4287 any of them involve offsets that are either non-constant or non-invariant.
4288 Also check for side-effects.
4290 ??? Note that this code makes no attempt to deal with the case where
4291 taking the address of something causes a copy due to misalignment. */
4293 #define UPDATE_FLAGS(NODE) \
4294 do { tree _node = (NODE); \
4295 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4296 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4298 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4299 node = TREE_OPERAND (node, 0))
4301 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4302 array reference (probably made temporarily by the G++ front end),
4303 so ignore all the operands. */
4304 if ((TREE_CODE (node) == ARRAY_REF
4305 || TREE_CODE (node) == ARRAY_RANGE_REF)
4306 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4308 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4309 if (TREE_OPERAND (node, 2))
4310 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4311 if (TREE_OPERAND (node, 3))
4312 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4314 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4315 FIELD_DECL, apparently. The G++ front end can put something else
4316 there, at least temporarily. */
4317 else if (TREE_CODE (node) == COMPONENT_REF
4318 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4320 if (TREE_OPERAND (node, 2))
4321 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4325 node = lang_hooks.expr_to_decl (node, &tc, &se);
4327 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4328 the address, since &(*a)->b is a form of addition. If it's a constant, the
4329 address is constant too. If it's a decl, its address is constant if the
4330 decl is static. Everything else is not constant and, furthermore,
4331 taking the address of a volatile variable is not volatile. */
4332 if (TREE_CODE (node) == INDIRECT_REF
4333 || TREE_CODE (node) == MEM_REF)
4334 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4335 else if (CONSTANT_CLASS_P (node))
4337 else if (DECL_P (node))
4338 tc &= (staticp (node) != NULL_TREE);
4342 se |= TREE_SIDE_EFFECTS (node);
4346 TREE_CONSTANT (t) = tc;
4347 TREE_SIDE_EFFECTS (t) = se;
4351 /* Build an expression of code CODE, data type TYPE, and operands as
4352 specified. Expressions and reference nodes can be created this way.
4353 Constants, decls, types and misc nodes cannot be.
4355 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4356 enough for all extant tree codes. */
4359 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4363 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4365 t = make_node_stat (code PASS_MEM_STAT);
4372 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4374 int length = sizeof (struct tree_exp);
4377 record_node_allocation_statistics (code, length);
4379 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4381 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4383 memset (t, 0, sizeof (struct tree_common));
4385 TREE_SET_CODE (t, code);
4387 TREE_TYPE (t) = type;
4388 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4389 TREE_OPERAND (t, 0) = node;
4390 if (node && !TYPE_P (node))
4392 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4393 TREE_READONLY (t) = TREE_READONLY (node);
4396 if (TREE_CODE_CLASS (code) == tcc_statement)
4397 TREE_SIDE_EFFECTS (t) = 1;
4401 /* All of these have side-effects, no matter what their
4403 TREE_SIDE_EFFECTS (t) = 1;
4404 TREE_READONLY (t) = 0;
4408 /* Whether a dereference is readonly has nothing to do with whether
4409 its operand is readonly. */
4410 TREE_READONLY (t) = 0;
4415 recompute_tree_invariant_for_addr_expr (t);
4419 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4420 && node && !TYPE_P (node)
4421 && TREE_CONSTANT (node))
4422 TREE_CONSTANT (t) = 1;
4423 if (TREE_CODE_CLASS (code) == tcc_reference
4424 && node && TREE_THIS_VOLATILE (node))
4425 TREE_THIS_VOLATILE (t) = 1;
4432 #define PROCESS_ARG(N) \
4434 TREE_OPERAND (t, N) = arg##N; \
4435 if (arg##N &&!TYPE_P (arg##N)) \
4437 if (TREE_SIDE_EFFECTS (arg##N)) \
4439 if (!TREE_READONLY (arg##N) \
4440 && !CONSTANT_CLASS_P (arg##N)) \
4441 (void) (read_only = 0); \
4442 if (!TREE_CONSTANT (arg##N)) \
4443 (void) (constant = 0); \
4448 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4450 bool constant, read_only, side_effects;
4453 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4455 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4456 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4457 /* When sizetype precision doesn't match that of pointers
4458 we need to be able to build explicit extensions or truncations
4459 of the offset argument. */
4460 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4461 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4462 && TREE_CODE (arg1) == INTEGER_CST);
4464 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4465 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4466 && ptrofftype_p (TREE_TYPE (arg1)));
4468 t = make_node_stat (code PASS_MEM_STAT);
4471 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4472 result based on those same flags for the arguments. But if the
4473 arguments aren't really even `tree' expressions, we shouldn't be trying
4476 /* Expressions without side effects may be constant if their
4477 arguments are as well. */
4478 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4479 || TREE_CODE_CLASS (code) == tcc_binary);
4481 side_effects = TREE_SIDE_EFFECTS (t);
4486 TREE_SIDE_EFFECTS (t) = side_effects;
4487 if (code == MEM_REF)
4489 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4491 tree o = TREE_OPERAND (arg0, 0);
4492 TREE_READONLY (t) = TREE_READONLY (o);
4493 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4498 TREE_READONLY (t) = read_only;
4499 TREE_CONSTANT (t) = constant;
4500 TREE_THIS_VOLATILE (t)
4501 = (TREE_CODE_CLASS (code) == tcc_reference
4502 && arg0 && TREE_THIS_VOLATILE (arg0));
4510 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4511 tree arg2 MEM_STAT_DECL)
4513 bool constant, read_only, side_effects;
4516 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4517 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4519 t = make_node_stat (code PASS_MEM_STAT);
4524 /* As a special exception, if COND_EXPR has NULL branches, we
4525 assume that it is a gimple statement and always consider
4526 it to have side effects. */
4527 if (code == COND_EXPR
4528 && tt == void_type_node
4529 && arg1 == NULL_TREE
4530 && arg2 == NULL_TREE)
4531 side_effects = true;
4533 side_effects = TREE_SIDE_EFFECTS (t);
4539 if (code == COND_EXPR)
4540 TREE_READONLY (t) = read_only;
4542 TREE_SIDE_EFFECTS (t) = side_effects;
4543 TREE_THIS_VOLATILE (t)
4544 = (TREE_CODE_CLASS (code) == tcc_reference
4545 && arg0 && TREE_THIS_VOLATILE (arg0));
4551 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4552 tree arg2, tree arg3 MEM_STAT_DECL)
4554 bool constant, read_only, side_effects;
4557 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4559 t = make_node_stat (code PASS_MEM_STAT);
4562 side_effects = TREE_SIDE_EFFECTS (t);
4569 TREE_SIDE_EFFECTS (t) = side_effects;
4570 TREE_THIS_VOLATILE (t)
4571 = (TREE_CODE_CLASS (code) == tcc_reference
4572 && arg0 && TREE_THIS_VOLATILE (arg0));
4578 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4579 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4581 bool constant, read_only, side_effects;
4584 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4586 t = make_node_stat (code PASS_MEM_STAT);
4589 side_effects = TREE_SIDE_EFFECTS (t);
4597 TREE_SIDE_EFFECTS (t) = side_effects;
4598 if (code == TARGET_MEM_REF)
4600 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4602 tree o = TREE_OPERAND (arg0, 0);
4603 TREE_READONLY (t) = TREE_READONLY (o);
4604 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4608 TREE_THIS_VOLATILE (t)
4609 = (TREE_CODE_CLASS (code) == tcc_reference
4610 && arg0 && TREE_THIS_VOLATILE (arg0));
4615 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
4616 on the pointer PTR. */
4619 build_simple_mem_ref_loc (location_t loc, tree ptr)
4621 HOST_WIDE_INT offset = 0;
4622 tree ptype = TREE_TYPE (ptr);
4624 /* For convenience allow addresses that collapse to a simple base
4626 if (TREE_CODE (ptr) == ADDR_EXPR
4627 && (handled_component_p (TREE_OPERAND (ptr, 0))
4628 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4630 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4632 ptr = build_fold_addr_expr (ptr);
4633 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4635 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4636 ptr, build_int_cst (ptype, offset));
4637 SET_EXPR_LOCATION (tem, loc);
4641 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4644 mem_ref_offset (const_tree t)
4646 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4649 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4650 offsetted by OFFSET units. */
4653 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4655 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4656 build_fold_addr_expr (base),
4657 build_int_cst (ptr_type_node, offset));
4658 tree addr = build1 (ADDR_EXPR, type, ref);
4659 recompute_tree_invariant_for_addr_expr (addr);
4663 /* Similar except don't specify the TREE_TYPE
4664 and leave the TREE_SIDE_EFFECTS as 0.
4665 It is permissible for arguments to be null,
4666 or even garbage if their values do not matter. */
4669 build_nt (enum tree_code code, ...)
4676 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4680 t = make_node (code);
4681 length = TREE_CODE_LENGTH (code);
4683 for (i = 0; i < length; i++)
4684 TREE_OPERAND (t, i) = va_arg (p, tree);
4690 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4694 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4699 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4700 CALL_EXPR_FN (ret) = fn;
4701 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4702 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4703 CALL_EXPR_ARG (ret, ix) = t;
4707 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4708 We do NOT enter this node in any sort of symbol table.
4710 LOC is the location of the decl.
4712 layout_decl is used to set up the decl's storage layout.
4713 Other slots are initialized to 0 or null pointers. */
4716 build_decl_stat (location_t loc, enum tree_code code, tree name,
4717 tree type MEM_STAT_DECL)
4721 t = make_node_stat (code PASS_MEM_STAT);
4722 DECL_SOURCE_LOCATION (t) = loc;
4724 /* if (type == error_mark_node)
4725 type = integer_type_node; */
4726 /* That is not done, deliberately, so that having error_mark_node
4727 as the type can suppress useless errors in the use of this variable. */
4729 DECL_NAME (t) = name;
4730 TREE_TYPE (t) = type;
4732 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4738 /* Builds and returns function declaration with NAME and TYPE. */
4741 build_fn_decl (const char *name, tree type)
4743 tree id = get_identifier (name);
4744 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4746 DECL_EXTERNAL (decl) = 1;
4747 TREE_PUBLIC (decl) = 1;
4748 DECL_ARTIFICIAL (decl) = 1;
4749 TREE_NOTHROW (decl) = 1;
4754 vec<tree, va_gc> *all_translation_units;
4756 /* Builds a new translation-unit decl with name NAME, queues it in the
4757 global list of translation-unit decls and returns it. */
4760 build_translation_unit_decl (tree name)
4762 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4764 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4765 vec_safe_push (all_translation_units, tu);
4770 /* BLOCK nodes are used to represent the structure of binding contours
4771 and declarations, once those contours have been exited and their contents
4772 compiled. This information is used for outputting debugging info. */
4775 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4777 tree block = make_node (BLOCK);
4779 BLOCK_VARS (block) = vars;
4780 BLOCK_SUBBLOCKS (block) = subblocks;
4781 BLOCK_SUPERCONTEXT (block) = supercontext;
4782 BLOCK_CHAIN (block) = chain;
4787 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4789 LOC is the location to use in tree T. */
4792 protected_set_expr_location (tree t, location_t loc)
4794 if (CAN_HAVE_LOCATION_P (t))
4795 SET_EXPR_LOCATION (t, loc);
4798 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4802 build_decl_attribute_variant (tree ddecl, tree attribute)
4804 DECL_ATTRIBUTES (ddecl) = attribute;
4808 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4809 is ATTRIBUTE and its qualifiers are QUALS.
4811 Record such modified types already made so we don't make duplicates. */
4814 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4816 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4818 inchash::hash hstate;
4822 enum tree_code code = TREE_CODE (ttype);
4824 /* Building a distinct copy of a tagged type is inappropriate; it
4825 causes breakage in code that expects there to be a one-to-one
4826 relationship between a struct and its fields.
4827 build_duplicate_type is another solution (as used in
4828 handle_transparent_union_attribute), but that doesn't play well
4829 with the stronger C++ type identity model. */
4830 if (TREE_CODE (ttype) == RECORD_TYPE
4831 || TREE_CODE (ttype) == UNION_TYPE
4832 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4833 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4835 warning (OPT_Wattributes,
4836 "ignoring attributes applied to %qT after definition",
4837 TYPE_MAIN_VARIANT (ttype));
4838 return build_qualified_type (ttype, quals);
4841 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4842 ntype = build_distinct_type_copy (ttype);
4844 TYPE_ATTRIBUTES (ntype) = attribute;
4846 hstate.add_int (code);
4847 if (TREE_TYPE (ntype))
4848 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4849 attribute_hash_list (attribute, hstate);
4851 switch (TREE_CODE (ntype))
4854 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4857 if (TYPE_DOMAIN (ntype))
4858 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4861 t = TYPE_MAX_VALUE (ntype);
4862 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4863 hstate.add_object (TREE_INT_CST_ELT (t, i));
4866 case FIXED_POINT_TYPE:
4868 unsigned int precision = TYPE_PRECISION (ntype);
4869 hstate.add_object (precision);
4876 ntype = type_hash_canon (hstate.end(), ntype);
4878 /* If the target-dependent attributes make NTYPE different from
4879 its canonical type, we will need to use structural equality
4880 checks for this type. */
4881 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4882 || !comp_type_attributes (ntype, ttype))
4883 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4884 else if (TYPE_CANONICAL (ntype) == ntype)
4885 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4887 ttype = build_qualified_type (ntype, quals);
4889 else if (TYPE_QUALS (ttype) != quals)
4890 ttype = build_qualified_type (ttype, quals);
4895 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4899 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4902 for (cl1 = clauses1, cl2 = clauses2;
4904 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4906 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4908 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4910 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4911 OMP_CLAUSE_DECL (cl2)) != 1)
4914 switch (OMP_CLAUSE_CODE (cl1))
4916 case OMP_CLAUSE_ALIGNED:
4917 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4918 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4921 case OMP_CLAUSE_LINEAR:
4922 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4923 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4926 case OMP_CLAUSE_SIMDLEN:
4927 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4928 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4937 /* Compare two constructor-element-type constants. Return 1 if the lists
4938 are known to be equal; otherwise return 0. */
4941 simple_cst_list_equal (const_tree l1, const_tree l2)
4943 while (l1 != NULL_TREE && l2 != NULL_TREE)
4945 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4948 l1 = TREE_CHAIN (l1);
4949 l2 = TREE_CHAIN (l2);
4955 /* Compare two identifier nodes representing attributes. Either one may
4956 be in wrapped __ATTR__ form. Return true if they are the same, false
4960 cmp_attrib_identifiers (const_tree attr1, const_tree attr2)
4962 /* Make sure we're dealing with IDENTIFIER_NODEs. */
4963 gcc_checking_assert (TREE_CODE (attr1) == IDENTIFIER_NODE
4964 && TREE_CODE (attr2) == IDENTIFIER_NODE);
4966 /* Identifiers can be compared directly for equality. */
4970 /* If they are not equal, they may still be one in the form
4971 'text' while the other one is in the form '__text__'. TODO:
4972 If we were storing attributes in normalized 'text' form, then
4973 this could all go away and we could take full advantage of
4974 the fact that we're comparing identifiers. :-) */
4975 const size_t attr1_len = IDENTIFIER_LENGTH (attr1);
4976 const size_t attr2_len = IDENTIFIER_LENGTH (attr2);
4978 if (attr2_len == attr1_len + 4)
4980 const char *p = IDENTIFIER_POINTER (attr2);
4981 const char *q = IDENTIFIER_POINTER (attr1);
4982 if (p[0] == '_' && p[1] == '_'
4983 && p[attr2_len - 2] == '_' && p[attr2_len - 1] == '_'
4984 && strncmp (q, p + 2, attr1_len) == 0)
4987 else if (attr2_len + 4 == attr1_len)
4989 const char *p = IDENTIFIER_POINTER (attr2);
4990 const char *q = IDENTIFIER_POINTER (attr1);
4991 if (q[0] == '_' && q[1] == '_'
4992 && q[attr1_len - 2] == '_' && q[attr1_len - 1] == '_'
4993 && strncmp (q + 2, p, attr2_len) == 0)
5000 /* Compare two attributes for their value identity. Return true if the
5001 attribute values are known to be equal; otherwise return false. */
5004 attribute_value_equal (const_tree attr1, const_tree attr2)
5006 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
5009 if (TREE_VALUE (attr1) != NULL_TREE
5010 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
5011 && TREE_VALUE (attr2) != NULL_TREE
5012 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
5014 /* Handle attribute format. */
5015 if (is_attribute_p ("format", get_attribute_name (attr1)))
5017 attr1 = TREE_VALUE (attr1);
5018 attr2 = TREE_VALUE (attr2);
5019 /* Compare the archetypes (printf/scanf/strftime/...). */
5020 if (!cmp_attrib_identifiers (TREE_VALUE (attr1),
5021 TREE_VALUE (attr2)))
5023 /* Archetypes are the same. Compare the rest. */
5024 return (simple_cst_list_equal (TREE_CHAIN (attr1),
5025 TREE_CHAIN (attr2)) == 1);
5027 return (simple_cst_list_equal (TREE_VALUE (attr1),
5028 TREE_VALUE (attr2)) == 1);
5031 if ((flag_openmp || flag_openmp_simd)
5032 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
5033 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
5034 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
5035 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
5036 TREE_VALUE (attr2));
5038 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
5041 /* Return 0 if the attributes for two types are incompatible, 1 if they
5042 are compatible, and 2 if they are nearly compatible (which causes a
5043 warning to be generated). */
5045 comp_type_attributes (const_tree type1, const_tree type2)
5047 const_tree a1 = TYPE_ATTRIBUTES (type1);
5048 const_tree a2 = TYPE_ATTRIBUTES (type2);
5053 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
5055 const struct attribute_spec *as;
5058 as = lookup_attribute_spec (get_attribute_name (a));
5059 if (!as || as->affects_type_identity == false)
5062 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
5063 if (!attr || !attribute_value_equal (a, attr))
5068 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
5070 const struct attribute_spec *as;
5072 as = lookup_attribute_spec (get_attribute_name (a));
5073 if (!as || as->affects_type_identity == false)
5076 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
5078 /* We don't need to compare trees again, as we did this
5079 already in first loop. */
5081 /* All types - affecting identity - are equal, so
5082 there is no need to call target hook for comparison. */
5086 if (lookup_attribute ("transaction_safe", CONST_CAST_TREE (a)))
5088 /* As some type combinations - like default calling-convention - might
5089 be compatible, we have to call the target hook to get the final result. */
5090 return targetm.comp_type_attributes (type1, type2);
5093 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
5096 Record such modified types already made so we don't make duplicates. */
5099 build_type_attribute_variant (tree ttype, tree attribute)
5101 return build_type_attribute_qual_variant (ttype, attribute,
5102 TYPE_QUALS (ttype));
5106 /* Reset the expression *EXPR_P, a size or position.
5108 ??? We could reset all non-constant sizes or positions. But it's cheap
5109 enough to not do so and refrain from adding workarounds to dwarf2out.c.
5111 We need to reset self-referential sizes or positions because they cannot
5112 be gimplified and thus can contain a CALL_EXPR after the gimplification
5113 is finished, which will run afoul of LTO streaming. And they need to be
5114 reset to something essentially dummy but not constant, so as to preserve
5115 the properties of the object they are attached to. */
5118 free_lang_data_in_one_sizepos (tree *expr_p)
5120 tree expr = *expr_p;
5121 if (CONTAINS_PLACEHOLDER_P (expr))
5122 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
5126 /* Reset all the fields in a binfo node BINFO. We only keep
5127 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
5130 free_lang_data_in_binfo (tree binfo)
5135 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
5137 BINFO_VIRTUALS (binfo) = NULL_TREE;
5138 BINFO_BASE_ACCESSES (binfo) = NULL;
5139 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
5140 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
5142 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
5143 free_lang_data_in_binfo (t);
5147 /* Reset all language specific information still present in TYPE. */
5150 free_lang_data_in_type (tree type)
5152 gcc_assert (TYPE_P (type));
5154 /* Give the FE a chance to remove its own data first. */
5155 lang_hooks.free_lang_data (type);
5157 TREE_LANG_FLAG_0 (type) = 0;
5158 TREE_LANG_FLAG_1 (type) = 0;
5159 TREE_LANG_FLAG_2 (type) = 0;
5160 TREE_LANG_FLAG_3 (type) = 0;
5161 TREE_LANG_FLAG_4 (type) = 0;
5162 TREE_LANG_FLAG_5 (type) = 0;
5163 TREE_LANG_FLAG_6 (type) = 0;
5165 if (TREE_CODE (type) == FUNCTION_TYPE)
5167 /* Remove the const and volatile qualifiers from arguments. The
5168 C++ front end removes them, but the C front end does not,
5169 leading to false ODR violation errors when merging two
5170 instances of the same function signature compiled by
5171 different front ends. */
5174 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5176 tree arg_type = TREE_VALUE (p);
5178 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5180 int quals = TYPE_QUALS (arg_type)
5182 & ~TYPE_QUAL_VOLATILE;
5183 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5184 free_lang_data_in_type (TREE_VALUE (p));
5186 /* C++ FE uses TREE_PURPOSE to store initial values. */
5187 TREE_PURPOSE (p) = NULL;
5189 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5190 TYPE_MINVAL (type) = NULL;
5192 if (TREE_CODE (type) == METHOD_TYPE)
5196 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5198 /* C++ FE uses TREE_PURPOSE to store initial values. */
5199 TREE_PURPOSE (p) = NULL;
5201 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
5202 TYPE_MINVAL (type) = NULL;
5205 /* Remove members that are not actually FIELD_DECLs from the field
5206 list of an aggregate. These occur in C++. */
5207 if (RECORD_OR_UNION_TYPE_P (type))
5211 /* Note that TYPE_FIELDS can be shared across distinct
5212 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5213 to be removed, we cannot set its TREE_CHAIN to NULL.
5214 Otherwise, we would not be able to find all the other fields
5215 in the other instances of this TREE_TYPE.
5217 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5219 member = TYPE_FIELDS (type);
5222 if (TREE_CODE (member) == FIELD_DECL
5223 || (TREE_CODE (member) == TYPE_DECL
5224 && !DECL_IGNORED_P (member)
5225 && debug_info_level > DINFO_LEVEL_TERSE
5226 && !is_redundant_typedef (member)))
5229 TREE_CHAIN (prev) = member;
5231 TYPE_FIELDS (type) = member;
5235 member = TREE_CHAIN (member);
5239 TREE_CHAIN (prev) = NULL_TREE;
5241 TYPE_FIELDS (type) = NULL_TREE;
5243 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
5244 and danagle the pointer from time to time. */
5245 if (TYPE_VFIELD (type) && TREE_CODE (TYPE_VFIELD (type)) != FIELD_DECL)
5246 TYPE_VFIELD (type) = NULL_TREE;
5248 /* Remove TYPE_METHODS list. While it would be nice to keep it
5249 to enable ODR warnings about different method lists, doing so
5250 seems to impractically increase size of LTO data streamed.
5251 Keep the information if TYPE_METHODS was non-NULL. This is used
5252 by function.c and pretty printers. */
5253 if (TYPE_METHODS (type))
5254 TYPE_METHODS (type) = error_mark_node;
5255 if (TYPE_BINFO (type))
5257 free_lang_data_in_binfo (TYPE_BINFO (type));
5258 /* We need to preserve link to bases and virtual table for all
5259 polymorphic types to make devirtualization machinery working.
5260 Debug output cares only about bases, but output also
5261 virtual table pointers so merging of -fdevirtualize and
5262 -fno-devirtualize units is easier. */
5263 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5264 || !flag_devirtualize)
5265 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5266 && !BINFO_VTABLE (TYPE_BINFO (type)))
5267 || debug_info_level != DINFO_LEVEL_NONE))
5268 TYPE_BINFO (type) = NULL;
5273 /* For non-aggregate types, clear out the language slot (which
5274 overloads TYPE_BINFO). */
5275 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5277 if (INTEGRAL_TYPE_P (type)
5278 || SCALAR_FLOAT_TYPE_P (type)
5279 || FIXED_POINT_TYPE_P (type))
5281 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5282 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5286 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5287 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5289 if (TYPE_CONTEXT (type)
5290 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5292 tree ctx = TYPE_CONTEXT (type);
5295 ctx = BLOCK_SUPERCONTEXT (ctx);
5297 while (ctx && TREE_CODE (ctx) == BLOCK);
5298 TYPE_CONTEXT (type) = ctx;
5303 /* Return true if DECL may need an assembler name to be set. */
5306 need_assembler_name_p (tree decl)
5308 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition
5309 Rule merging. This makes type_odr_p to return true on those types during
5310 LTO and by comparing the mangled name, we can say what types are intended
5311 to be equivalent across compilation unit.
5313 We do not store names of type_in_anonymous_namespace_p.
5315 Record, union and enumeration type have linkage that allows use
5316 to check type_in_anonymous_namespace_p. We do not mangle compound types
5317 that always can be compared structurally.
5319 Similarly for builtin types, we compare properties of their main variant.
5320 A special case are integer types where mangling do make differences
5321 between char/signed char/unsigned char etc. Storing name for these makes
5322 e.g. -fno-signed-char/-fsigned-char mismatches to be handled well.
5323 See cp/mangle.c:write_builtin_type for details. */
5325 if (flag_lto_odr_type_mering
5326 && TREE_CODE (decl) == TYPE_DECL
5328 && decl == TYPE_NAME (TREE_TYPE (decl))
5329 && TYPE_MAIN_VARIANT (TREE_TYPE (decl)) == TREE_TYPE (decl)
5330 && !TYPE_ARTIFICIAL (TREE_TYPE (decl))
5331 && (type_with_linkage_p (TREE_TYPE (decl))
5332 || TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE)
5333 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE))
5334 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5335 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5336 if (TREE_CODE (decl) != FUNCTION_DECL
5337 && TREE_CODE (decl) != VAR_DECL)
5340 /* If DECL already has its assembler name set, it does not need a
5342 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5343 || DECL_ASSEMBLER_NAME_SET_P (decl))
5346 /* Abstract decls do not need an assembler name. */
5347 if (DECL_ABSTRACT_P (decl))
5350 /* For VAR_DECLs, only static, public and external symbols need an
5352 if (TREE_CODE (decl) == VAR_DECL
5353 && !TREE_STATIC (decl)
5354 && !TREE_PUBLIC (decl)
5355 && !DECL_EXTERNAL (decl))
5358 if (TREE_CODE (decl) == FUNCTION_DECL)
5360 /* Do not set assembler name on builtins. Allow RTL expansion to
5361 decide whether to expand inline or via a regular call. */
5362 if (DECL_BUILT_IN (decl)
5363 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5366 /* Functions represented in the callgraph need an assembler name. */
5367 if (cgraph_node::get (decl) != NULL)
5370 /* Unused and not public functions don't need an assembler name. */
5371 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5379 /* Reset all language specific information still present in symbol
5383 free_lang_data_in_decl (tree decl)
5385 gcc_assert (DECL_P (decl));
5387 /* Give the FE a chance to remove its own data first. */
5388 lang_hooks.free_lang_data (decl);
5390 TREE_LANG_FLAG_0 (decl) = 0;
5391 TREE_LANG_FLAG_1 (decl) = 0;
5392 TREE_LANG_FLAG_2 (decl) = 0;
5393 TREE_LANG_FLAG_3 (decl) = 0;
5394 TREE_LANG_FLAG_4 (decl) = 0;
5395 TREE_LANG_FLAG_5 (decl) = 0;
5396 TREE_LANG_FLAG_6 (decl) = 0;
5398 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5399 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5400 if (TREE_CODE (decl) == FIELD_DECL)
5402 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5403 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5404 DECL_QUALIFIER (decl) = NULL_TREE;
5407 if (TREE_CODE (decl) == FUNCTION_DECL)
5409 struct cgraph_node *node;
5410 if (!(node = cgraph_node::get (decl))
5411 || (!node->definition && !node->clones))
5414 node->release_body ();
5417 release_function_body (decl);
5418 DECL_ARGUMENTS (decl) = NULL;
5419 DECL_RESULT (decl) = NULL;
5420 DECL_INITIAL (decl) = error_mark_node;
5423 if (gimple_has_body_p (decl))
5427 /* If DECL has a gimple body, then the context for its
5428 arguments must be DECL. Otherwise, it doesn't really
5429 matter, as we will not be emitting any code for DECL. In
5430 general, there may be other instances of DECL created by
5431 the front end and since PARM_DECLs are generally shared,
5432 their DECL_CONTEXT changes as the replicas of DECL are
5433 created. The only time where DECL_CONTEXT is important
5434 is for the FUNCTION_DECLs that have a gimple body (since
5435 the PARM_DECL will be used in the function's body). */
5436 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5437 DECL_CONTEXT (t) = decl;
5438 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5439 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5440 = target_option_default_node;
5441 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5442 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5443 = optimization_default_node;
5446 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5447 At this point, it is not needed anymore. */
5448 DECL_SAVED_TREE (decl) = NULL_TREE;
5450 /* Clear the abstract origin if it refers to a method. Otherwise
5451 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5452 origin will not be output correctly. */
5453 if (DECL_ABSTRACT_ORIGIN (decl)
5454 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5455 && RECORD_OR_UNION_TYPE_P
5456 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5457 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5459 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5460 DECL_VINDEX referring to itself into a vtable slot number as it
5461 should. Happens with functions that are copied and then forgotten
5462 about. Just clear it, it won't matter anymore. */
5463 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5464 DECL_VINDEX (decl) = NULL_TREE;
5466 else if (TREE_CODE (decl) == VAR_DECL)
5468 if ((DECL_EXTERNAL (decl)
5469 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5470 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5471 DECL_INITIAL (decl) = NULL_TREE;
5473 else if (TREE_CODE (decl) == TYPE_DECL)
5475 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5476 DECL_VISIBILITY_SPECIFIED (decl) = 0;
5477 DECL_INITIAL (decl) = NULL_TREE;
5479 else if (TREE_CODE (decl) == FIELD_DECL)
5480 DECL_INITIAL (decl) = NULL_TREE;
5481 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5482 && DECL_INITIAL (decl)
5483 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5485 /* Strip builtins from the translation-unit BLOCK. We still have targets
5486 without builtin_decl_explicit support and also builtins are shared
5487 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5488 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5492 if (TREE_CODE (var) == FUNCTION_DECL
5493 && DECL_BUILT_IN (var))
5494 *nextp = TREE_CHAIN (var);
5496 nextp = &TREE_CHAIN (var);
5502 /* Data used when collecting DECLs and TYPEs for language data removal. */
5504 struct free_lang_data_d
5506 /* Worklist to avoid excessive recursion. */
5509 /* Set of traversed objects. Used to avoid duplicate visits. */
5510 hash_set<tree> *pset;
5512 /* Array of symbols to process with free_lang_data_in_decl. */
5515 /* Array of types to process with free_lang_data_in_type. */
5520 /* Save all language fields needed to generate proper debug information
5521 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5524 save_debug_info_for_decl (tree t)
5526 /*struct saved_debug_info_d *sdi;*/
5528 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5530 /* FIXME. Partial implementation for saving debug info removed. */
5534 /* Save all language fields needed to generate proper debug information
5535 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5538 save_debug_info_for_type (tree t)
5540 /*struct saved_debug_info_d *sdi;*/
5542 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5544 /* FIXME. Partial implementation for saving debug info removed. */
5548 /* Add type or decl T to one of the list of tree nodes that need their
5549 language data removed. The lists are held inside FLD. */
5552 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5556 fld->decls.safe_push (t);
5557 if (debug_info_level > DINFO_LEVEL_TERSE)
5558 save_debug_info_for_decl (t);
5560 else if (TYPE_P (t))
5562 fld->types.safe_push (t);
5563 if (debug_info_level > DINFO_LEVEL_TERSE)
5564 save_debug_info_for_type (t);
5570 /* Push tree node T into FLD->WORKLIST. */
5573 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5575 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5576 fld->worklist.safe_push ((t));
5580 /* Operand callback helper for free_lang_data_in_node. *TP is the
5581 subtree operand being considered. */
5584 find_decls_types_r (tree *tp, int *ws, void *data)
5587 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5589 if (TREE_CODE (t) == TREE_LIST)
5592 /* Language specific nodes will be removed, so there is no need
5593 to gather anything under them. */
5594 if (is_lang_specific (t))
5602 /* Note that walk_tree does not traverse every possible field in
5603 decls, so we have to do our own traversals here. */
5604 add_tree_to_fld_list (t, fld);
5606 fld_worklist_push (DECL_NAME (t), fld);
5607 fld_worklist_push (DECL_CONTEXT (t), fld);
5608 fld_worklist_push (DECL_SIZE (t), fld);
5609 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5611 /* We are going to remove everything under DECL_INITIAL for
5612 TYPE_DECLs. No point walking them. */
5613 if (TREE_CODE (t) != TYPE_DECL)
5614 fld_worklist_push (DECL_INITIAL (t), fld);
5616 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5617 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5619 if (TREE_CODE (t) == FUNCTION_DECL)
5621 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5622 fld_worklist_push (DECL_RESULT (t), fld);
5624 else if (TREE_CODE (t) == TYPE_DECL)
5626 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5628 else if (TREE_CODE (t) == FIELD_DECL)
5630 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5631 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5632 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5633 fld_worklist_push (DECL_FCONTEXT (t), fld);
5636 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5637 && DECL_HAS_VALUE_EXPR_P (t))
5638 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5640 if (TREE_CODE (t) != FIELD_DECL
5641 && TREE_CODE (t) != TYPE_DECL)
5642 fld_worklist_push (TREE_CHAIN (t), fld);
5645 else if (TYPE_P (t))
5647 /* Note that walk_tree does not traverse every possible field in
5648 types, so we have to do our own traversals here. */
5649 add_tree_to_fld_list (t, fld);
5651 if (!RECORD_OR_UNION_TYPE_P (t))
5652 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5653 fld_worklist_push (TYPE_SIZE (t), fld);
5654 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5655 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5656 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5657 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5658 fld_worklist_push (TYPE_NAME (t), fld);
5659 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5660 them and thus do not and want not to reach unused pointer types
5662 if (!POINTER_TYPE_P (t))
5663 fld_worklist_push (TYPE_MINVAL (t), fld);
5664 if (!RECORD_OR_UNION_TYPE_P (t))
5665 fld_worklist_push (TYPE_MAXVAL (t), fld);
5666 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5667 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5668 do not and want not to reach unused variants this way. */
5669 if (TYPE_CONTEXT (t))
5671 tree ctx = TYPE_CONTEXT (t);
5672 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5673 So push that instead. */
5674 while (ctx && TREE_CODE (ctx) == BLOCK)
5675 ctx = BLOCK_SUPERCONTEXT (ctx);
5676 fld_worklist_push (ctx, fld);
5678 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5679 and want not to reach unused types this way. */
5681 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5685 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5686 fld_worklist_push (TREE_TYPE (tem), fld);
5687 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5689 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5690 && TREE_CODE (tem) == TREE_LIST)
5693 fld_worklist_push (TREE_VALUE (tem), fld);
5694 tem = TREE_CHAIN (tem);
5698 if (RECORD_OR_UNION_TYPE_P (t))
5701 /* Push all TYPE_FIELDS - there can be interleaving interesting
5702 and non-interesting things. */
5703 tem = TYPE_FIELDS (t);
5706 if (TREE_CODE (tem) == FIELD_DECL
5707 || (TREE_CODE (tem) == TYPE_DECL
5708 && !DECL_IGNORED_P (tem)
5709 && debug_info_level > DINFO_LEVEL_TERSE
5710 && !is_redundant_typedef (tem)))
5711 fld_worklist_push (tem, fld);
5712 tem = TREE_CHAIN (tem);
5716 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5719 else if (TREE_CODE (t) == BLOCK)
5722 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5723 fld_worklist_push (tem, fld);
5724 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5725 fld_worklist_push (tem, fld);
5726 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5729 if (TREE_CODE (t) != IDENTIFIER_NODE
5730 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5731 fld_worklist_push (TREE_TYPE (t), fld);
5737 /* Find decls and types in T. */
5740 find_decls_types (tree t, struct free_lang_data_d *fld)
5744 if (!fld->pset->contains (t))
5745 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5746 if (fld->worklist.is_empty ())
5748 t = fld->worklist.pop ();
5752 /* Translate all the types in LIST with the corresponding runtime
5756 get_eh_types_for_runtime (tree list)
5760 if (list == NULL_TREE)
5763 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5765 list = TREE_CHAIN (list);
5768 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5769 TREE_CHAIN (prev) = n;
5770 prev = TREE_CHAIN (prev);
5771 list = TREE_CHAIN (list);
5778 /* Find decls and types referenced in EH region R and store them in
5779 FLD->DECLS and FLD->TYPES. */
5782 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5793 /* The types referenced in each catch must first be changed to the
5794 EH types used at runtime. This removes references to FE types
5796 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5798 c->type_list = get_eh_types_for_runtime (c->type_list);
5799 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5804 case ERT_ALLOWED_EXCEPTIONS:
5805 r->u.allowed.type_list
5806 = get_eh_types_for_runtime (r->u.allowed.type_list);
5807 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5810 case ERT_MUST_NOT_THROW:
5811 walk_tree (&r->u.must_not_throw.failure_decl,
5812 find_decls_types_r, fld, fld->pset);
5818 /* Find decls and types referenced in cgraph node N and store them in
5819 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5820 look for *every* kind of DECL and TYPE node reachable from N,
5821 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5822 NAMESPACE_DECLs, etc). */
5825 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5828 struct function *fn;
5832 find_decls_types (n->decl, fld);
5834 if (!gimple_has_body_p (n->decl))
5837 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5839 fn = DECL_STRUCT_FUNCTION (n->decl);
5841 /* Traverse locals. */
5842 FOR_EACH_LOCAL_DECL (fn, ix, t)
5843 find_decls_types (t, fld);
5845 /* Traverse EH regions in FN. */
5848 FOR_ALL_EH_REGION_FN (r, fn)
5849 find_decls_types_in_eh_region (r, fld);
5852 /* Traverse every statement in FN. */
5853 FOR_EACH_BB_FN (bb, fn)
5856 gimple_stmt_iterator si;
5859 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5861 gphi *phi = psi.phi ();
5863 for (i = 0; i < gimple_phi_num_args (phi); i++)
5865 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5866 find_decls_types (*arg_p, fld);
5870 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5872 gimple *stmt = gsi_stmt (si);
5874 if (is_gimple_call (stmt))
5875 find_decls_types (gimple_call_fntype (stmt), fld);
5877 for (i = 0; i < gimple_num_ops (stmt); i++)
5879 tree arg = gimple_op (stmt, i);
5880 find_decls_types (arg, fld);
5887 /* Find decls and types referenced in varpool node N and store them in
5888 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5889 look for *every* kind of DECL and TYPE node reachable from N,
5890 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5891 NAMESPACE_DECLs, etc). */
5894 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5896 find_decls_types (v->decl, fld);
5899 /* If T needs an assembler name, have one created for it. */
5902 assign_assembler_name_if_neeeded (tree t)
5904 if (need_assembler_name_p (t))
5906 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5907 diagnostics that use input_location to show locus
5908 information. The problem here is that, at this point,
5909 input_location is generally anchored to the end of the file
5910 (since the parser is long gone), so we don't have a good
5911 position to pin it to.
5913 To alleviate this problem, this uses the location of T's
5914 declaration. Examples of this are
5915 testsuite/g++.dg/template/cond2.C and
5916 testsuite/g++.dg/template/pr35240.C. */
5917 location_t saved_location = input_location;
5918 input_location = DECL_SOURCE_LOCATION (t);
5920 decl_assembler_name (t);
5922 input_location = saved_location;
5927 /* Free language specific information for every operand and expression
5928 in every node of the call graph. This process operates in three stages:
5930 1- Every callgraph node and varpool node is traversed looking for
5931 decls and types embedded in them. This is a more exhaustive
5932 search than that done by find_referenced_vars, because it will
5933 also collect individual fields, decls embedded in types, etc.
5935 2- All the decls found are sent to free_lang_data_in_decl.
5937 3- All the types found are sent to free_lang_data_in_type.
5939 The ordering between decls and types is important because
5940 free_lang_data_in_decl sets assembler names, which includes
5941 mangling. So types cannot be freed up until assembler names have
5945 free_lang_data_in_cgraph (void)
5947 struct cgraph_node *n;
5949 struct free_lang_data_d fld;
5954 /* Initialize sets and arrays to store referenced decls and types. */
5955 fld.pset = new hash_set<tree>;
5956 fld.worklist.create (0);
5957 fld.decls.create (100);
5958 fld.types.create (100);
5960 /* Find decls and types in the body of every function in the callgraph. */
5961 FOR_EACH_FUNCTION (n)
5962 find_decls_types_in_node (n, &fld);
5964 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5965 find_decls_types (p->decl, &fld);
5967 /* Find decls and types in every varpool symbol. */
5968 FOR_EACH_VARIABLE (v)
5969 find_decls_types_in_var (v, &fld);
5971 /* Set the assembler name on every decl found. We need to do this
5972 now because free_lang_data_in_decl will invalidate data needed
5973 for mangling. This breaks mangling on interdependent decls. */
5974 FOR_EACH_VEC_ELT (fld.decls, i, t)
5975 assign_assembler_name_if_neeeded (t);
5977 /* Traverse every decl found freeing its language data. */
5978 FOR_EACH_VEC_ELT (fld.decls, i, t)
5979 free_lang_data_in_decl (t);
5981 /* Traverse every type found freeing its language data. */
5982 FOR_EACH_VEC_ELT (fld.types, i, t)
5983 free_lang_data_in_type (t);
5986 FOR_EACH_VEC_ELT (fld.types, i, t)
5991 fld.worklist.release ();
5992 fld.decls.release ();
5993 fld.types.release ();
5997 /* Free resources that are used by FE but are not needed once they are done. */
6000 free_lang_data (void)
6004 /* If we are the LTO frontend we have freed lang-specific data already. */
6006 || (!flag_generate_lto && !flag_generate_offload))
6009 /* Allocate and assign alias sets to the standard integer types
6010 while the slots are still in the way the frontends generated them. */
6011 for (i = 0; i < itk_none; ++i)
6012 if (integer_types[i])
6013 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
6015 /* Traverse the IL resetting language specific information for
6016 operands, expressions, etc. */
6017 free_lang_data_in_cgraph ();
6019 /* Create gimple variants for common types. */
6020 ptrdiff_type_node = integer_type_node;
6021 fileptr_type_node = ptr_type_node;
6023 /* Reset some langhooks. Do not reset types_compatible_p, it may
6024 still be used indirectly via the get_alias_set langhook. */
6025 lang_hooks.dwarf_name = lhd_dwarf_name;
6026 lang_hooks.decl_printable_name = gimple_decl_printable_name;
6027 lang_hooks.gimplify_expr = lhd_gimplify_expr;
6029 /* We do not want the default decl_assembler_name implementation,
6030 rather if we have fixed everything we want a wrapper around it
6031 asserting that all non-local symbols already got their assembler
6032 name and only produce assembler names for local symbols. Or rather
6033 make sure we never call decl_assembler_name on local symbols and
6034 devise a separate, middle-end private scheme for it. */
6036 /* Reset diagnostic machinery. */
6037 tree_diagnostics_defaults (global_dc);
6045 const pass_data pass_data_ipa_free_lang_data =
6047 SIMPLE_IPA_PASS, /* type */
6048 "*free_lang_data", /* name */
6049 OPTGROUP_NONE, /* optinfo_flags */
6050 TV_IPA_FREE_LANG_DATA, /* tv_id */
6051 0, /* properties_required */
6052 0, /* properties_provided */
6053 0, /* properties_destroyed */
6054 0, /* todo_flags_start */
6055 0, /* todo_flags_finish */
6058 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
6061 pass_ipa_free_lang_data (gcc::context *ctxt)
6062 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
6065 /* opt_pass methods: */
6066 virtual unsigned int execute (function *) { return free_lang_data (); }
6068 }; // class pass_ipa_free_lang_data
6072 simple_ipa_opt_pass *
6073 make_pass_ipa_free_lang_data (gcc::context *ctxt)
6075 return new pass_ipa_free_lang_data (ctxt);
6078 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
6079 ATTR_NAME. Also used internally by remove_attribute(). */
6081 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
6083 size_t ident_len = IDENTIFIER_LENGTH (ident);
6085 if (ident_len == attr_len)
6087 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
6090 else if (ident_len == attr_len + 4)
6092 /* There is the possibility that ATTR is 'text' and IDENT is
6094 const char *p = IDENTIFIER_POINTER (ident);
6095 if (p[0] == '_' && p[1] == '_'
6096 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6097 && strncmp (attr_name, p + 2, attr_len) == 0)
6104 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
6105 of ATTR_NAME, and LIST is not NULL_TREE. */
6107 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
6111 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6113 if (ident_len == attr_len)
6115 if (!strcmp (attr_name,
6116 IDENTIFIER_POINTER (get_attribute_name (list))))
6119 /* TODO: If we made sure that attributes were stored in the
6120 canonical form without '__...__' (ie, as in 'text' as opposed
6121 to '__text__') then we could avoid the following case. */
6122 else if (ident_len == attr_len + 4)
6124 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6125 if (p[0] == '_' && p[1] == '_'
6126 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6127 && strncmp (attr_name, p + 2, attr_len) == 0)
6130 list = TREE_CHAIN (list);
6136 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
6137 return a pointer to the attribute's list first element if the attribute
6138 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
6142 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
6147 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6149 if (attr_len > ident_len)
6151 list = TREE_CHAIN (list);
6155 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6157 if (strncmp (attr_name, p, attr_len) == 0)
6160 /* TODO: If we made sure that attributes were stored in the
6161 canonical form without '__...__' (ie, as in 'text' as opposed
6162 to '__text__') then we could avoid the following case. */
6163 if (p[0] == '_' && p[1] == '_' &&
6164 strncmp (attr_name, p + 2, attr_len) == 0)
6167 list = TREE_CHAIN (list);
6174 /* A variant of lookup_attribute() that can be used with an identifier
6175 as the first argument, and where the identifier can be either
6176 'text' or '__text__'.
6178 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
6179 return a pointer to the attribute's list element if the attribute
6180 is part of the list, or NULL_TREE if not found. If the attribute
6181 appears more than once, this only returns the first occurrence; the
6182 TREE_CHAIN of the return value should be passed back in if further
6183 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
6184 can be in the form 'text' or '__text__'. */
6186 lookup_ident_attribute (tree attr_identifier, tree list)
6188 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
6192 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
6193 == IDENTIFIER_NODE);
6195 if (cmp_attrib_identifiers (attr_identifier,
6196 get_attribute_name (list)))
6199 list = TREE_CHAIN (list);
6205 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6209 remove_attribute (const char *attr_name, tree list)
6212 size_t attr_len = strlen (attr_name);
6214 gcc_checking_assert (attr_name[0] != '_');
6216 for (p = &list; *p; )
6219 /* TODO: If we were storing attributes in normalized form, here
6220 we could use a simple strcmp(). */
6221 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6222 *p = TREE_CHAIN (l);
6224 p = &TREE_CHAIN (l);
6230 /* Return an attribute list that is the union of a1 and a2. */
6233 merge_attributes (tree a1, tree a2)
6237 /* Either one unset? Take the set one. */
6239 if ((attributes = a1) == 0)
6242 /* One that completely contains the other? Take it. */
6244 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6246 if (attribute_list_contained (a2, a1))
6250 /* Pick the longest list, and hang on the other list. */
6252 if (list_length (a1) < list_length (a2))
6253 attributes = a2, a2 = a1;
6255 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6258 for (a = lookup_ident_attribute (get_attribute_name (a2),
6260 a != NULL_TREE && !attribute_value_equal (a, a2);
6261 a = lookup_ident_attribute (get_attribute_name (a2),
6266 a1 = copy_node (a2);
6267 TREE_CHAIN (a1) = attributes;
6276 /* Given types T1 and T2, merge their attributes and return
6280 merge_type_attributes (tree t1, tree t2)
6282 return merge_attributes (TYPE_ATTRIBUTES (t1),
6283 TYPE_ATTRIBUTES (t2));
6286 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6290 merge_decl_attributes (tree olddecl, tree newdecl)
6292 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6293 DECL_ATTRIBUTES (newdecl));
6296 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6298 /* Specialization of merge_decl_attributes for various Windows targets.
6300 This handles the following situation:
6302 __declspec (dllimport) int foo;
6305 The second instance of `foo' nullifies the dllimport. */
6308 merge_dllimport_decl_attributes (tree old, tree new_tree)
6311 int delete_dllimport_p = 1;
6313 /* What we need to do here is remove from `old' dllimport if it doesn't
6314 appear in `new'. dllimport behaves like extern: if a declaration is
6315 marked dllimport and a definition appears later, then the object
6316 is not dllimport'd. We also remove a `new' dllimport if the old list
6317 contains dllexport: dllexport always overrides dllimport, regardless
6318 of the order of declaration. */
6319 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6320 delete_dllimport_p = 0;
6321 else if (DECL_DLLIMPORT_P (new_tree)
6322 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6324 DECL_DLLIMPORT_P (new_tree) = 0;
6325 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6326 "dllimport ignored", new_tree);
6328 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6330 /* Warn about overriding a symbol that has already been used, e.g.:
6331 extern int __attribute__ ((dllimport)) foo;
6332 int* bar () {return &foo;}
6335 if (TREE_USED (old))
6337 warning (0, "%q+D redeclared without dllimport attribute "
6338 "after being referenced with dll linkage", new_tree);
6339 /* If we have used a variable's address with dllimport linkage,
6340 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6341 decl may already have had TREE_CONSTANT computed.
6342 We still remove the attribute so that assembler code refers
6343 to '&foo rather than '_imp__foo'. */
6344 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6345 DECL_DLLIMPORT_P (new_tree) = 1;
6348 /* Let an inline definition silently override the external reference,
6349 but otherwise warn about attribute inconsistency. */
6350 else if (TREE_CODE (new_tree) == VAR_DECL
6351 || !DECL_DECLARED_INLINE_P (new_tree))
6352 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6353 "previous dllimport ignored", new_tree);
6356 delete_dllimport_p = 0;
6358 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6360 if (delete_dllimport_p)
6361 a = remove_attribute ("dllimport", a);
6366 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6367 struct attribute_spec.handler. */
6370 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6376 /* These attributes may apply to structure and union types being created,
6377 but otherwise should pass to the declaration involved. */
6380 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6381 | (int) ATTR_FLAG_ARRAY_NEXT))
6383 *no_add_attrs = true;
6384 return tree_cons (name, args, NULL_TREE);
6386 if (TREE_CODE (node) == RECORD_TYPE
6387 || TREE_CODE (node) == UNION_TYPE)
6389 node = TYPE_NAME (node);
6395 warning (OPT_Wattributes, "%qE attribute ignored",
6397 *no_add_attrs = true;
6402 if (TREE_CODE (node) != FUNCTION_DECL
6403 && TREE_CODE (node) != VAR_DECL
6404 && TREE_CODE (node) != TYPE_DECL)
6406 *no_add_attrs = true;
6407 warning (OPT_Wattributes, "%qE attribute ignored",
6412 if (TREE_CODE (node) == TYPE_DECL
6413 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6414 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6416 *no_add_attrs = true;
6417 warning (OPT_Wattributes, "%qE attribute ignored",
6422 is_dllimport = is_attribute_p ("dllimport", name);
6424 /* Report error on dllimport ambiguities seen now before they cause
6428 /* Honor any target-specific overrides. */
6429 if (!targetm.valid_dllimport_attribute_p (node))
6430 *no_add_attrs = true;
6432 else if (TREE_CODE (node) == FUNCTION_DECL
6433 && DECL_DECLARED_INLINE_P (node))
6435 warning (OPT_Wattributes, "inline function %q+D declared as "
6436 " dllimport: attribute ignored", node);
6437 *no_add_attrs = true;
6439 /* Like MS, treat definition of dllimported variables and
6440 non-inlined functions on declaration as syntax errors. */
6441 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6443 error ("function %q+D definition is marked dllimport", node);
6444 *no_add_attrs = true;
6447 else if (TREE_CODE (node) == VAR_DECL)
6449 if (DECL_INITIAL (node))
6451 error ("variable %q+D definition is marked dllimport",
6453 *no_add_attrs = true;
6456 /* `extern' needn't be specified with dllimport.
6457 Specify `extern' now and hope for the best. Sigh. */
6458 DECL_EXTERNAL (node) = 1;
6459 /* Also, implicitly give dllimport'd variables declared within
6460 a function global scope, unless declared static. */
6461 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6462 TREE_PUBLIC (node) = 1;
6465 if (*no_add_attrs == false)
6466 DECL_DLLIMPORT_P (node) = 1;
6468 else if (TREE_CODE (node) == FUNCTION_DECL
6469 && DECL_DECLARED_INLINE_P (node)
6470 && flag_keep_inline_dllexport)
6471 /* An exported function, even if inline, must be emitted. */
6472 DECL_EXTERNAL (node) = 0;
6474 /* Report error if symbol is not accessible at global scope. */
6475 if (!TREE_PUBLIC (node)
6476 && (TREE_CODE (node) == VAR_DECL
6477 || TREE_CODE (node) == FUNCTION_DECL))
6479 error ("external linkage required for symbol %q+D because of "
6480 "%qE attribute", node, name);
6481 *no_add_attrs = true;
6484 /* A dllexport'd entity must have default visibility so that other
6485 program units (shared libraries or the main executable) can see
6486 it. A dllimport'd entity must have default visibility so that
6487 the linker knows that undefined references within this program
6488 unit can be resolved by the dynamic linker. */
6491 if (DECL_VISIBILITY_SPECIFIED (node)
6492 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6493 error ("%qE implies default visibility, but %qD has already "
6494 "been declared with a different visibility",
6496 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6497 DECL_VISIBILITY_SPECIFIED (node) = 1;
6503 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6505 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6506 of the various TYPE_QUAL values. */
6509 set_type_quals (tree type, int type_quals)
6511 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6512 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6513 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6514 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6515 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6518 /* Returns true iff unqualified CAND and BASE are equivalent. */
6521 check_base_type (const_tree cand, const_tree base)
6523 return (TYPE_NAME (cand) == TYPE_NAME (base)
6524 /* Apparently this is needed for Objective-C. */
6525 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6526 /* Check alignment. */
6527 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6528 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6529 TYPE_ATTRIBUTES (base)));
6532 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6535 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6537 return (TYPE_QUALS (cand) == type_quals
6538 && check_base_type (cand, base));
6541 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6544 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6546 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6547 && TYPE_NAME (cand) == TYPE_NAME (base)
6548 /* Apparently this is needed for Objective-C. */
6549 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6550 /* Check alignment. */
6551 && TYPE_ALIGN (cand) == align
6552 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6553 TYPE_ATTRIBUTES (base)));
6556 /* This function checks to see if TYPE matches the size one of the built-in
6557 atomic types, and returns that core atomic type. */
6560 find_atomic_core_type (tree type)
6562 tree base_atomic_type;
6564 /* Only handle complete types. */
6565 if (TYPE_SIZE (type) == NULL_TREE)
6568 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6572 base_atomic_type = atomicQI_type_node;
6576 base_atomic_type = atomicHI_type_node;
6580 base_atomic_type = atomicSI_type_node;
6584 base_atomic_type = atomicDI_type_node;
6588 base_atomic_type = atomicTI_type_node;
6592 base_atomic_type = NULL_TREE;
6595 return base_atomic_type;
6598 /* Return a version of the TYPE, qualified as indicated by the
6599 TYPE_QUALS, if one exists. If no qualified version exists yet,
6600 return NULL_TREE. */
6603 get_qualified_type (tree type, int type_quals)
6607 if (TYPE_QUALS (type) == type_quals)
6610 /* Search the chain of variants to see if there is already one there just
6611 like the one we need to have. If so, use that existing one. We must
6612 preserve the TYPE_NAME, since there is code that depends on this. */
6613 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6614 if (check_qualified_type (t, type, type_quals))
6620 /* Like get_qualified_type, but creates the type if it does not
6621 exist. This function never returns NULL_TREE. */
6624 build_qualified_type (tree type, int type_quals)
6628 /* See if we already have the appropriate qualified variant. */
6629 t = get_qualified_type (type, type_quals);
6631 /* If not, build it. */
6634 t = build_variant_type_copy (type);
6635 set_type_quals (t, type_quals);
6637 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6639 /* See if this object can map to a basic atomic type. */
6640 tree atomic_type = find_atomic_core_type (type);
6643 /* Ensure the alignment of this type is compatible with
6644 the required alignment of the atomic type. */
6645 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6646 SET_TYPE_ALIGN (t, TYPE_ALIGN (atomic_type));
6650 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6651 /* Propagate structural equality. */
6652 SET_TYPE_STRUCTURAL_EQUALITY (t);
6653 else if (TYPE_CANONICAL (type) != type)
6654 /* Build the underlying canonical type, since it is different
6657 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6658 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6661 /* T is its own canonical type. */
6662 TYPE_CANONICAL (t) = t;
6669 /* Create a variant of type T with alignment ALIGN. */
6672 build_aligned_type (tree type, unsigned int align)
6676 if (TYPE_PACKED (type)
6677 || TYPE_ALIGN (type) == align)
6680 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6681 if (check_aligned_type (t, type, align))
6684 t = build_variant_type_copy (type);
6685 SET_TYPE_ALIGN (t, align);
6690 /* Create a new distinct copy of TYPE. The new type is made its own
6691 MAIN_VARIANT. If TYPE requires structural equality checks, the
6692 resulting type requires structural equality checks; otherwise, its
6693 TYPE_CANONICAL points to itself. */
6696 build_distinct_type_copy (tree type)
6698 tree t = copy_node (type);
6700 TYPE_POINTER_TO (t) = 0;
6701 TYPE_REFERENCE_TO (t) = 0;
6703 /* Set the canonical type either to a new equivalence class, or
6704 propagate the need for structural equality checks. */
6705 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6706 SET_TYPE_STRUCTURAL_EQUALITY (t);
6708 TYPE_CANONICAL (t) = t;
6710 /* Make it its own variant. */
6711 TYPE_MAIN_VARIANT (t) = t;
6712 TYPE_NEXT_VARIANT (t) = 0;
6714 /* We do not record methods in type copies nor variants
6715 so we do not need to keep them up to date when new method
6717 if (RECORD_OR_UNION_TYPE_P (t))
6718 TYPE_METHODS (t) = NULL_TREE;
6720 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6721 whose TREE_TYPE is not t. This can also happen in the Ada
6722 frontend when using subtypes. */
6727 /* Create a new variant of TYPE, equivalent but distinct. This is so
6728 the caller can modify it. TYPE_CANONICAL for the return type will
6729 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6730 are considered equal by the language itself (or that both types
6731 require structural equality checks). */
6734 build_variant_type_copy (tree type)
6736 tree t, m = TYPE_MAIN_VARIANT (type);
6738 t = build_distinct_type_copy (type);
6740 /* Since we're building a variant, assume that it is a non-semantic
6741 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6742 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6743 /* Type variants have no alias set defined. */
6744 TYPE_ALIAS_SET (t) = -1;
6746 /* Add the new type to the chain of variants of TYPE. */
6747 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6748 TYPE_NEXT_VARIANT (m) = t;
6749 TYPE_MAIN_VARIANT (t) = m;
6754 /* Return true if the from tree in both tree maps are equal. */
6757 tree_map_base_eq (const void *va, const void *vb)
6759 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6760 *const b = (const struct tree_map_base *) vb;
6761 return (a->from == b->from);
6764 /* Hash a from tree in a tree_base_map. */
6767 tree_map_base_hash (const void *item)
6769 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6772 /* Return true if this tree map structure is marked for garbage collection
6773 purposes. We simply return true if the from tree is marked, so that this
6774 structure goes away when the from tree goes away. */
6777 tree_map_base_marked_p (const void *p)
6779 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6782 /* Hash a from tree in a tree_map. */
6785 tree_map_hash (const void *item)
6787 return (((const struct tree_map *) item)->hash);
6790 /* Hash a from tree in a tree_decl_map. */
6793 tree_decl_map_hash (const void *item)
6795 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6798 /* Return the initialization priority for DECL. */
6801 decl_init_priority_lookup (tree decl)
6803 symtab_node *snode = symtab_node::get (decl);
6806 return DEFAULT_INIT_PRIORITY;
6808 snode->get_init_priority ();
6811 /* Return the finalization priority for DECL. */
6814 decl_fini_priority_lookup (tree decl)
6816 cgraph_node *node = cgraph_node::get (decl);
6819 return DEFAULT_INIT_PRIORITY;
6821 node->get_fini_priority ();
6824 /* Set the initialization priority for DECL to PRIORITY. */
6827 decl_init_priority_insert (tree decl, priority_type priority)
6829 struct symtab_node *snode;
6831 if (priority == DEFAULT_INIT_PRIORITY)
6833 snode = symtab_node::get (decl);
6837 else if (TREE_CODE (decl) == VAR_DECL)
6838 snode = varpool_node::get_create (decl);
6840 snode = cgraph_node::get_create (decl);
6841 snode->set_init_priority (priority);
6844 /* Set the finalization priority for DECL to PRIORITY. */
6847 decl_fini_priority_insert (tree decl, priority_type priority)
6849 struct cgraph_node *node;
6851 if (priority == DEFAULT_INIT_PRIORITY)
6853 node = cgraph_node::get (decl);
6858 node = cgraph_node::get_create (decl);
6859 node->set_fini_priority (priority);
6862 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6865 print_debug_expr_statistics (void)
6867 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6868 (long) debug_expr_for_decl->size (),
6869 (long) debug_expr_for_decl->elements (),
6870 debug_expr_for_decl->collisions ());
6873 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6876 print_value_expr_statistics (void)
6878 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6879 (long) value_expr_for_decl->size (),
6880 (long) value_expr_for_decl->elements (),
6881 value_expr_for_decl->collisions ());
6884 /* Lookup a debug expression for FROM, and return it if we find one. */
6887 decl_debug_expr_lookup (tree from)
6889 struct tree_decl_map *h, in;
6890 in.base.from = from;
6892 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6898 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6901 decl_debug_expr_insert (tree from, tree to)
6903 struct tree_decl_map *h;
6905 h = ggc_alloc<tree_decl_map> ();
6906 h->base.from = from;
6908 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6911 /* Lookup a value expression for FROM, and return it if we find one. */
6914 decl_value_expr_lookup (tree from)
6916 struct tree_decl_map *h, in;
6917 in.base.from = from;
6919 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6925 /* Insert a mapping FROM->TO in the value expression hashtable. */
6928 decl_value_expr_insert (tree from, tree to)
6930 struct tree_decl_map *h;
6932 h = ggc_alloc<tree_decl_map> ();
6933 h->base.from = from;
6935 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6938 /* Lookup a vector of debug arguments for FROM, and return it if we
6942 decl_debug_args_lookup (tree from)
6944 struct tree_vec_map *h, in;
6946 if (!DECL_HAS_DEBUG_ARGS_P (from))
6948 gcc_checking_assert (debug_args_for_decl != NULL);
6949 in.base.from = from;
6950 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6956 /* Insert a mapping FROM->empty vector of debug arguments in the value
6957 expression hashtable. */
6960 decl_debug_args_insert (tree from)
6962 struct tree_vec_map *h;
6965 if (DECL_HAS_DEBUG_ARGS_P (from))
6966 return decl_debug_args_lookup (from);
6967 if (debug_args_for_decl == NULL)
6968 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6969 h = ggc_alloc<tree_vec_map> ();
6970 h->base.from = from;
6972 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6974 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6978 /* Hashing of types so that we don't make duplicates.
6979 The entry point is `type_hash_canon'. */
6981 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6982 with types in the TREE_VALUE slots), by adding the hash codes
6983 of the individual types. */
6986 type_hash_list (const_tree list, inchash::hash &hstate)
6990 for (tail = list; tail; tail = TREE_CHAIN (tail))
6991 if (TREE_VALUE (tail) != error_mark_node)
6992 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6995 /* These are the Hashtable callback functions. */
6997 /* Returns true iff the types are equivalent. */
7000 type_cache_hasher::equal (type_hash *a, type_hash *b)
7002 /* First test the things that are the same for all types. */
7003 if (a->hash != b->hash
7004 || TREE_CODE (a->type) != TREE_CODE (b->type)
7005 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
7006 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
7007 TYPE_ATTRIBUTES (b->type))
7008 || (TREE_CODE (a->type) != COMPLEX_TYPE
7009 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
7012 /* Be careful about comparing arrays before and after the element type
7013 has been completed; don't compare TYPE_ALIGN unless both types are
7015 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
7016 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
7017 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
7020 switch (TREE_CODE (a->type))
7025 case REFERENCE_TYPE:
7030 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
7033 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
7034 && !(TYPE_VALUES (a->type)
7035 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
7036 && TYPE_VALUES (b->type)
7037 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
7038 && type_list_equal (TYPE_VALUES (a->type),
7039 TYPE_VALUES (b->type))))
7047 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
7049 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
7050 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
7051 TYPE_MAX_VALUE (b->type)))
7052 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
7053 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
7054 TYPE_MIN_VALUE (b->type))));
7056 case FIXED_POINT_TYPE:
7057 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
7060 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
7063 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
7064 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7065 || (TYPE_ARG_TYPES (a->type)
7066 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7067 && TYPE_ARG_TYPES (b->type)
7068 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7069 && type_list_equal (TYPE_ARG_TYPES (a->type),
7070 TYPE_ARG_TYPES (b->type)))))
7074 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
7078 case QUAL_UNION_TYPE:
7079 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
7080 || (TYPE_FIELDS (a->type)
7081 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
7082 && TYPE_FIELDS (b->type)
7083 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
7084 && type_list_equal (TYPE_FIELDS (a->type),
7085 TYPE_FIELDS (b->type))));
7088 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
7089 || (TYPE_ARG_TYPES (a->type)
7090 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
7091 && TYPE_ARG_TYPES (b->type)
7092 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
7093 && type_list_equal (TYPE_ARG_TYPES (a->type),
7094 TYPE_ARG_TYPES (b->type))))
7102 if (lang_hooks.types.type_hash_eq != NULL)
7103 return lang_hooks.types.type_hash_eq (a->type, b->type);
7108 /* Given TYPE, and HASHCODE its hash code, return the canonical
7109 object for an identical type if one already exists.
7110 Otherwise, return TYPE, and record it as the canonical object.
7112 To use this function, first create a type of the sort you want.
7113 Then compute its hash code from the fields of the type that
7114 make it different from other similar types.
7115 Then call this function and use the value. */
7118 type_hash_canon (unsigned int hashcode, tree type)
7123 /* The hash table only contains main variants, so ensure that's what we're
7125 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
7127 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
7128 must call that routine before comparing TYPE_ALIGNs. */
7134 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
7137 tree t1 = ((type_hash *) *loc)->type;
7138 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
7144 struct type_hash *h;
7146 h = ggc_alloc<type_hash> ();
7156 print_type_hash_statistics (void)
7158 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
7159 (long) type_hash_table->size (),
7160 (long) type_hash_table->elements (),
7161 type_hash_table->collisions ());
7164 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
7165 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
7166 by adding the hash codes of the individual attributes. */
7169 attribute_hash_list (const_tree list, inchash::hash &hstate)
7173 for (tail = list; tail; tail = TREE_CHAIN (tail))
7174 /* ??? Do we want to add in TREE_VALUE too? */
7175 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
7178 /* Given two lists of attributes, return true if list l2 is
7179 equivalent to l1. */
7182 attribute_list_equal (const_tree l1, const_tree l2)
7187 return attribute_list_contained (l1, l2)
7188 && attribute_list_contained (l2, l1);
7191 /* Given two lists of attributes, return true if list L2 is
7192 completely contained within L1. */
7193 /* ??? This would be faster if attribute names were stored in a canonicalized
7194 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7195 must be used to show these elements are equivalent (which they are). */
7196 /* ??? It's not clear that attributes with arguments will always be handled
7200 attribute_list_contained (const_tree l1, const_tree l2)
7204 /* First check the obvious, maybe the lists are identical. */
7208 /* Maybe the lists are similar. */
7209 for (t1 = l1, t2 = l2;
7211 && get_attribute_name (t1) == get_attribute_name (t2)
7212 && TREE_VALUE (t1) == TREE_VALUE (t2);
7213 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7216 /* Maybe the lists are equal. */
7217 if (t1 == 0 && t2 == 0)
7220 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7223 /* This CONST_CAST is okay because lookup_attribute does not
7224 modify its argument and the return value is assigned to a
7226 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7227 CONST_CAST_TREE (l1));
7228 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7229 attr = lookup_ident_attribute (get_attribute_name (t2),
7233 if (attr == NULL_TREE)
7240 /* Given two lists of types
7241 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7242 return 1 if the lists contain the same types in the same order.
7243 Also, the TREE_PURPOSEs must match. */
7246 type_list_equal (const_tree l1, const_tree l2)
7250 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7251 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7252 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7253 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7254 && (TREE_TYPE (TREE_PURPOSE (t1))
7255 == TREE_TYPE (TREE_PURPOSE (t2))))))
7261 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7262 given by TYPE. If the argument list accepts variable arguments,
7263 then this function counts only the ordinary arguments. */
7266 type_num_arguments (const_tree type)
7271 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7272 /* If the function does not take a variable number of arguments,
7273 the last element in the list will have type `void'. */
7274 if (VOID_TYPE_P (TREE_VALUE (t)))
7282 /* Nonzero if integer constants T1 and T2
7283 represent the same constant value. */
7286 tree_int_cst_equal (const_tree t1, const_tree t2)
7291 if (t1 == 0 || t2 == 0)
7294 if (TREE_CODE (t1) == INTEGER_CST
7295 && TREE_CODE (t2) == INTEGER_CST
7296 && wi::to_widest (t1) == wi::to_widest (t2))
7302 /* Return true if T is an INTEGER_CST whose numerical value (extended
7303 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7306 tree_fits_shwi_p (const_tree t)
7308 return (t != NULL_TREE
7309 && TREE_CODE (t) == INTEGER_CST
7310 && wi::fits_shwi_p (wi::to_widest (t)));
7313 /* Return true if T is an INTEGER_CST whose numerical value (extended
7314 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7317 tree_fits_uhwi_p (const_tree t)
7319 return (t != NULL_TREE
7320 && TREE_CODE (t) == INTEGER_CST
7321 && wi::fits_uhwi_p (wi::to_widest (t)));
7324 /* T is an INTEGER_CST whose numerical value (extended according to
7325 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7329 tree_to_shwi (const_tree t)
7331 gcc_assert (tree_fits_shwi_p (t));
7332 return TREE_INT_CST_LOW (t);
7335 /* T is an INTEGER_CST whose numerical value (extended according to
7336 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7339 unsigned HOST_WIDE_INT
7340 tree_to_uhwi (const_tree t)
7342 gcc_assert (tree_fits_uhwi_p (t));
7343 return TREE_INT_CST_LOW (t);
7346 /* Return the most significant (sign) bit of T. */
7349 tree_int_cst_sign_bit (const_tree t)
7351 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7353 return wi::extract_uhwi (t, bitno, 1);
7356 /* Return an indication of the sign of the integer constant T.
7357 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7358 Note that -1 will never be returned if T's type is unsigned. */
7361 tree_int_cst_sgn (const_tree t)
7363 if (wi::eq_p (t, 0))
7365 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7367 else if (wi::neg_p (t))
7373 /* Return the minimum number of bits needed to represent VALUE in a
7374 signed or unsigned type, UNSIGNEDP says which. */
7377 tree_int_cst_min_precision (tree value, signop sgn)
7379 /* If the value is negative, compute its negative minus 1. The latter
7380 adjustment is because the absolute value of the largest negative value
7381 is one larger than the largest positive value. This is equivalent to
7382 a bit-wise negation, so use that operation instead. */
7384 if (tree_int_cst_sgn (value) < 0)
7385 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7387 /* Return the number of bits needed, taking into account the fact
7388 that we need one more bit for a signed than unsigned type.
7389 If value is 0 or -1, the minimum precision is 1 no matter
7390 whether unsignedp is true or false. */
7392 if (integer_zerop (value))
7395 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7398 /* Return truthvalue of whether T1 is the same tree structure as T2.
7399 Return 1 if they are the same.
7400 Return 0 if they are understandably different.
7401 Return -1 if either contains tree structure not understood by
7405 simple_cst_equal (const_tree t1, const_tree t2)
7407 enum tree_code code1, code2;
7413 if (t1 == 0 || t2 == 0)
7416 code1 = TREE_CODE (t1);
7417 code2 = TREE_CODE (t2);
7419 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7421 if (CONVERT_EXPR_CODE_P (code2)
7422 || code2 == NON_LVALUE_EXPR)
7423 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7425 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7428 else if (CONVERT_EXPR_CODE_P (code2)
7429 || code2 == NON_LVALUE_EXPR)
7430 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7438 return wi::to_widest (t1) == wi::to_widest (t2);
7441 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
7444 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7447 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7448 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7449 TREE_STRING_LENGTH (t1)));
7453 unsigned HOST_WIDE_INT idx;
7454 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7455 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7457 if (vec_safe_length (v1) != vec_safe_length (v2))
7460 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7461 /* ??? Should we handle also fields here? */
7462 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7468 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7471 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7474 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7477 const_tree arg1, arg2;
7478 const_call_expr_arg_iterator iter1, iter2;
7479 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7480 arg2 = first_const_call_expr_arg (t2, &iter2);
7482 arg1 = next_const_call_expr_arg (&iter1),
7483 arg2 = next_const_call_expr_arg (&iter2))
7485 cmp = simple_cst_equal (arg1, arg2);
7489 return arg1 == arg2;
7493 /* Special case: if either target is an unallocated VAR_DECL,
7494 it means that it's going to be unified with whatever the
7495 TARGET_EXPR is really supposed to initialize, so treat it
7496 as being equivalent to anything. */
7497 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7498 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7499 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7500 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7501 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7502 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7505 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7510 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7512 case WITH_CLEANUP_EXPR:
7513 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7517 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
7520 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7521 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7535 /* This general rule works for most tree codes. All exceptions should be
7536 handled above. If this is a language-specific tree code, we can't
7537 trust what might be in the operand, so say we don't know
7539 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7542 switch (TREE_CODE_CLASS (code1))
7546 case tcc_comparison:
7547 case tcc_expression:
7551 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7553 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7565 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7566 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7567 than U, respectively. */
7570 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7572 if (tree_int_cst_sgn (t) < 0)
7574 else if (!tree_fits_uhwi_p (t))
7576 else if (TREE_INT_CST_LOW (t) == u)
7578 else if (TREE_INT_CST_LOW (t) < u)
7584 /* Return true if SIZE represents a constant size that is in bounds of
7585 what the middle-end and the backend accepts (covering not more than
7586 half of the address-space). */
7589 valid_constant_size_p (const_tree size)
7591 if (! tree_fits_uhwi_p (size)
7592 || TREE_OVERFLOW (size)
7593 || tree_int_cst_sign_bit (size) != 0)
7598 /* Return the precision of the type, or for a complex or vector type the
7599 precision of the type of its elements. */
7602 element_precision (const_tree type)
7605 type = TREE_TYPE (type);
7606 enum tree_code code = TREE_CODE (type);
7607 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7608 type = TREE_TYPE (type);
7610 return TYPE_PRECISION (type);
7613 /* Return true if CODE represents an associative tree code. Otherwise
7616 associative_tree_code (enum tree_code code)
7635 /* Return true if CODE represents a commutative tree code. Otherwise
7638 commutative_tree_code (enum tree_code code)
7644 case MULT_HIGHPART_EXPR:
7652 case UNORDERED_EXPR:
7656 case TRUTH_AND_EXPR:
7657 case TRUTH_XOR_EXPR:
7659 case WIDEN_MULT_EXPR:
7660 case VEC_WIDEN_MULT_HI_EXPR:
7661 case VEC_WIDEN_MULT_LO_EXPR:
7662 case VEC_WIDEN_MULT_EVEN_EXPR:
7663 case VEC_WIDEN_MULT_ODD_EXPR:
7672 /* Return true if CODE represents a ternary tree code for which the
7673 first two operands are commutative. Otherwise return false. */
7675 commutative_ternary_tree_code (enum tree_code code)
7679 case WIDEN_MULT_PLUS_EXPR:
7680 case WIDEN_MULT_MINUS_EXPR:
7691 /* Returns true if CODE can overflow. */
7694 operation_can_overflow (enum tree_code code)
7702 /* Can overflow in various ways. */
7704 case TRUNC_DIV_EXPR:
7705 case EXACT_DIV_EXPR:
7706 case FLOOR_DIV_EXPR:
7708 /* For INT_MIN / -1. */
7715 /* These operators cannot overflow. */
7720 /* Returns true if CODE operating on operands of type TYPE doesn't overflow, or
7721 ftrapv doesn't generate trapping insns for CODE. */
7724 operation_no_trapping_overflow (tree type, enum tree_code code)
7726 gcc_checking_assert (ANY_INTEGRAL_TYPE_P (type));
7728 /* We don't generate instructions that trap on overflow for complex or vector
7730 if (!INTEGRAL_TYPE_P (type))
7733 if (!TYPE_OVERFLOW_TRAPS (type))
7743 /* These operators can overflow, and -ftrapv generates trapping code for
7746 case TRUNC_DIV_EXPR:
7747 case EXACT_DIV_EXPR:
7748 case FLOOR_DIV_EXPR:
7751 /* These operators can overflow, but -ftrapv does not generate trapping
7755 /* These operators cannot overflow. */
7763 /* Generate a hash value for an expression. This can be used iteratively
7764 by passing a previous result as the HSTATE argument.
7766 This function is intended to produce the same hash for expressions which
7767 would compare equal using operand_equal_p. */
7769 add_expr (const_tree t, inchash::hash &hstate)
7772 enum tree_code code;
7773 enum tree_code_class tclass;
7777 hstate.merge_hash (0);
7781 code = TREE_CODE (t);
7785 /* Alas, constants aren't shared, so we can't rely on pointer
7788 hstate.merge_hash (0);
7791 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7792 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7796 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7797 hstate.merge_hash (val2);
7802 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7803 hstate.merge_hash (val2);
7807 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7810 inchash::add_expr (TREE_REALPART (t), hstate);
7811 inchash::add_expr (TREE_IMAGPART (t), hstate);
7816 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7817 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7821 /* We can just compare by pointer. */
7822 hstate.add_wide_int (SSA_NAME_VERSION (t));
7824 case PLACEHOLDER_EXPR:
7825 /* The node itself doesn't matter. */
7828 /* A list of expressions, for a CALL_EXPR or as the elements of a
7830 for (; t; t = TREE_CHAIN (t))
7831 inchash::add_expr (TREE_VALUE (t), hstate);
7835 unsigned HOST_WIDE_INT idx;
7837 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7839 inchash::add_expr (field, hstate);
7840 inchash::add_expr (value, hstate);
7845 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7846 Otherwise nodes that compare equal according to operand_equal_p might
7847 get different hash codes. However, don't do this for machine specific
7848 or front end builtins, since the function code is overloaded in those
7850 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7851 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7853 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7854 code = TREE_CODE (t);
7858 tclass = TREE_CODE_CLASS (code);
7860 if (tclass == tcc_declaration)
7862 /* DECL's have a unique ID */
7863 hstate.add_wide_int (DECL_UID (t));
7867 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7869 hstate.add_object (code);
7871 /* Don't hash the type, that can lead to having nodes which
7872 compare equal according to operand_equal_p, but which
7873 have different hash codes. */
7874 if (CONVERT_EXPR_CODE_P (code)
7875 || code == NON_LVALUE_EXPR)
7877 /* Make sure to include signness in the hash computation. */
7878 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7879 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7882 else if (commutative_tree_code (code))
7884 /* It's a commutative expression. We want to hash it the same
7885 however it appears. We do this by first hashing both operands
7886 and then rehashing based on the order of their independent
7888 inchash::hash one, two;
7889 inchash::add_expr (TREE_OPERAND (t, 0), one);
7890 inchash::add_expr (TREE_OPERAND (t, 1), two);
7891 hstate.add_commutative (one, two);
7894 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7895 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7903 /* Constructors for pointer, array and function types.
7904 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7905 constructed by language-dependent code, not here.) */
7907 /* Construct, lay out and return the type of pointers to TO_TYPE with
7908 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7909 reference all of memory. If such a type has already been
7910 constructed, reuse it. */
7913 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7917 bool could_alias = can_alias_all;
7919 if (to_type == error_mark_node)
7920 return error_mark_node;
7922 /* If the pointed-to type has the may_alias attribute set, force
7923 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7924 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7925 can_alias_all = true;
7927 /* In some cases, languages will have things that aren't a POINTER_TYPE
7928 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7929 In that case, return that type without regard to the rest of our
7932 ??? This is a kludge, but consistent with the way this function has
7933 always operated and there doesn't seem to be a good way to avoid this
7935 if (TYPE_POINTER_TO (to_type) != 0
7936 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7937 return TYPE_POINTER_TO (to_type);
7939 /* First, if we already have a type for pointers to TO_TYPE and it's
7940 the proper mode, use it. */
7941 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7942 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7945 t = make_node (POINTER_TYPE);
7947 TREE_TYPE (t) = to_type;
7948 SET_TYPE_MODE (t, mode);
7949 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7950 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7951 TYPE_POINTER_TO (to_type) = t;
7953 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
7954 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
7955 SET_TYPE_STRUCTURAL_EQUALITY (t);
7956 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
7958 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7961 /* Lay out the type. This function has many callers that are concerned
7962 with expression-construction, and this simplifies them all. */
7968 /* By default build pointers in ptr_mode. */
7971 build_pointer_type (tree to_type)
7973 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7974 : TYPE_ADDR_SPACE (to_type);
7975 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7976 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7979 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7982 build_reference_type_for_mode (tree to_type, machine_mode mode,
7986 bool could_alias = can_alias_all;
7988 if (to_type == error_mark_node)
7989 return error_mark_node;
7991 /* If the pointed-to type has the may_alias attribute set, force
7992 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7993 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7994 can_alias_all = true;
7996 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7997 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7998 In that case, return that type without regard to the rest of our
8001 ??? This is a kludge, but consistent with the way this function has
8002 always operated and there doesn't seem to be a good way to avoid this
8004 if (TYPE_REFERENCE_TO (to_type) != 0
8005 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
8006 return TYPE_REFERENCE_TO (to_type);
8008 /* First, if we already have a type for pointers to TO_TYPE and it's
8009 the proper mode, use it. */
8010 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
8011 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
8014 t = make_node (REFERENCE_TYPE);
8016 TREE_TYPE (t) = to_type;
8017 SET_TYPE_MODE (t, mode);
8018 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
8019 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
8020 TYPE_REFERENCE_TO (to_type) = t;
8022 /* During LTO we do not set TYPE_CANONICAL of pointers and references. */
8023 if (TYPE_STRUCTURAL_EQUALITY_P (to_type) || in_lto_p)
8024 SET_TYPE_STRUCTURAL_EQUALITY (t);
8025 else if (TYPE_CANONICAL (to_type) != to_type || could_alias)
8027 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
8036 /* Build the node for the type of references-to-TO_TYPE by default
8040 build_reference_type (tree to_type)
8042 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
8043 : TYPE_ADDR_SPACE (to_type);
8044 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
8045 return build_reference_type_for_mode (to_type, pointer_mode, false);
8048 #define MAX_INT_CACHED_PREC \
8049 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8050 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
8052 /* Builds a signed or unsigned integer type of precision PRECISION.
8053 Used for C bitfields whose precision does not match that of
8054 built-in target types. */
8056 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
8062 unsignedp = MAX_INT_CACHED_PREC + 1;
8064 if (precision <= MAX_INT_CACHED_PREC)
8066 itype = nonstandard_integer_type_cache[precision + unsignedp];
8071 itype = make_node (INTEGER_TYPE);
8072 TYPE_PRECISION (itype) = precision;
8075 fixup_unsigned_type (itype);
8077 fixup_signed_type (itype);
8080 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
8081 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
8082 if (precision <= MAX_INT_CACHED_PREC)
8083 nonstandard_integer_type_cache[precision + unsignedp] = ret;
8088 #define MAX_BOOL_CACHED_PREC \
8089 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
8090 static GTY(()) tree nonstandard_boolean_type_cache[MAX_BOOL_CACHED_PREC + 1];
8092 /* Builds a boolean type of precision PRECISION.
8093 Used for boolean vectors to choose proper vector element size. */
8095 build_nonstandard_boolean_type (unsigned HOST_WIDE_INT precision)
8099 if (precision <= MAX_BOOL_CACHED_PREC)
8101 type = nonstandard_boolean_type_cache[precision];
8106 type = make_node (BOOLEAN_TYPE);
8107 TYPE_PRECISION (type) = precision;
8108 fixup_signed_type (type);
8110 if (precision <= MAX_INT_CACHED_PREC)
8111 nonstandard_boolean_type_cache[precision] = type;
8116 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
8117 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
8118 is true, reuse such a type that has already been constructed. */
8121 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
8123 tree itype = make_node (INTEGER_TYPE);
8124 inchash::hash hstate;
8126 TREE_TYPE (itype) = type;
8128 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
8129 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
8131 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
8132 SET_TYPE_MODE (itype, TYPE_MODE (type));
8133 TYPE_SIZE (itype) = TYPE_SIZE (type);
8134 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
8135 SET_TYPE_ALIGN (itype, TYPE_ALIGN (type));
8136 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
8141 if ((TYPE_MIN_VALUE (itype)
8142 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
8143 || (TYPE_MAX_VALUE (itype)
8144 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
8146 /* Since we cannot reliably merge this type, we need to compare it using
8147 structural equality checks. */
8148 SET_TYPE_STRUCTURAL_EQUALITY (itype);
8152 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
8153 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
8154 hstate.merge_hash (TYPE_HASH (type));
8155 itype = type_hash_canon (hstate.end (), itype);
8160 /* Wrapper around build_range_type_1 with SHARED set to true. */
8163 build_range_type (tree type, tree lowval, tree highval)
8165 return build_range_type_1 (type, lowval, highval, true);
8168 /* Wrapper around build_range_type_1 with SHARED set to false. */
8171 build_nonshared_range_type (tree type, tree lowval, tree highval)
8173 return build_range_type_1 (type, lowval, highval, false);
8176 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
8177 MAXVAL should be the maximum value in the domain
8178 (one less than the length of the array).
8180 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
8181 We don't enforce this limit, that is up to caller (e.g. language front end).
8182 The limit exists because the result is a signed type and we don't handle
8183 sizes that use more than one HOST_WIDE_INT. */
8186 build_index_type (tree maxval)
8188 return build_range_type (sizetype, size_zero_node, maxval);
8191 /* Return true if the debug information for TYPE, a subtype, should be emitted
8192 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
8193 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
8194 debug info and doesn't reflect the source code. */
8197 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
8199 tree base_type = TREE_TYPE (type), low, high;
8201 /* Subrange types have a base type which is an integral type. */
8202 if (!INTEGRAL_TYPE_P (base_type))
8205 /* Get the real bounds of the subtype. */
8206 if (lang_hooks.types.get_subrange_bounds)
8207 lang_hooks.types.get_subrange_bounds (type, &low, &high);
8210 low = TYPE_MIN_VALUE (type);
8211 high = TYPE_MAX_VALUE (type);
8214 /* If the type and its base type have the same representation and the same
8215 name, then the type is not a subrange but a copy of the base type. */
8216 if ((TREE_CODE (base_type) == INTEGER_TYPE
8217 || TREE_CODE (base_type) == BOOLEAN_TYPE)
8218 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
8219 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
8220 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
8221 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
8231 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
8232 and number of elements specified by the range of values of INDEX_TYPE.
8233 If SHARED is true, reuse such a type that has already been constructed. */
8236 build_array_type_1 (tree elt_type, tree index_type, bool shared)
8240 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
8242 error ("arrays of functions are not meaningful");
8243 elt_type = integer_type_node;
8246 t = make_node (ARRAY_TYPE);
8247 TREE_TYPE (t) = elt_type;
8248 TYPE_DOMAIN (t) = index_type;
8249 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
8252 /* If the element type is incomplete at this point we get marked for
8253 structural equality. Do not record these types in the canonical
8255 if (TYPE_STRUCTURAL_EQUALITY_P (t))
8260 inchash::hash hstate;
8261 hstate.add_object (TYPE_HASH (elt_type));
8263 hstate.add_object (TYPE_HASH (index_type));
8264 t = type_hash_canon (hstate.end (), t);
8267 if (TYPE_CANONICAL (t) == t)
8269 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
8270 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))
8272 SET_TYPE_STRUCTURAL_EQUALITY (t);
8273 else if (TYPE_CANONICAL (elt_type) != elt_type
8274 || (index_type && TYPE_CANONICAL (index_type) != index_type))
8276 = build_array_type_1 (TYPE_CANONICAL (elt_type),
8278 ? TYPE_CANONICAL (index_type) : NULL_TREE,
8285 /* Wrapper around build_array_type_1 with SHARED set to true. */
8288 build_array_type (tree elt_type, tree index_type)
8290 return build_array_type_1 (elt_type, index_type, true);
8293 /* Wrapper around build_array_type_1 with SHARED set to false. */
8296 build_nonshared_array_type (tree elt_type, tree index_type)
8298 return build_array_type_1 (elt_type, index_type, false);
8301 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8305 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8307 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8310 /* Recursively examines the array elements of TYPE, until a non-array
8311 element type is found. */
8314 strip_array_types (tree type)
8316 while (TREE_CODE (type) == ARRAY_TYPE)
8317 type = TREE_TYPE (type);
8322 /* Computes the canonical argument types from the argument type list
8325 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8326 on entry to this function, or if any of the ARGTYPES are
8329 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8330 true on entry to this function, or if any of the ARGTYPES are
8333 Returns a canonical argument list, which may be ARGTYPES when the
8334 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8335 true) or would not differ from ARGTYPES. */
8338 maybe_canonicalize_argtypes (tree argtypes,
8339 bool *any_structural_p,
8340 bool *any_noncanonical_p)
8343 bool any_noncanonical_argtypes_p = false;
8345 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8347 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8348 /* Fail gracefully by stating that the type is structural. */
8349 *any_structural_p = true;
8350 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8351 *any_structural_p = true;
8352 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8353 || TREE_PURPOSE (arg))
8354 /* If the argument has a default argument, we consider it
8355 non-canonical even though the type itself is canonical.
8356 That way, different variants of function and method types
8357 with default arguments will all point to the variant with
8358 no defaults as their canonical type. */
8359 any_noncanonical_argtypes_p = true;
8362 if (*any_structural_p)
8365 if (any_noncanonical_argtypes_p)
8367 /* Build the canonical list of argument types. */
8368 tree canon_argtypes = NULL_TREE;
8369 bool is_void = false;
8371 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8373 if (arg == void_list_node)
8376 canon_argtypes = tree_cons (NULL_TREE,
8377 TYPE_CANONICAL (TREE_VALUE (arg)),
8381 canon_argtypes = nreverse (canon_argtypes);
8383 canon_argtypes = chainon (canon_argtypes, void_list_node);
8385 /* There is a non-canonical type. */
8386 *any_noncanonical_p = true;
8387 return canon_argtypes;
8390 /* The canonical argument types are the same as ARGTYPES. */
8394 /* Construct, lay out and return
8395 the type of functions returning type VALUE_TYPE
8396 given arguments of types ARG_TYPES.
8397 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8398 are data type nodes for the arguments of the function.
8399 If such a type has already been constructed, reuse it. */
8402 build_function_type (tree value_type, tree arg_types)
8405 inchash::hash hstate;
8406 bool any_structural_p, any_noncanonical_p;
8407 tree canon_argtypes;
8409 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8411 error ("function return type cannot be function");
8412 value_type = integer_type_node;
8415 /* Make a node of the sort we want. */
8416 t = make_node (FUNCTION_TYPE);
8417 TREE_TYPE (t) = value_type;
8418 TYPE_ARG_TYPES (t) = arg_types;
8420 /* If we already have such a type, use the old one. */
8421 hstate.add_object (TYPE_HASH (value_type));
8422 type_hash_list (arg_types, hstate);
8423 t = type_hash_canon (hstate.end (), t);
8425 /* Set up the canonical type. */
8426 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8427 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8428 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8430 &any_noncanonical_p);
8431 if (any_structural_p)
8432 SET_TYPE_STRUCTURAL_EQUALITY (t);
8433 else if (any_noncanonical_p)
8434 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8437 if (!COMPLETE_TYPE_P (t))
8442 /* Build a function type. The RETURN_TYPE is the type returned by the
8443 function. If VAARGS is set, no void_type_node is appended to the
8444 list. ARGP must be always be terminated be a NULL_TREE. */
8447 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8451 t = va_arg (argp, tree);
8452 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8453 args = tree_cons (NULL_TREE, t, args);
8458 if (args != NULL_TREE)
8459 args = nreverse (args);
8460 gcc_assert (last != void_list_node);
8462 else if (args == NULL_TREE)
8463 args = void_list_node;
8467 args = nreverse (args);
8468 TREE_CHAIN (last) = void_list_node;
8470 args = build_function_type (return_type, args);
8475 /* Build a function type. The RETURN_TYPE is the type returned by the
8476 function. If additional arguments are provided, they are
8477 additional argument types. The list of argument types must always
8478 be terminated by NULL_TREE. */
8481 build_function_type_list (tree return_type, ...)
8486 va_start (p, return_type);
8487 args = build_function_type_list_1 (false, return_type, p);
8492 /* Build a variable argument function type. The RETURN_TYPE is the
8493 type returned by the function. If additional arguments are provided,
8494 they are additional argument types. The list of argument types must
8495 always be terminated by NULL_TREE. */
8498 build_varargs_function_type_list (tree return_type, ...)
8503 va_start (p, return_type);
8504 args = build_function_type_list_1 (true, return_type, p);
8510 /* Build a function type. RETURN_TYPE is the type returned by the
8511 function; VAARGS indicates whether the function takes varargs. The
8512 function takes N named arguments, the types of which are provided in
8516 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8520 tree t = vaargs ? NULL_TREE : void_list_node;
8522 for (i = n - 1; i >= 0; i--)
8523 t = tree_cons (NULL_TREE, arg_types[i], t);
8525 return build_function_type (return_type, t);
8528 /* Build a function type. RETURN_TYPE is the type returned by the
8529 function. The function takes N named arguments, the types of which
8530 are provided in ARG_TYPES. */
8533 build_function_type_array (tree return_type, int n, tree *arg_types)
8535 return build_function_type_array_1 (false, return_type, n, arg_types);
8538 /* Build a variable argument function type. RETURN_TYPE is the type
8539 returned by the function. The function takes N named arguments, the
8540 types of which are provided in ARG_TYPES. */
8543 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8545 return build_function_type_array_1 (true, return_type, n, arg_types);
8548 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8549 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8550 for the method. An implicit additional parameter (of type
8551 pointer-to-BASETYPE) is added to the ARGTYPES. */
8554 build_method_type_directly (tree basetype,
8560 inchash::hash hstate;
8561 bool any_structural_p, any_noncanonical_p;
8562 tree canon_argtypes;
8564 /* Make a node of the sort we want. */
8565 t = make_node (METHOD_TYPE);
8567 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8568 TREE_TYPE (t) = rettype;
8569 ptype = build_pointer_type (basetype);
8571 /* The actual arglist for this function includes a "hidden" argument
8572 which is "this". Put it into the list of argument types. */
8573 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8574 TYPE_ARG_TYPES (t) = argtypes;
8576 /* If we already have such a type, use the old one. */
8577 hstate.add_object (TYPE_HASH (basetype));
8578 hstate.add_object (TYPE_HASH (rettype));
8579 type_hash_list (argtypes, hstate);
8580 t = type_hash_canon (hstate.end (), t);
8582 /* Set up the canonical type. */
8584 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8585 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8587 = (TYPE_CANONICAL (basetype) != basetype
8588 || TYPE_CANONICAL (rettype) != rettype);
8589 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8591 &any_noncanonical_p);
8592 if (any_structural_p)
8593 SET_TYPE_STRUCTURAL_EQUALITY (t);
8594 else if (any_noncanonical_p)
8596 = build_method_type_directly (TYPE_CANONICAL (basetype),
8597 TYPE_CANONICAL (rettype),
8599 if (!COMPLETE_TYPE_P (t))
8605 /* Construct, lay out and return the type of methods belonging to class
8606 BASETYPE and whose arguments and values are described by TYPE.
8607 If that type exists already, reuse it.
8608 TYPE must be a FUNCTION_TYPE node. */
8611 build_method_type (tree basetype, tree type)
8613 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8615 return build_method_type_directly (basetype,
8617 TYPE_ARG_TYPES (type));
8620 /* Construct, lay out and return the type of offsets to a value
8621 of type TYPE, within an object of type BASETYPE.
8622 If a suitable offset type exists already, reuse it. */
8625 build_offset_type (tree basetype, tree type)
8628 inchash::hash hstate;
8630 /* Make a node of the sort we want. */
8631 t = make_node (OFFSET_TYPE);
8633 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8634 TREE_TYPE (t) = type;
8636 /* If we already have such a type, use the old one. */
8637 hstate.add_object (TYPE_HASH (basetype));
8638 hstate.add_object (TYPE_HASH (type));
8639 t = type_hash_canon (hstate.end (), t);
8641 if (!COMPLETE_TYPE_P (t))
8644 if (TYPE_CANONICAL (t) == t)
8646 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8647 || TYPE_STRUCTURAL_EQUALITY_P (type))
8648 SET_TYPE_STRUCTURAL_EQUALITY (t);
8649 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8650 || TYPE_CANONICAL (type) != type)
8652 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8653 TYPE_CANONICAL (type));
8659 /* Create a complex type whose components are COMPONENT_TYPE.
8661 If NAMED is true, the type is given a TYPE_NAME. We do not always
8662 do so because this creates a DECL node and thus make the DECL_UIDs
8663 dependent on the type canonicalization hashtable, which is GC-ed,
8664 so the DECL_UIDs would not be stable wrt garbage collection. */
8667 build_complex_type (tree component_type, bool named)
8670 inchash::hash hstate;
8672 gcc_assert (INTEGRAL_TYPE_P (component_type)
8673 || SCALAR_FLOAT_TYPE_P (component_type)
8674 || FIXED_POINT_TYPE_P (component_type));
8676 /* Make a node of the sort we want. */
8677 t = make_node (COMPLEX_TYPE);
8679 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8681 /* If we already have such a type, use the old one. */
8682 hstate.add_object (TYPE_HASH (component_type));
8683 t = type_hash_canon (hstate.end (), t);
8685 if (!COMPLETE_TYPE_P (t))
8688 if (TYPE_CANONICAL (t) == t)
8690 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8691 SET_TYPE_STRUCTURAL_EQUALITY (t);
8692 else if (TYPE_CANONICAL (component_type) != component_type)
8694 = build_complex_type (TYPE_CANONICAL (component_type), named);
8697 /* We need to create a name, since complex is a fundamental type. */
8698 if (!TYPE_NAME (t) && named)
8701 if (component_type == char_type_node)
8702 name = "complex char";
8703 else if (component_type == signed_char_type_node)
8704 name = "complex signed char";
8705 else if (component_type == unsigned_char_type_node)
8706 name = "complex unsigned char";
8707 else if (component_type == short_integer_type_node)
8708 name = "complex short int";
8709 else if (component_type == short_unsigned_type_node)
8710 name = "complex short unsigned int";
8711 else if (component_type == integer_type_node)
8712 name = "complex int";
8713 else if (component_type == unsigned_type_node)
8714 name = "complex unsigned int";
8715 else if (component_type == long_integer_type_node)
8716 name = "complex long int";
8717 else if (component_type == long_unsigned_type_node)
8718 name = "complex long unsigned int";
8719 else if (component_type == long_long_integer_type_node)
8720 name = "complex long long int";
8721 else if (component_type == long_long_unsigned_type_node)
8722 name = "complex long long unsigned int";
8727 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8728 get_identifier (name), t);
8731 return build_qualified_type (t, TYPE_QUALS (component_type));
8734 /* If TYPE is a real or complex floating-point type and the target
8735 does not directly support arithmetic on TYPE then return the wider
8736 type to be used for arithmetic on TYPE. Otherwise, return
8740 excess_precision_type (tree type)
8742 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8744 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8745 switch (TREE_CODE (type))
8748 switch (flt_eval_method)
8751 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8752 return double_type_node;
8755 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8756 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8757 return long_double_type_node;
8764 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8766 switch (flt_eval_method)
8769 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8770 return complex_double_type_node;
8773 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8774 || (TYPE_MODE (TREE_TYPE (type))
8775 == TYPE_MODE (double_type_node)))
8776 return complex_long_double_type_node;
8789 /* Return OP, stripped of any conversions to wider types as much as is safe.
8790 Converting the value back to OP's type makes a value equivalent to OP.
8792 If FOR_TYPE is nonzero, we return a value which, if converted to
8793 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8795 OP must have integer, real or enumeral type. Pointers are not allowed!
8797 There are some cases where the obvious value we could return
8798 would regenerate to OP if converted to OP's type,
8799 but would not extend like OP to wider types.
8800 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8801 For example, if OP is (unsigned short)(signed char)-1,
8802 we avoid returning (signed char)-1 if FOR_TYPE is int,
8803 even though extending that to an unsigned short would regenerate OP,
8804 since the result of extending (signed char)-1 to (int)
8805 is different from (int) OP. */
8808 get_unwidened (tree op, tree for_type)
8810 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8811 tree type = TREE_TYPE (op);
8813 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8815 = (for_type != 0 && for_type != type
8816 && final_prec > TYPE_PRECISION (type)
8817 && TYPE_UNSIGNED (type));
8820 while (CONVERT_EXPR_P (op))
8824 /* TYPE_PRECISION on vector types has different meaning
8825 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8826 so avoid them here. */
8827 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8830 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8831 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8833 /* Truncations are many-one so cannot be removed.
8834 Unless we are later going to truncate down even farther. */
8836 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8839 /* See what's inside this conversion. If we decide to strip it,
8841 op = TREE_OPERAND (op, 0);
8843 /* If we have not stripped any zero-extensions (uns is 0),
8844 we can strip any kind of extension.
8845 If we have previously stripped a zero-extension,
8846 only zero-extensions can safely be stripped.
8847 Any extension can be stripped if the bits it would produce
8848 are all going to be discarded later by truncating to FOR_TYPE. */
8852 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8854 /* TYPE_UNSIGNED says whether this is a zero-extension.
8855 Let's avoid computing it if it does not affect WIN
8856 and if UNS will not be needed again. */
8858 || CONVERT_EXPR_P (op))
8859 && TYPE_UNSIGNED (TREE_TYPE (op)))
8867 /* If we finally reach a constant see if it fits in for_type and
8868 in that case convert it. */
8870 && TREE_CODE (win) == INTEGER_CST
8871 && TREE_TYPE (win) != for_type
8872 && int_fits_type_p (win, for_type))
8873 win = fold_convert (for_type, win);
8878 /* Return OP or a simpler expression for a narrower value
8879 which can be sign-extended or zero-extended to give back OP.
8880 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8881 or 0 if the value should be sign-extended. */
8884 get_narrower (tree op, int *unsignedp_ptr)
8889 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8891 while (TREE_CODE (op) == NOP_EXPR)
8894 = (TYPE_PRECISION (TREE_TYPE (op))
8895 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8897 /* Truncations are many-one so cannot be removed. */
8901 /* See what's inside this conversion. If we decide to strip it,
8906 op = TREE_OPERAND (op, 0);
8907 /* An extension: the outermost one can be stripped,
8908 but remember whether it is zero or sign extension. */
8910 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8911 /* Otherwise, if a sign extension has been stripped,
8912 only sign extensions can now be stripped;
8913 if a zero extension has been stripped, only zero-extensions. */
8914 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8918 else /* bitschange == 0 */
8920 /* A change in nominal type can always be stripped, but we must
8921 preserve the unsignedness. */
8923 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8925 op = TREE_OPERAND (op, 0);
8926 /* Keep trying to narrow, but don't assign op to win if it
8927 would turn an integral type into something else. */
8928 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8935 if (TREE_CODE (op) == COMPONENT_REF
8936 /* Since type_for_size always gives an integer type. */
8937 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8938 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8939 /* Ensure field is laid out already. */
8940 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8941 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8943 unsigned HOST_WIDE_INT innerprec
8944 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8945 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8946 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8947 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8949 /* We can get this structure field in a narrower type that fits it,
8950 but the resulting extension to its nominal type (a fullword type)
8951 must satisfy the same conditions as for other extensions.
8953 Do this only for fields that are aligned (not bit-fields),
8954 because when bit-field insns will be used there is no
8955 advantage in doing this. */
8957 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8958 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8959 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8963 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8964 win = fold_convert (type, op);
8968 *unsignedp_ptr = uns;
8972 /* Return true if integer constant C has a value that is permissible
8973 for TYPE, an integral type. */
8976 int_fits_type_p (const_tree c, const_tree type)
8978 tree type_low_bound, type_high_bound;
8979 bool ok_for_low_bound, ok_for_high_bound;
8980 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8982 /* Non-standard boolean types can have arbitrary precision but various
8983 transformations assume that they can only take values 0 and +/-1. */
8984 if (TREE_CODE (type) == BOOLEAN_TYPE)
8985 return wi::fits_to_boolean_p (c, type);
8988 type_low_bound = TYPE_MIN_VALUE (type);
8989 type_high_bound = TYPE_MAX_VALUE (type);
8991 /* If at least one bound of the type is a constant integer, we can check
8992 ourselves and maybe make a decision. If no such decision is possible, but
8993 this type is a subtype, try checking against that. Otherwise, use
8994 fits_to_tree_p, which checks against the precision.
8996 Compute the status for each possibly constant bound, and return if we see
8997 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8998 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8999 for "constant known to fit". */
9001 /* Check if c >= type_low_bound. */
9002 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
9004 if (tree_int_cst_lt (c, type_low_bound))
9006 ok_for_low_bound = true;
9009 ok_for_low_bound = false;
9011 /* Check if c <= type_high_bound. */
9012 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
9014 if (tree_int_cst_lt (type_high_bound, c))
9016 ok_for_high_bound = true;
9019 ok_for_high_bound = false;
9021 /* If the constant fits both bounds, the result is known. */
9022 if (ok_for_low_bound && ok_for_high_bound)
9025 /* Perform some generic filtering which may allow making a decision
9026 even if the bounds are not constant. First, negative integers
9027 never fit in unsigned types, */
9028 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
9031 /* Second, narrower types always fit in wider ones. */
9032 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
9035 /* Third, unsigned integers with top bit set never fit signed types. */
9036 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
9038 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
9039 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
9041 /* When a tree_cst is converted to a wide-int, the precision
9042 is taken from the type. However, if the precision of the
9043 mode underneath the type is smaller than that, it is
9044 possible that the value will not fit. The test below
9045 fails if any bit is set between the sign bit of the
9046 underlying mode and the top bit of the type. */
9047 if (wi::ne_p (wi::zext (c, prec - 1), c))
9050 else if (wi::neg_p (c))
9054 /* If we haven't been able to decide at this point, there nothing more we
9055 can check ourselves here. Look at the base type if we have one and it
9056 has the same precision. */
9057 if (TREE_CODE (type) == INTEGER_TYPE
9058 && TREE_TYPE (type) != 0
9059 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
9061 type = TREE_TYPE (type);
9065 /* Or to fits_to_tree_p, if nothing else. */
9066 return wi::fits_to_tree_p (c, type);
9069 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
9070 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
9071 represented (assuming two's-complement arithmetic) within the bit
9072 precision of the type are returned instead. */
9075 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
9077 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
9078 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
9079 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
9082 if (TYPE_UNSIGNED (type))
9083 mpz_set_ui (min, 0);
9086 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
9087 wi::to_mpz (mn, min, SIGNED);
9091 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
9092 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
9093 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
9096 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
9097 wi::to_mpz (mn, max, TYPE_SIGN (type));
9101 /* Return true if VAR is an automatic variable defined in function FN. */
9104 auto_var_in_fn_p (const_tree var, const_tree fn)
9106 return (DECL_P (var) && DECL_CONTEXT (var) == fn
9107 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
9108 || TREE_CODE (var) == PARM_DECL)
9109 && ! TREE_STATIC (var))
9110 || TREE_CODE (var) == LABEL_DECL
9111 || TREE_CODE (var) == RESULT_DECL));
9114 /* Subprogram of following function. Called by walk_tree.
9116 Return *TP if it is an automatic variable or parameter of the
9117 function passed in as DATA. */
9120 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
9122 tree fn = (tree) data;
9127 else if (DECL_P (*tp)
9128 && auto_var_in_fn_p (*tp, fn))
9134 /* Returns true if T is, contains, or refers to a type with variable
9135 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
9136 arguments, but not the return type. If FN is nonzero, only return
9137 true if a modifier of the type or position of FN is a variable or
9138 parameter inside FN.
9140 This concept is more general than that of C99 'variably modified types':
9141 in C99, a struct type is never variably modified because a VLA may not
9142 appear as a structure member. However, in GNU C code like:
9144 struct S { int i[f()]; };
9146 is valid, and other languages may define similar constructs. */
9149 variably_modified_type_p (tree type, tree fn)
9153 /* Test if T is either variable (if FN is zero) or an expression containing
9154 a variable in FN. If TYPE isn't gimplified, return true also if
9155 gimplify_one_sizepos would gimplify the expression into a local
9157 #define RETURN_TRUE_IF_VAR(T) \
9158 do { tree _t = (T); \
9159 if (_t != NULL_TREE \
9160 && _t != error_mark_node \
9161 && TREE_CODE (_t) != INTEGER_CST \
9162 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
9164 || (!TYPE_SIZES_GIMPLIFIED (type) \
9165 && !is_gimple_sizepos (_t)) \
9166 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
9167 return true; } while (0)
9169 if (type == error_mark_node)
9172 /* If TYPE itself has variable size, it is variably modified. */
9173 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
9174 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
9176 switch (TREE_CODE (type))
9179 case REFERENCE_TYPE:
9181 if (variably_modified_type_p (TREE_TYPE (type), fn))
9187 /* If TYPE is a function type, it is variably modified if the
9188 return type is variably modified. */
9189 if (variably_modified_type_p (TREE_TYPE (type), fn))
9195 case FIXED_POINT_TYPE:
9198 /* Scalar types are variably modified if their end points
9200 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
9201 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
9206 case QUAL_UNION_TYPE:
9207 /* We can't see if any of the fields are variably-modified by the
9208 definition we normally use, since that would produce infinite
9209 recursion via pointers. */
9210 /* This is variably modified if some field's type is. */
9211 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
9212 if (TREE_CODE (t) == FIELD_DECL)
9214 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
9215 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
9216 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
9218 if (TREE_CODE (type) == QUAL_UNION_TYPE)
9219 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
9224 /* Do not call ourselves to avoid infinite recursion. This is
9225 variably modified if the element type is. */
9226 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
9227 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
9234 /* The current language may have other cases to check, but in general,
9235 all other types are not variably modified. */
9236 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
9238 #undef RETURN_TRUE_IF_VAR
9241 /* Given a DECL or TYPE, return the scope in which it was declared, or
9242 NULL_TREE if there is no containing scope. */
9245 get_containing_scope (const_tree t)
9247 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
9250 /* Return the innermost context enclosing DECL that is
9251 a FUNCTION_DECL, or zero if none. */
9254 decl_function_context (const_tree decl)
9258 if (TREE_CODE (decl) == ERROR_MARK)
9261 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
9262 where we look up the function at runtime. Such functions always take
9263 a first argument of type 'pointer to real context'.
9265 C++ should really be fixed to use DECL_CONTEXT for the real context,
9266 and use something else for the "virtual context". */
9267 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
9270 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
9272 context = DECL_CONTEXT (decl);
9274 while (context && TREE_CODE (context) != FUNCTION_DECL)
9276 if (TREE_CODE (context) == BLOCK)
9277 context = BLOCK_SUPERCONTEXT (context);
9279 context = get_containing_scope (context);
9285 /* Return the innermost context enclosing DECL that is
9286 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
9287 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
9290 decl_type_context (const_tree decl)
9292 tree context = DECL_CONTEXT (decl);
9295 switch (TREE_CODE (context))
9297 case NAMESPACE_DECL:
9298 case TRANSLATION_UNIT_DECL:
9303 case QUAL_UNION_TYPE:
9308 context = DECL_CONTEXT (context);
9312 context = BLOCK_SUPERCONTEXT (context);
9322 /* CALL is a CALL_EXPR. Return the declaration for the function
9323 called, or NULL_TREE if the called function cannot be
9327 get_callee_fndecl (const_tree call)
9331 if (call == error_mark_node)
9332 return error_mark_node;
9334 /* It's invalid to call this function with anything but a
9336 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9338 /* The first operand to the CALL is the address of the function
9340 addr = CALL_EXPR_FN (call);
9342 /* If there is no function, return early. */
9343 if (addr == NULL_TREE)
9348 /* If this is a readonly function pointer, extract its initial value. */
9349 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9350 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9351 && DECL_INITIAL (addr))
9352 addr = DECL_INITIAL (addr);
9354 /* If the address is just `&f' for some function `f', then we know
9355 that `f' is being called. */
9356 if (TREE_CODE (addr) == ADDR_EXPR
9357 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9358 return TREE_OPERAND (addr, 0);
9360 /* We couldn't figure out what was being called. */
9364 /* If CALL_EXPR CALL calls a normal built-in function or an internal function,
9365 return the associated function code, otherwise return CFN_LAST. */
9368 get_call_combined_fn (const_tree call)
9370 /* It's invalid to call this function with anything but a CALL_EXPR. */
9371 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9373 if (!CALL_EXPR_FN (call))
9374 return as_combined_fn (CALL_EXPR_IFN (call));
9376 tree fndecl = get_callee_fndecl (call);
9377 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9378 return as_combined_fn (DECL_FUNCTION_CODE (fndecl));
9383 #define TREE_MEM_USAGE_SPACES 40
9385 /* Print debugging information about tree nodes generated during the compile,
9386 and any language-specific information. */
9389 dump_tree_statistics (void)
9391 if (GATHER_STATISTICS)
9394 int total_nodes, total_bytes;
9395 fprintf (stderr, "\nKind Nodes Bytes\n");
9396 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9397 total_nodes = total_bytes = 0;
9398 for (i = 0; i < (int) all_kinds; i++)
9400 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9401 tree_node_counts[i], tree_node_sizes[i]);
9402 total_nodes += tree_node_counts[i];
9403 total_bytes += tree_node_sizes[i];
9405 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9406 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9407 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9408 fprintf (stderr, "Code Nodes\n");
9409 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9410 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9411 fprintf (stderr, "%-32s %7d\n", get_tree_code_name ((enum tree_code) i),
9412 tree_code_counts[i]);
9413 mem_usage::print_dash_line (TREE_MEM_USAGE_SPACES);
9414 fprintf (stderr, "\n");
9415 ssanames_print_statistics ();
9416 fprintf (stderr, "\n");
9417 phinodes_print_statistics ();
9418 fprintf (stderr, "\n");
9421 fprintf (stderr, "(No per-node statistics)\n");
9423 print_type_hash_statistics ();
9424 print_debug_expr_statistics ();
9425 print_value_expr_statistics ();
9426 lang_hooks.print_statistics ();
9429 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9431 /* Generate a crc32 of a byte. */
9434 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9438 for (ix = bits; ix--; value <<= 1)
9442 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9449 /* Generate a crc32 of a 32-bit unsigned. */
9452 crc32_unsigned (unsigned chksum, unsigned value)
9454 return crc32_unsigned_bits (chksum, value, 32);
9457 /* Generate a crc32 of a byte. */
9460 crc32_byte (unsigned chksum, char byte)
9462 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9465 /* Generate a crc32 of a string. */
9468 crc32_string (unsigned chksum, const char *string)
9472 chksum = crc32_byte (chksum, *string);
9478 /* P is a string that will be used in a symbol. Mask out any characters
9479 that are not valid in that context. */
9482 clean_symbol_name (char *p)
9486 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9489 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9496 /* For anonymous aggregate types, we need some sort of name to
9497 hold on to. In practice, this should not appear, but it should
9498 not be harmful if it does. */
9500 anon_aggrname_p(const_tree id_node)
9502 #ifndef NO_DOT_IN_LABEL
9503 return (IDENTIFIER_POINTER (id_node)[0] == '.'
9504 && IDENTIFIER_POINTER (id_node)[1] == '_');
9505 #else /* NO_DOT_IN_LABEL */
9506 #ifndef NO_DOLLAR_IN_LABEL
9507 return (IDENTIFIER_POINTER (id_node)[0] == '$' \
9508 && IDENTIFIER_POINTER (id_node)[1] == '_');
9509 #else /* NO_DOLLAR_IN_LABEL */
9510 #define ANON_AGGRNAME_PREFIX "__anon_"
9511 return (!strncmp (IDENTIFIER_POINTER (id_node), ANON_AGGRNAME_PREFIX,
9512 sizeof (ANON_AGGRNAME_PREFIX) - 1));
9513 #endif /* NO_DOLLAR_IN_LABEL */
9514 #endif /* NO_DOT_IN_LABEL */
9517 /* Return a format for an anonymous aggregate name. */
9519 anon_aggrname_format()
9521 #ifndef NO_DOT_IN_LABEL
9523 #else /* NO_DOT_IN_LABEL */
9524 #ifndef NO_DOLLAR_IN_LABEL
9526 #else /* NO_DOLLAR_IN_LABEL */
9528 #endif /* NO_DOLLAR_IN_LABEL */
9529 #endif /* NO_DOT_IN_LABEL */
9532 /* Generate a name for a special-purpose function.
9533 The generated name may need to be unique across the whole link.
9534 Changes to this function may also require corresponding changes to
9535 xstrdup_mask_random.
9536 TYPE is some string to identify the purpose of this function to the
9537 linker or collect2; it must start with an uppercase letter,
9539 I - for constructors
9541 N - for C++ anonymous namespaces
9542 F - for DWARF unwind frame information. */
9545 get_file_function_name (const char *type)
9551 /* If we already have a name we know to be unique, just use that. */
9552 if (first_global_object_name)
9553 p = q = ASTRDUP (first_global_object_name);
9554 /* If the target is handling the constructors/destructors, they
9555 will be local to this file and the name is only necessary for
9557 We also assign sub_I and sub_D sufixes to constructors called from
9558 the global static constructors. These are always local. */
9559 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9560 || (strncmp (type, "sub_", 4) == 0
9561 && (type[4] == 'I' || type[4] == 'D')))
9563 const char *file = main_input_filename;
9565 file = LOCATION_FILE (input_location);
9566 /* Just use the file's basename, because the full pathname
9567 might be quite long. */
9568 p = q = ASTRDUP (lbasename (file));
9572 /* Otherwise, the name must be unique across the entire link.
9573 We don't have anything that we know to be unique to this translation
9574 unit, so use what we do have and throw in some randomness. */
9576 const char *name = weak_global_object_name;
9577 const char *file = main_input_filename;
9582 file = LOCATION_FILE (input_location);
9584 len = strlen (file);
9585 q = (char *) alloca (9 + 17 + len + 1);
9586 memcpy (q, file, len + 1);
9588 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9589 crc32_string (0, name), get_random_seed (false));
9594 clean_symbol_name (q);
9595 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9598 /* Set up the name of the file-level functions we may need.
9599 Use a global object (which is already required to be unique over
9600 the program) rather than the file name (which imposes extra
9602 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9604 return get_identifier (buf);
9607 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9609 /* Complain that the tree code of NODE does not match the expected 0
9610 terminated list of trailing codes. The trailing code list can be
9611 empty, for a more vague error message. FILE, LINE, and FUNCTION
9612 are of the caller. */
9615 tree_check_failed (const_tree node, const char *file,
9616 int line, const char *function, ...)
9620 unsigned length = 0;
9621 enum tree_code code;
9623 va_start (args, function);
9624 while ((code = (enum tree_code) va_arg (args, int)))
9625 length += 4 + strlen (get_tree_code_name (code));
9630 va_start (args, function);
9631 length += strlen ("expected ");
9632 buffer = tmp = (char *) alloca (length);
9634 while ((code = (enum tree_code) va_arg (args, int)))
9636 const char *prefix = length ? " or " : "expected ";
9638 strcpy (tmp + length, prefix);
9639 length += strlen (prefix);
9640 strcpy (tmp + length, get_tree_code_name (code));
9641 length += strlen (get_tree_code_name (code));
9646 buffer = "unexpected node";
9648 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9649 buffer, get_tree_code_name (TREE_CODE (node)),
9650 function, trim_filename (file), line);
9653 /* Complain that the tree code of NODE does match the expected 0
9654 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9658 tree_not_check_failed (const_tree node, const char *file,
9659 int line, const char *function, ...)
9663 unsigned length = 0;
9664 enum tree_code code;
9666 va_start (args, function);
9667 while ((code = (enum tree_code) va_arg (args, int)))
9668 length += 4 + strlen (get_tree_code_name (code));
9670 va_start (args, function);
9671 buffer = (char *) alloca (length);
9673 while ((code = (enum tree_code) va_arg (args, int)))
9677 strcpy (buffer + length, " or ");
9680 strcpy (buffer + length, get_tree_code_name (code));
9681 length += strlen (get_tree_code_name (code));
9685 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9686 buffer, get_tree_code_name (TREE_CODE (node)),
9687 function, trim_filename (file), line);
9690 /* Similar to tree_check_failed, except that we check for a class of tree
9691 code, given in CL. */
9694 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9695 const char *file, int line, const char *function)
9698 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9699 TREE_CODE_CLASS_STRING (cl),
9700 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9701 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9704 /* Similar to tree_check_failed, except that instead of specifying a
9705 dozen codes, use the knowledge that they're all sequential. */
9708 tree_range_check_failed (const_tree node, const char *file, int line,
9709 const char *function, enum tree_code c1,
9713 unsigned length = 0;
9716 for (c = c1; c <= c2; ++c)
9717 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9719 length += strlen ("expected ");
9720 buffer = (char *) alloca (length);
9723 for (c = c1; c <= c2; ++c)
9725 const char *prefix = length ? " or " : "expected ";
9727 strcpy (buffer + length, prefix);
9728 length += strlen (prefix);
9729 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9730 length += strlen (get_tree_code_name ((enum tree_code) c));
9733 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9734 buffer, get_tree_code_name (TREE_CODE (node)),
9735 function, trim_filename (file), line);
9739 /* Similar to tree_check_failed, except that we check that a tree does
9740 not have the specified code, given in CL. */
9743 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9744 const char *file, int line, const char *function)
9747 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9748 TREE_CODE_CLASS_STRING (cl),
9749 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9750 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9754 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9757 omp_clause_check_failed (const_tree node, const char *file, int line,
9758 const char *function, enum omp_clause_code code)
9760 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9761 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9762 function, trim_filename (file), line);
9766 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9769 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9770 const char *function, enum omp_clause_code c1,
9771 enum omp_clause_code c2)
9774 unsigned length = 0;
9777 for (c = c1; c <= c2; ++c)
9778 length += 4 + strlen (omp_clause_code_name[c]);
9780 length += strlen ("expected ");
9781 buffer = (char *) alloca (length);
9784 for (c = c1; c <= c2; ++c)
9786 const char *prefix = length ? " or " : "expected ";
9788 strcpy (buffer + length, prefix);
9789 length += strlen (prefix);
9790 strcpy (buffer + length, omp_clause_code_name[c]);
9791 length += strlen (omp_clause_code_name[c]);
9794 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9795 buffer, omp_clause_code_name[TREE_CODE (node)],
9796 function, trim_filename (file), line);
9800 #undef DEFTREESTRUCT
9801 #define DEFTREESTRUCT(VAL, NAME) NAME,
9803 static const char *ts_enum_names[] = {
9804 #include "treestruct.def"
9806 #undef DEFTREESTRUCT
9808 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9810 /* Similar to tree_class_check_failed, except that we check for
9811 whether CODE contains the tree structure identified by EN. */
9814 tree_contains_struct_check_failed (const_tree node,
9815 const enum tree_node_structure_enum en,
9816 const char *file, int line,
9817 const char *function)
9820 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9822 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9826 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9827 (dynamically sized) vector. */
9830 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9831 const char *function)
9834 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9835 idx + 1, len, function, trim_filename (file), line);
9838 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9839 (dynamically sized) vector. */
9842 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9843 const char *function)
9846 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9847 idx + 1, len, function, trim_filename (file), line);
9850 /* Similar to above, except that the check is for the bounds of the operand
9851 vector of an expression node EXP. */
9854 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9855 int line, const char *function)
9857 enum tree_code code = TREE_CODE (exp);
9859 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9860 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9861 function, trim_filename (file), line);
9864 /* Similar to above, except that the check is for the number of
9865 operands of an OMP_CLAUSE node. */
9868 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9869 int line, const char *function)
9872 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9873 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9874 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9875 trim_filename (file), line);
9877 #endif /* ENABLE_TREE_CHECKING */
9879 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9880 and mapped to the machine mode MODE. Initialize its fields and build
9881 the information necessary for debugging output. */
9884 make_vector_type (tree innertype, int nunits, machine_mode mode)
9887 inchash::hash hstate;
9888 tree mv_innertype = TYPE_MAIN_VARIANT (innertype);
9890 t = make_node (VECTOR_TYPE);
9891 TREE_TYPE (t) = mv_innertype;
9892 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9893 SET_TYPE_MODE (t, mode);
9895 if (TYPE_STRUCTURAL_EQUALITY_P (mv_innertype) || in_lto_p)
9896 SET_TYPE_STRUCTURAL_EQUALITY (t);
9897 else if ((TYPE_CANONICAL (mv_innertype) != innertype
9898 || mode != VOIDmode)
9899 && !VECTOR_BOOLEAN_TYPE_P (t))
9901 = make_vector_type (TYPE_CANONICAL (mv_innertype), nunits, VOIDmode);
9905 hstate.add_wide_int (VECTOR_TYPE);
9906 hstate.add_wide_int (nunits);
9907 hstate.add_wide_int (mode);
9908 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9909 t = type_hash_canon (hstate.end (), t);
9911 /* We have built a main variant, based on the main variant of the
9912 inner type. Use it to build the variant we return. */
9913 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9914 && TREE_TYPE (t) != innertype)
9915 return build_type_attribute_qual_variant (t,
9916 TYPE_ATTRIBUTES (innertype),
9917 TYPE_QUALS (innertype));
9923 make_or_reuse_type (unsigned size, int unsignedp)
9927 if (size == INT_TYPE_SIZE)
9928 return unsignedp ? unsigned_type_node : integer_type_node;
9929 if (size == CHAR_TYPE_SIZE)
9930 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9931 if (size == SHORT_TYPE_SIZE)
9932 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9933 if (size == LONG_TYPE_SIZE)
9934 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9935 if (size == LONG_LONG_TYPE_SIZE)
9936 return (unsignedp ? long_long_unsigned_type_node
9937 : long_long_integer_type_node);
9939 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9940 if (size == int_n_data[i].bitsize
9941 && int_n_enabled_p[i])
9942 return (unsignedp ? int_n_trees[i].unsigned_type
9943 : int_n_trees[i].signed_type);
9946 return make_unsigned_type (size);
9948 return make_signed_type (size);
9951 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9954 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9958 if (size == SHORT_FRACT_TYPE_SIZE)
9959 return unsignedp ? sat_unsigned_short_fract_type_node
9960 : sat_short_fract_type_node;
9961 if (size == FRACT_TYPE_SIZE)
9962 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9963 if (size == LONG_FRACT_TYPE_SIZE)
9964 return unsignedp ? sat_unsigned_long_fract_type_node
9965 : sat_long_fract_type_node;
9966 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9967 return unsignedp ? sat_unsigned_long_long_fract_type_node
9968 : sat_long_long_fract_type_node;
9972 if (size == SHORT_FRACT_TYPE_SIZE)
9973 return unsignedp ? unsigned_short_fract_type_node
9974 : short_fract_type_node;
9975 if (size == FRACT_TYPE_SIZE)
9976 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9977 if (size == LONG_FRACT_TYPE_SIZE)
9978 return unsignedp ? unsigned_long_fract_type_node
9979 : long_fract_type_node;
9980 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9981 return unsignedp ? unsigned_long_long_fract_type_node
9982 : long_long_fract_type_node;
9985 return make_fract_type (size, unsignedp, satp);
9988 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9991 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9995 if (size == SHORT_ACCUM_TYPE_SIZE)
9996 return unsignedp ? sat_unsigned_short_accum_type_node
9997 : sat_short_accum_type_node;
9998 if (size == ACCUM_TYPE_SIZE)
9999 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
10000 if (size == LONG_ACCUM_TYPE_SIZE)
10001 return unsignedp ? sat_unsigned_long_accum_type_node
10002 : sat_long_accum_type_node;
10003 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10004 return unsignedp ? sat_unsigned_long_long_accum_type_node
10005 : sat_long_long_accum_type_node;
10009 if (size == SHORT_ACCUM_TYPE_SIZE)
10010 return unsignedp ? unsigned_short_accum_type_node
10011 : short_accum_type_node;
10012 if (size == ACCUM_TYPE_SIZE)
10013 return unsignedp ? unsigned_accum_type_node : accum_type_node;
10014 if (size == LONG_ACCUM_TYPE_SIZE)
10015 return unsignedp ? unsigned_long_accum_type_node
10016 : long_accum_type_node;
10017 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
10018 return unsignedp ? unsigned_long_long_accum_type_node
10019 : long_long_accum_type_node;
10022 return make_accum_type (size, unsignedp, satp);
10026 /* Create an atomic variant node for TYPE. This routine is called
10027 during initialization of data types to create the 5 basic atomic
10028 types. The generic build_variant_type function requires these to
10029 already be set up in order to function properly, so cannot be
10030 called from there. If ALIGN is non-zero, then ensure alignment is
10031 overridden to this value. */
10034 build_atomic_base (tree type, unsigned int align)
10038 /* Make sure its not already registered. */
10039 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
10042 t = build_variant_type_copy (type);
10043 set_type_quals (t, TYPE_QUAL_ATOMIC);
10046 SET_TYPE_ALIGN (t, align);
10051 /* Create nodes for all integer types (and error_mark_node) using the sizes
10052 of C datatypes. SIGNED_CHAR specifies whether char is signed. */
10055 build_common_tree_nodes (bool signed_char)
10059 error_mark_node = make_node (ERROR_MARK);
10060 TREE_TYPE (error_mark_node) = error_mark_node;
10062 initialize_sizetypes ();
10064 /* Define both `signed char' and `unsigned char'. */
10065 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
10066 TYPE_STRING_FLAG (signed_char_type_node) = 1;
10067 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
10068 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
10070 /* Define `char', which is like either `signed char' or `unsigned char'
10071 but not the same as either. */
10074 ? make_signed_type (CHAR_TYPE_SIZE)
10075 : make_unsigned_type (CHAR_TYPE_SIZE));
10076 TYPE_STRING_FLAG (char_type_node) = 1;
10078 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
10079 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
10080 integer_type_node = make_signed_type (INT_TYPE_SIZE);
10081 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
10082 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
10083 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
10084 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
10085 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
10087 for (i = 0; i < NUM_INT_N_ENTS; i ++)
10089 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
10090 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
10091 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
10092 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
10094 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
10095 && int_n_enabled_p[i])
10097 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
10098 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
10102 /* Define a boolean type. This type only represents boolean values but
10103 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
10104 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
10105 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
10106 TYPE_PRECISION (boolean_type_node) = 1;
10107 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
10109 /* Define what type to use for size_t. */
10110 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
10111 size_type_node = unsigned_type_node;
10112 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
10113 size_type_node = long_unsigned_type_node;
10114 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
10115 size_type_node = long_long_unsigned_type_node;
10116 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
10117 size_type_node = short_unsigned_type_node;
10122 size_type_node = NULL_TREE;
10123 for (i = 0; i < NUM_INT_N_ENTS; i++)
10124 if (int_n_enabled_p[i])
10127 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
10129 if (strcmp (name, SIZE_TYPE) == 0)
10131 size_type_node = int_n_trees[i].unsigned_type;
10134 if (size_type_node == NULL_TREE)
10135 gcc_unreachable ();
10138 /* Fill in the rest of the sized types. Reuse existing type nodes
10140 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
10141 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
10142 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
10143 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
10144 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
10146 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
10147 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
10148 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
10149 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
10150 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
10152 /* Don't call build_qualified type for atomics. That routine does
10153 special processing for atomics, and until they are initialized
10154 it's better not to make that call.
10156 Check to see if there is a target override for atomic types. */
10158 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
10159 targetm.atomic_align_for_mode (QImode));
10160 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
10161 targetm.atomic_align_for_mode (HImode));
10162 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
10163 targetm.atomic_align_for_mode (SImode));
10164 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
10165 targetm.atomic_align_for_mode (DImode));
10166 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
10167 targetm.atomic_align_for_mode (TImode));
10169 access_public_node = get_identifier ("public");
10170 access_protected_node = get_identifier ("protected");
10171 access_private_node = get_identifier ("private");
10173 /* Define these next since types below may used them. */
10174 integer_zero_node = build_int_cst (integer_type_node, 0);
10175 integer_one_node = build_int_cst (integer_type_node, 1);
10176 integer_three_node = build_int_cst (integer_type_node, 3);
10177 integer_minus_one_node = build_int_cst (integer_type_node, -1);
10179 size_zero_node = size_int (0);
10180 size_one_node = size_int (1);
10181 bitsize_zero_node = bitsize_int (0);
10182 bitsize_one_node = bitsize_int (1);
10183 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
10185 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
10186 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
10188 void_type_node = make_node (VOID_TYPE);
10189 layout_type (void_type_node);
10191 pointer_bounds_type_node = targetm.chkp_bound_type ();
10193 /* We are not going to have real types in C with less than byte alignment,
10194 so we might as well not have any types that claim to have it. */
10195 SET_TYPE_ALIGN (void_type_node, BITS_PER_UNIT);
10196 TYPE_USER_ALIGN (void_type_node) = 0;
10198 void_node = make_node (VOID_CST);
10199 TREE_TYPE (void_node) = void_type_node;
10201 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
10202 layout_type (TREE_TYPE (null_pointer_node));
10204 ptr_type_node = build_pointer_type (void_type_node);
10205 const_ptr_type_node
10206 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
10207 fileptr_type_node = ptr_type_node;
10209 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
10211 float_type_node = make_node (REAL_TYPE);
10212 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
10213 layout_type (float_type_node);
10215 double_type_node = make_node (REAL_TYPE);
10216 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
10217 layout_type (double_type_node);
10219 long_double_type_node = make_node (REAL_TYPE);
10220 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
10221 layout_type (long_double_type_node);
10223 float_ptr_type_node = build_pointer_type (float_type_node);
10224 double_ptr_type_node = build_pointer_type (double_type_node);
10225 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
10226 integer_ptr_type_node = build_pointer_type (integer_type_node);
10228 /* Fixed size integer types. */
10229 uint16_type_node = make_or_reuse_type (16, 1);
10230 uint32_type_node = make_or_reuse_type (32, 1);
10231 uint64_type_node = make_or_reuse_type (64, 1);
10233 /* Decimal float types. */
10234 dfloat32_type_node = make_node (REAL_TYPE);
10235 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
10236 layout_type (dfloat32_type_node);
10237 SET_TYPE_MODE (dfloat32_type_node, SDmode);
10238 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
10240 dfloat64_type_node = make_node (REAL_TYPE);
10241 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
10242 layout_type (dfloat64_type_node);
10243 SET_TYPE_MODE (dfloat64_type_node, DDmode);
10244 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
10246 dfloat128_type_node = make_node (REAL_TYPE);
10247 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
10248 layout_type (dfloat128_type_node);
10249 SET_TYPE_MODE (dfloat128_type_node, TDmode);
10250 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
10252 complex_integer_type_node = build_complex_type (integer_type_node, true);
10253 complex_float_type_node = build_complex_type (float_type_node, true);
10254 complex_double_type_node = build_complex_type (double_type_node, true);
10255 complex_long_double_type_node = build_complex_type (long_double_type_node,
10258 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
10259 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
10260 sat_ ## KIND ## _type_node = \
10261 make_sat_signed_ ## KIND ## _type (SIZE); \
10262 sat_unsigned_ ## KIND ## _type_node = \
10263 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10264 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10265 unsigned_ ## KIND ## _type_node = \
10266 make_unsigned_ ## KIND ## _type (SIZE);
10268 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
10269 sat_ ## WIDTH ## KIND ## _type_node = \
10270 make_sat_signed_ ## KIND ## _type (SIZE); \
10271 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
10272 make_sat_unsigned_ ## KIND ## _type (SIZE); \
10273 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
10274 unsigned_ ## WIDTH ## KIND ## _type_node = \
10275 make_unsigned_ ## KIND ## _type (SIZE);
10277 /* Make fixed-point type nodes based on four different widths. */
10278 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
10279 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
10280 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
10281 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
10282 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
10284 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
10285 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
10286 NAME ## _type_node = \
10287 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
10288 u ## NAME ## _type_node = \
10289 make_or_reuse_unsigned_ ## KIND ## _type \
10290 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
10291 sat_ ## NAME ## _type_node = \
10292 make_or_reuse_sat_signed_ ## KIND ## _type \
10293 (GET_MODE_BITSIZE (MODE ## mode)); \
10294 sat_u ## NAME ## _type_node = \
10295 make_or_reuse_sat_unsigned_ ## KIND ## _type \
10296 (GET_MODE_BITSIZE (U ## MODE ## mode));
10298 /* Fixed-point type and mode nodes. */
10299 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
10300 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
10301 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
10302 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
10303 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
10304 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
10305 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
10306 MAKE_FIXED_MODE_NODE (accum, ha, HA)
10307 MAKE_FIXED_MODE_NODE (accum, sa, SA)
10308 MAKE_FIXED_MODE_NODE (accum, da, DA)
10309 MAKE_FIXED_MODE_NODE (accum, ta, TA)
10312 tree t = targetm.build_builtin_va_list ();
10314 /* Many back-ends define record types without setting TYPE_NAME.
10315 If we copied the record type here, we'd keep the original
10316 record type without a name. This breaks name mangling. So,
10317 don't copy record types and let c_common_nodes_and_builtins()
10318 declare the type to be __builtin_va_list. */
10319 if (TREE_CODE (t) != RECORD_TYPE)
10320 t = build_variant_type_copy (t);
10322 va_list_type_node = t;
10326 /* Modify DECL for given flags.
10327 TM_PURE attribute is set only on types, so the function will modify
10328 DECL's type when ECF_TM_PURE is used. */
10331 set_call_expr_flags (tree decl, int flags)
10333 if (flags & ECF_NOTHROW)
10334 TREE_NOTHROW (decl) = 1;
10335 if (flags & ECF_CONST)
10336 TREE_READONLY (decl) = 1;
10337 if (flags & ECF_PURE)
10338 DECL_PURE_P (decl) = 1;
10339 if (flags & ECF_LOOPING_CONST_OR_PURE)
10340 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
10341 if (flags & ECF_NOVOPS)
10342 DECL_IS_NOVOPS (decl) = 1;
10343 if (flags & ECF_NORETURN)
10344 TREE_THIS_VOLATILE (decl) = 1;
10345 if (flags & ECF_MALLOC)
10346 DECL_IS_MALLOC (decl) = 1;
10347 if (flags & ECF_RETURNS_TWICE)
10348 DECL_IS_RETURNS_TWICE (decl) = 1;
10349 if (flags & ECF_LEAF)
10350 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
10351 NULL, DECL_ATTRIBUTES (decl));
10352 if ((flags & ECF_TM_PURE) && flag_tm)
10353 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10354 /* Looping const or pure is implied by noreturn.
10355 There is currently no way to declare looping const or looping pure alone. */
10356 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10357 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10361 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10364 local_define_builtin (const char *name, tree type, enum built_in_function code,
10365 const char *library_name, int ecf_flags)
10369 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10370 library_name, NULL_TREE);
10371 set_call_expr_flags (decl, ecf_flags);
10373 set_builtin_decl (code, decl, true);
10376 /* Call this function after instantiating all builtins that the language
10377 front end cares about. This will build the rest of the builtins
10378 and internal functions that are relied upon by the tree optimizers and
10382 build_common_builtin_nodes (void)
10387 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10389 ftype = build_function_type (void_type_node, void_list_node);
10390 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10391 "__builtin_unreachable",
10392 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10396 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10397 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10399 ftype = build_function_type_list (ptr_type_node,
10400 ptr_type_node, const_ptr_type_node,
10401 size_type_node, NULL_TREE);
10403 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10404 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10405 "memcpy", ECF_NOTHROW | ECF_LEAF);
10406 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10407 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10408 "memmove", ECF_NOTHROW | ECF_LEAF);
10411 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10413 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10414 const_ptr_type_node, size_type_node,
10416 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10417 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10420 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10422 ftype = build_function_type_list (ptr_type_node,
10423 ptr_type_node, integer_type_node,
10424 size_type_node, NULL_TREE);
10425 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10426 "memset", ECF_NOTHROW | ECF_LEAF);
10429 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10431 ftype = build_function_type_list (ptr_type_node,
10432 size_type_node, NULL_TREE);
10433 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10434 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10437 ftype = build_function_type_list (ptr_type_node, size_type_node,
10438 size_type_node, NULL_TREE);
10439 local_define_builtin ("__builtin_alloca_with_align", ftype,
10440 BUILT_IN_ALLOCA_WITH_ALIGN,
10441 "__builtin_alloca_with_align",
10442 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10444 /* If we're checking the stack, `alloca' can throw. */
10445 if (flag_stack_check)
10447 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10448 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10451 ftype = build_function_type_list (void_type_node,
10452 ptr_type_node, ptr_type_node,
10453 ptr_type_node, NULL_TREE);
10454 local_define_builtin ("__builtin_init_trampoline", ftype,
10455 BUILT_IN_INIT_TRAMPOLINE,
10456 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10457 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10458 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10459 "__builtin_init_heap_trampoline",
10460 ECF_NOTHROW | ECF_LEAF);
10462 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10463 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10464 BUILT_IN_ADJUST_TRAMPOLINE,
10465 "__builtin_adjust_trampoline",
10466 ECF_CONST | ECF_NOTHROW);
10468 ftype = build_function_type_list (void_type_node,
10469 ptr_type_node, ptr_type_node, NULL_TREE);
10470 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10471 BUILT_IN_NONLOCAL_GOTO,
10472 "__builtin_nonlocal_goto",
10473 ECF_NORETURN | ECF_NOTHROW);
10475 ftype = build_function_type_list (void_type_node,
10476 ptr_type_node, ptr_type_node, NULL_TREE);
10477 local_define_builtin ("__builtin_setjmp_setup", ftype,
10478 BUILT_IN_SETJMP_SETUP,
10479 "__builtin_setjmp_setup", ECF_NOTHROW);
10481 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10482 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10483 BUILT_IN_SETJMP_RECEIVER,
10484 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10486 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10487 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10488 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10490 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10491 local_define_builtin ("__builtin_stack_restore", ftype,
10492 BUILT_IN_STACK_RESTORE,
10493 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10495 /* If there's a possibility that we might use the ARM EABI, build the
10496 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10497 if (targetm.arm_eabi_unwinder)
10499 ftype = build_function_type_list (void_type_node, NULL_TREE);
10500 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10501 BUILT_IN_CXA_END_CLEANUP,
10502 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10505 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10506 local_define_builtin ("__builtin_unwind_resume", ftype,
10507 BUILT_IN_UNWIND_RESUME,
10508 ((targetm_common.except_unwind_info (&global_options)
10510 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10513 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10515 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10517 local_define_builtin ("__builtin_return_address", ftype,
10518 BUILT_IN_RETURN_ADDRESS,
10519 "__builtin_return_address",
10523 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10524 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10526 ftype = build_function_type_list (void_type_node, ptr_type_node,
10527 ptr_type_node, NULL_TREE);
10528 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10529 local_define_builtin ("__cyg_profile_func_enter", ftype,
10530 BUILT_IN_PROFILE_FUNC_ENTER,
10531 "__cyg_profile_func_enter", 0);
10532 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10533 local_define_builtin ("__cyg_profile_func_exit", ftype,
10534 BUILT_IN_PROFILE_FUNC_EXIT,
10535 "__cyg_profile_func_exit", 0);
10538 /* The exception object and filter values from the runtime. The argument
10539 must be zero before exception lowering, i.e. from the front end. After
10540 exception lowering, it will be the region number for the exception
10541 landing pad. These functions are PURE instead of CONST to prevent
10542 them from being hoisted past the exception edge that will initialize
10543 its value in the landing pad. */
10544 ftype = build_function_type_list (ptr_type_node,
10545 integer_type_node, NULL_TREE);
10546 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10547 /* Only use TM_PURE if we have TM language support. */
10548 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10549 ecf_flags |= ECF_TM_PURE;
10550 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10551 "__builtin_eh_pointer", ecf_flags);
10553 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10554 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10555 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10556 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10558 ftype = build_function_type_list (void_type_node,
10559 integer_type_node, integer_type_node,
10561 local_define_builtin ("__builtin_eh_copy_values", ftype,
10562 BUILT_IN_EH_COPY_VALUES,
10563 "__builtin_eh_copy_values", ECF_NOTHROW);
10565 /* Complex multiplication and division. These are handled as builtins
10566 rather than optabs because emit_library_call_value doesn't support
10567 complex. Further, we can do slightly better with folding these
10568 beasties if the real and complex parts of the arguments are separate. */
10572 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10574 char mode_name_buf[4], *q;
10576 enum built_in_function mcode, dcode;
10577 tree type, inner_type;
10578 const char *prefix = "__";
10580 if (targetm.libfunc_gnu_prefix)
10583 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10586 inner_type = TREE_TYPE (type);
10588 ftype = build_function_type_list (type, inner_type, inner_type,
10589 inner_type, inner_type, NULL_TREE);
10591 mcode = ((enum built_in_function)
10592 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10593 dcode = ((enum built_in_function)
10594 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10596 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10600 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10602 local_define_builtin (built_in_names[mcode], ftype, mcode,
10603 built_in_names[mcode],
10604 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10606 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10608 local_define_builtin (built_in_names[dcode], ftype, dcode,
10609 built_in_names[dcode],
10610 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10614 init_internal_fns ();
10617 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10620 If we requested a pointer to a vector, build up the pointers that
10621 we stripped off while looking for the inner type. Similarly for
10622 return values from functions.
10624 The argument TYPE is the top of the chain, and BOTTOM is the
10625 new type which we will point to. */
10628 reconstruct_complex_type (tree type, tree bottom)
10632 if (TREE_CODE (type) == POINTER_TYPE)
10634 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10635 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10636 TYPE_REF_CAN_ALIAS_ALL (type));
10638 else if (TREE_CODE (type) == REFERENCE_TYPE)
10640 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10641 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10642 TYPE_REF_CAN_ALIAS_ALL (type));
10644 else if (TREE_CODE (type) == ARRAY_TYPE)
10646 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10647 outer = build_array_type (inner, TYPE_DOMAIN (type));
10649 else if (TREE_CODE (type) == FUNCTION_TYPE)
10651 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10652 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10654 else if (TREE_CODE (type) == METHOD_TYPE)
10656 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10657 /* The build_method_type_directly() routine prepends 'this' to argument list,
10658 so we must compensate by getting rid of it. */
10660 = build_method_type_directly
10661 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10663 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10665 else if (TREE_CODE (type) == OFFSET_TYPE)
10667 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10668 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10673 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10674 TYPE_QUALS (type));
10677 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10680 build_vector_type_for_mode (tree innertype, machine_mode mode)
10684 switch (GET_MODE_CLASS (mode))
10686 case MODE_VECTOR_INT:
10687 case MODE_VECTOR_FLOAT:
10688 case MODE_VECTOR_FRACT:
10689 case MODE_VECTOR_UFRACT:
10690 case MODE_VECTOR_ACCUM:
10691 case MODE_VECTOR_UACCUM:
10692 nunits = GET_MODE_NUNITS (mode);
10696 /* Check that there are no leftover bits. */
10697 gcc_assert (GET_MODE_BITSIZE (mode)
10698 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10700 nunits = GET_MODE_BITSIZE (mode)
10701 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10705 gcc_unreachable ();
10708 return make_vector_type (innertype, nunits, mode);
10711 /* Similarly, but takes the inner type and number of units, which must be
10715 build_vector_type (tree innertype, int nunits)
10717 return make_vector_type (innertype, nunits, VOIDmode);
10720 /* Build truth vector with specified length and number of units. */
10723 build_truth_vector_type (unsigned nunits, unsigned vector_size)
10725 machine_mode mask_mode = targetm.vectorize.get_mask_mode (nunits,
10728 gcc_assert (mask_mode != VOIDmode);
10730 unsigned HOST_WIDE_INT vsize;
10731 if (mask_mode == BLKmode)
10732 vsize = vector_size * BITS_PER_UNIT;
10734 vsize = GET_MODE_BITSIZE (mask_mode);
10736 unsigned HOST_WIDE_INT esize = vsize / nunits;
10737 gcc_assert (esize * nunits == vsize);
10739 tree bool_type = build_nonstandard_boolean_type (esize);
10741 return make_vector_type (bool_type, nunits, mask_mode);
10744 /* Returns a vector type corresponding to a comparison of VECTYPE. */
10747 build_same_sized_truth_vector_type (tree vectype)
10749 if (VECTOR_BOOLEAN_TYPE_P (vectype))
10752 unsigned HOST_WIDE_INT size = GET_MODE_SIZE (TYPE_MODE (vectype));
10755 size = tree_to_uhwi (TYPE_SIZE_UNIT (vectype));
10757 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (vectype), size);
10760 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10763 build_opaque_vector_type (tree innertype, int nunits)
10765 tree t = make_vector_type (innertype, nunits, VOIDmode);
10767 /* We always build the non-opaque variant before the opaque one,
10768 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10769 cand = TYPE_NEXT_VARIANT (t);
10771 && TYPE_VECTOR_OPAQUE (cand)
10772 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10774 /* Othewise build a variant type and make sure to queue it after
10775 the non-opaque type. */
10776 cand = build_distinct_type_copy (t);
10777 TYPE_VECTOR_OPAQUE (cand) = true;
10778 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10779 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10780 TYPE_NEXT_VARIANT (t) = cand;
10781 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10786 /* Given an initializer INIT, return TRUE if INIT is zero or some
10787 aggregate of zeros. Otherwise return FALSE. */
10789 initializer_zerop (const_tree init)
10795 switch (TREE_CODE (init))
10798 return integer_zerop (init);
10801 /* ??? Note that this is not correct for C4X float formats. There,
10802 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10803 negative exponent. */
10804 return real_zerop (init)
10805 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10808 return fixed_zerop (init);
10811 return integer_zerop (init)
10812 || (real_zerop (init)
10813 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10814 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10819 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10820 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10827 unsigned HOST_WIDE_INT idx;
10829 if (TREE_CLOBBER_P (init))
10831 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10832 if (!initializer_zerop (elt))
10841 /* We need to loop through all elements to handle cases like
10842 "\0" and "\0foobar". */
10843 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10844 if (TREE_STRING_POINTER (init)[i] != '\0')
10855 /* Check if vector VEC consists of all the equal elements and
10856 that the number of elements corresponds to the type of VEC.
10857 The function returns first element of the vector
10858 or NULL_TREE if the vector is not uniform. */
10860 uniform_vector_p (const_tree vec)
10865 if (vec == NULL_TREE)
10868 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10870 if (TREE_CODE (vec) == VECTOR_CST)
10872 first = VECTOR_CST_ELT (vec, 0);
10873 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10874 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10880 else if (TREE_CODE (vec) == CONSTRUCTOR)
10882 first = error_mark_node;
10884 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10891 if (!operand_equal_p (first, t, 0))
10894 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10903 /* Build an empty statement at location LOC. */
10906 build_empty_stmt (location_t loc)
10908 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10909 SET_EXPR_LOCATION (t, loc);
10914 /* Build an OpenMP clause with code CODE. LOC is the location of the
10918 build_omp_clause (location_t loc, enum omp_clause_code code)
10923 length = omp_clause_num_ops[code];
10924 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10926 record_node_allocation_statistics (OMP_CLAUSE, size);
10928 t = (tree) ggc_internal_alloc (size);
10929 memset (t, 0, size);
10930 TREE_SET_CODE (t, OMP_CLAUSE);
10931 OMP_CLAUSE_SET_CODE (t, code);
10932 OMP_CLAUSE_LOCATION (t) = loc;
10937 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10938 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10939 Except for the CODE and operand count field, other storage for the
10940 object is initialized to zeros. */
10943 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10946 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10948 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10949 gcc_assert (len >= 1);
10951 record_node_allocation_statistics (code, length);
10953 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10955 TREE_SET_CODE (t, code);
10957 /* Can't use TREE_OPERAND to store the length because if checking is
10958 enabled, it will try to check the length before we store it. :-P */
10959 t->exp.operands[0] = build_int_cst (sizetype, len);
10964 /* Helper function for build_call_* functions; build a CALL_EXPR with
10965 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10966 the argument slots. */
10969 build_call_1 (tree return_type, tree fn, int nargs)
10973 t = build_vl_exp (CALL_EXPR, nargs + 3);
10974 TREE_TYPE (t) = return_type;
10975 CALL_EXPR_FN (t) = fn;
10976 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10981 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10982 FN and a null static chain slot. NARGS is the number of call arguments
10983 which are specified as "..." arguments. */
10986 build_call_nary (tree return_type, tree fn, int nargs, ...)
10990 va_start (args, nargs);
10991 ret = build_call_valist (return_type, fn, nargs, args);
10996 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10997 FN and a null static chain slot. NARGS is the number of call arguments
10998 which are specified as a va_list ARGS. */
11001 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
11006 t = build_call_1 (return_type, fn, nargs);
11007 for (i = 0; i < nargs; i++)
11008 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
11009 process_call_operands (t);
11013 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
11014 FN and a null static chain slot. NARGS is the number of call arguments
11015 which are specified as a tree array ARGS. */
11018 build_call_array_loc (location_t loc, tree return_type, tree fn,
11019 int nargs, const tree *args)
11024 t = build_call_1 (return_type, fn, nargs);
11025 for (i = 0; i < nargs; i++)
11026 CALL_EXPR_ARG (t, i) = args[i];
11027 process_call_operands (t);
11028 SET_EXPR_LOCATION (t, loc);
11032 /* Like build_call_array, but takes a vec. */
11035 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
11040 ret = build_call_1 (return_type, fn, vec_safe_length (args));
11041 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
11042 CALL_EXPR_ARG (ret, ix) = t;
11043 process_call_operands (ret);
11047 /* Conveniently construct a function call expression. FNDECL names the
11048 function to be called and N arguments are passed in the array
11052 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11054 tree fntype = TREE_TYPE (fndecl);
11055 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11057 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
11060 /* Conveniently construct a function call expression. FNDECL names the
11061 function to be called and the arguments are passed in the vector
11065 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11067 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11068 vec_safe_address (vec));
11072 /* Conveniently construct a function call expression. FNDECL names the
11073 function to be called, N is the number of arguments, and the "..."
11074 parameters are the argument expressions. */
11077 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11080 tree *argarray = XALLOCAVEC (tree, n);
11084 for (i = 0; i < n; i++)
11085 argarray[i] = va_arg (ap, tree);
11087 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11090 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11091 varargs macros aren't supported by all bootstrap compilers. */
11094 build_call_expr (tree fndecl, int n, ...)
11097 tree *argarray = XALLOCAVEC (tree, n);
11101 for (i = 0; i < n; i++)
11102 argarray[i] = va_arg (ap, tree);
11104 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11107 /* Build an internal call to IFN, with arguments ARGS[0:N-1] and with return
11108 type TYPE. This is just like CALL_EXPR, except its CALL_EXPR_FN is NULL.
11109 It will get gimplified later into an ordinary internal function. */
11112 build_call_expr_internal_loc_array (location_t loc, internal_fn ifn,
11113 tree type, int n, const tree *args)
11115 tree t = build_call_1 (type, NULL_TREE, n);
11116 for (int i = 0; i < n; ++i)
11117 CALL_EXPR_ARG (t, i) = args[i];
11118 SET_EXPR_LOCATION (t, loc);
11119 CALL_EXPR_IFN (t) = ifn;
11123 /* Build internal call expression. This is just like CALL_EXPR, except
11124 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
11125 internal function. */
11128 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
11129 tree type, int n, ...)
11132 tree *argarray = XALLOCAVEC (tree, n);
11136 for (i = 0; i < n; i++)
11137 argarray[i] = va_arg (ap, tree);
11139 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11142 /* Return a function call to FN, if the target is guaranteed to support it,
11145 N is the number of arguments, passed in the "...", and TYPE is the
11146 type of the return value. */
11149 maybe_build_call_expr_loc (location_t loc, combined_fn fn, tree type,
11153 tree *argarray = XALLOCAVEC (tree, n);
11157 for (i = 0; i < n; i++)
11158 argarray[i] = va_arg (ap, tree);
11160 if (internal_fn_p (fn))
11162 internal_fn ifn = as_internal_fn (fn);
11163 if (direct_internal_fn_p (ifn))
11165 tree_pair types = direct_internal_fn_types (ifn, type, argarray);
11166 if (!direct_internal_fn_supported_p (ifn, types,
11167 OPTIMIZE_FOR_BOTH))
11170 return build_call_expr_internal_loc_array (loc, ifn, type, n, argarray);
11174 tree fndecl = builtin_decl_implicit (as_builtin_fn (fn));
11177 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11181 /* Create a new constant string literal and return a char* pointer to it.
11182 The STRING_CST value is the LEN characters at STR. */
11184 build_string_literal (int len, const char *str)
11186 tree t, elem, index, type;
11188 t = build_string (len, str);
11189 elem = build_type_variant (char_type_node, 1, 0);
11190 index = build_index_type (size_int (len - 1));
11191 type = build_array_type (elem, index);
11192 TREE_TYPE (t) = type;
11193 TREE_CONSTANT (t) = 1;
11194 TREE_READONLY (t) = 1;
11195 TREE_STATIC (t) = 1;
11197 type = build_pointer_type (elem);
11198 t = build1 (ADDR_EXPR, type,
11199 build4 (ARRAY_REF, elem,
11200 t, integer_zero_node, NULL_TREE, NULL_TREE));
11206 /* Return true if T (assumed to be a DECL) must be assigned a memory
11210 needs_to_live_in_memory (const_tree t)
11212 return (TREE_ADDRESSABLE (t)
11213 || is_global_var (t)
11214 || (TREE_CODE (t) == RESULT_DECL
11215 && !DECL_BY_REFERENCE (t)
11216 && aggregate_value_p (t, current_function_decl)));
11219 /* Return value of a constant X and sign-extend it. */
11222 int_cst_value (const_tree x)
11224 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
11225 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
11227 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
11228 gcc_assert (cst_and_fits_in_hwi (x));
11230 if (bits < HOST_BITS_PER_WIDE_INT)
11232 bool negative = ((val >> (bits - 1)) & 1) != 0;
11234 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
11236 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
11242 /* If TYPE is an integral or pointer type, return an integer type with
11243 the same precision which is unsigned iff UNSIGNEDP is true, or itself
11244 if TYPE is already an integer type of signedness UNSIGNEDP. */
11247 signed_or_unsigned_type_for (int unsignedp, tree type)
11249 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
11252 if (TREE_CODE (type) == VECTOR_TYPE)
11254 tree inner = TREE_TYPE (type);
11255 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
11258 if (inner == inner2)
11260 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
11263 if (!INTEGRAL_TYPE_P (type)
11264 && !POINTER_TYPE_P (type)
11265 && TREE_CODE (type) != OFFSET_TYPE)
11268 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
11271 /* If TYPE is an integral or pointer type, return an integer type with
11272 the same precision which is unsigned, or itself if TYPE is already an
11273 unsigned integer type. */
11276 unsigned_type_for (tree type)
11278 return signed_or_unsigned_type_for (1, type);
11281 /* If TYPE is an integral or pointer type, return an integer type with
11282 the same precision which is signed, or itself if TYPE is already a
11283 signed integer type. */
11286 signed_type_for (tree type)
11288 return signed_or_unsigned_type_for (0, type);
11291 /* If TYPE is a vector type, return a signed integer vector type with the
11292 same width and number of subparts. Otherwise return boolean_type_node. */
11295 truth_type_for (tree type)
11297 if (TREE_CODE (type) == VECTOR_TYPE)
11299 if (VECTOR_BOOLEAN_TYPE_P (type))
11301 return build_truth_vector_type (TYPE_VECTOR_SUBPARTS (type),
11302 GET_MODE_SIZE (TYPE_MODE (type)));
11305 return boolean_type_node;
11308 /* Returns the largest value obtainable by casting something in INNER type to
11312 upper_bound_in_type (tree outer, tree inner)
11314 unsigned int det = 0;
11315 unsigned oprec = TYPE_PRECISION (outer);
11316 unsigned iprec = TYPE_PRECISION (inner);
11319 /* Compute a unique number for every combination. */
11320 det |= (oprec > iprec) ? 4 : 0;
11321 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
11322 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
11324 /* Determine the exponent to use. */
11329 /* oprec <= iprec, outer: signed, inner: don't care. */
11334 /* oprec <= iprec, outer: unsigned, inner: don't care. */
11338 /* oprec > iprec, outer: signed, inner: signed. */
11342 /* oprec > iprec, outer: signed, inner: unsigned. */
11346 /* oprec > iprec, outer: unsigned, inner: signed. */
11350 /* oprec > iprec, outer: unsigned, inner: unsigned. */
11354 gcc_unreachable ();
11357 return wide_int_to_tree (outer,
11358 wi::mask (prec, false, TYPE_PRECISION (outer)));
11361 /* Returns the smallest value obtainable by casting something in INNER type to
11365 lower_bound_in_type (tree outer, tree inner)
11367 unsigned oprec = TYPE_PRECISION (outer);
11368 unsigned iprec = TYPE_PRECISION (inner);
11370 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
11372 if (TYPE_UNSIGNED (outer)
11373 /* If we are widening something of an unsigned type, OUTER type
11374 contains all values of INNER type. In particular, both INNER
11375 and OUTER types have zero in common. */
11376 || (oprec > iprec && TYPE_UNSIGNED (inner)))
11377 return build_int_cst (outer, 0);
11380 /* If we are widening a signed type to another signed type, we
11381 want to obtain -2^^(iprec-1). If we are keeping the
11382 precision or narrowing to a signed type, we want to obtain
11384 unsigned prec = oprec > iprec ? iprec : oprec;
11385 return wide_int_to_tree (outer,
11386 wi::mask (prec - 1, true,
11387 TYPE_PRECISION (outer)));
11391 /* Return nonzero if two operands that are suitable for PHI nodes are
11392 necessarily equal. Specifically, both ARG0 and ARG1 must be either
11393 SSA_NAME or invariant. Note that this is strictly an optimization.
11394 That is, callers of this function can directly call operand_equal_p
11395 and get the same result, only slower. */
11398 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
11402 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
11404 return operand_equal_p (arg0, arg1, 0);
11407 /* Returns number of zeros at the end of binary representation of X. */
11410 num_ending_zeros (const_tree x)
11412 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
11416 #define WALK_SUBTREE(NODE) \
11419 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
11425 /* This is a subroutine of walk_tree that walks field of TYPE that are to
11426 be walked whenever a type is seen in the tree. Rest of operands and return
11427 value are as for walk_tree. */
11430 walk_type_fields (tree type, walk_tree_fn func, void *data,
11431 hash_set<tree> *pset, walk_tree_lh lh)
11433 tree result = NULL_TREE;
11435 switch (TREE_CODE (type))
11438 case REFERENCE_TYPE:
11440 /* We have to worry about mutually recursive pointers. These can't
11441 be written in C. They can in Ada. It's pathological, but
11442 there's an ACATS test (c38102a) that checks it. Deal with this
11443 by checking if we're pointing to another pointer, that one
11444 points to another pointer, that one does too, and we have no htab.
11445 If so, get a hash table. We check three levels deep to avoid
11446 the cost of the hash table if we don't need one. */
11447 if (POINTER_TYPE_P (TREE_TYPE (type))
11448 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11449 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11452 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11463 WALK_SUBTREE (TREE_TYPE (type));
11467 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11469 /* Fall through. */
11471 case FUNCTION_TYPE:
11472 WALK_SUBTREE (TREE_TYPE (type));
11476 /* We never want to walk into default arguments. */
11477 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11478 WALK_SUBTREE (TREE_VALUE (arg));
11483 /* Don't follow this nodes's type if a pointer for fear that
11484 we'll have infinite recursion. If we have a PSET, then we
11487 || (!POINTER_TYPE_P (TREE_TYPE (type))
11488 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11489 WALK_SUBTREE (TREE_TYPE (type));
11490 WALK_SUBTREE (TYPE_DOMAIN (type));
11494 WALK_SUBTREE (TREE_TYPE (type));
11495 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11505 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11506 called with the DATA and the address of each sub-tree. If FUNC returns a
11507 non-NULL value, the traversal is stopped, and the value returned by FUNC
11508 is returned. If PSET is non-NULL it is used to record the nodes visited,
11509 and to avoid visiting a node more than once. */
11512 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11513 hash_set<tree> *pset, walk_tree_lh lh)
11515 enum tree_code code;
11519 #define WALK_SUBTREE_TAIL(NODE) \
11523 goto tail_recurse; \
11528 /* Skip empty subtrees. */
11532 /* Don't walk the same tree twice, if the user has requested
11533 that we avoid doing so. */
11534 if (pset && pset->add (*tp))
11537 /* Call the function. */
11539 result = (*func) (tp, &walk_subtrees, data);
11541 /* If we found something, return it. */
11545 code = TREE_CODE (*tp);
11547 /* Even if we didn't, FUNC may have decided that there was nothing
11548 interesting below this point in the tree. */
11549 if (!walk_subtrees)
11551 /* But we still need to check our siblings. */
11552 if (code == TREE_LIST)
11553 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11554 else if (code == OMP_CLAUSE)
11555 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11562 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11563 if (result || !walk_subtrees)
11570 case IDENTIFIER_NODE:
11577 case PLACEHOLDER_EXPR:
11581 /* None of these have subtrees other than those already walked
11586 WALK_SUBTREE (TREE_VALUE (*tp));
11587 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11592 int len = TREE_VEC_LENGTH (*tp);
11597 /* Walk all elements but the first. */
11599 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11601 /* Now walk the first one as a tail call. */
11602 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11606 WALK_SUBTREE (TREE_REALPART (*tp));
11607 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11611 unsigned HOST_WIDE_INT idx;
11612 constructor_elt *ce;
11614 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11616 WALK_SUBTREE (ce->value);
11621 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11626 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11628 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11629 into declarations that are just mentioned, rather than
11630 declared; they don't really belong to this part of the tree.
11631 And, we can see cycles: the initializer for a declaration
11632 can refer to the declaration itself. */
11633 WALK_SUBTREE (DECL_INITIAL (decl));
11634 WALK_SUBTREE (DECL_SIZE (decl));
11635 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11637 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11640 case STATEMENT_LIST:
11642 tree_stmt_iterator i;
11643 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11644 WALK_SUBTREE (*tsi_stmt_ptr (i));
11649 switch (OMP_CLAUSE_CODE (*tp))
11651 case OMP_CLAUSE_GANG:
11652 case OMP_CLAUSE__GRIDDIM_:
11653 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11656 case OMP_CLAUSE_DEVICE_RESIDENT:
11657 case OMP_CLAUSE_ASYNC:
11658 case OMP_CLAUSE_WAIT:
11659 case OMP_CLAUSE_WORKER:
11660 case OMP_CLAUSE_VECTOR:
11661 case OMP_CLAUSE_NUM_GANGS:
11662 case OMP_CLAUSE_NUM_WORKERS:
11663 case OMP_CLAUSE_VECTOR_LENGTH:
11664 case OMP_CLAUSE_PRIVATE:
11665 case OMP_CLAUSE_SHARED:
11666 case OMP_CLAUSE_FIRSTPRIVATE:
11667 case OMP_CLAUSE_COPYIN:
11668 case OMP_CLAUSE_COPYPRIVATE:
11669 case OMP_CLAUSE_FINAL:
11670 case OMP_CLAUSE_IF:
11671 case OMP_CLAUSE_NUM_THREADS:
11672 case OMP_CLAUSE_SCHEDULE:
11673 case OMP_CLAUSE_UNIFORM:
11674 case OMP_CLAUSE_DEPEND:
11675 case OMP_CLAUSE_NUM_TEAMS:
11676 case OMP_CLAUSE_THREAD_LIMIT:
11677 case OMP_CLAUSE_DEVICE:
11678 case OMP_CLAUSE_DIST_SCHEDULE:
11679 case OMP_CLAUSE_SAFELEN:
11680 case OMP_CLAUSE_SIMDLEN:
11681 case OMP_CLAUSE_ORDERED:
11682 case OMP_CLAUSE_PRIORITY:
11683 case OMP_CLAUSE_GRAINSIZE:
11684 case OMP_CLAUSE_NUM_TASKS:
11685 case OMP_CLAUSE_HINT:
11686 case OMP_CLAUSE_TO_DECLARE:
11687 case OMP_CLAUSE_LINK:
11688 case OMP_CLAUSE_USE_DEVICE_PTR:
11689 case OMP_CLAUSE_IS_DEVICE_PTR:
11690 case OMP_CLAUSE__LOOPTEMP_:
11691 case OMP_CLAUSE__SIMDUID_:
11692 case OMP_CLAUSE__CILK_FOR_COUNT_:
11693 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11696 case OMP_CLAUSE_INDEPENDENT:
11697 case OMP_CLAUSE_NOWAIT:
11698 case OMP_CLAUSE_DEFAULT:
11699 case OMP_CLAUSE_UNTIED:
11700 case OMP_CLAUSE_MERGEABLE:
11701 case OMP_CLAUSE_PROC_BIND:
11702 case OMP_CLAUSE_INBRANCH:
11703 case OMP_CLAUSE_NOTINBRANCH:
11704 case OMP_CLAUSE_FOR:
11705 case OMP_CLAUSE_PARALLEL:
11706 case OMP_CLAUSE_SECTIONS:
11707 case OMP_CLAUSE_TASKGROUP:
11708 case OMP_CLAUSE_NOGROUP:
11709 case OMP_CLAUSE_THREADS:
11710 case OMP_CLAUSE_SIMD:
11711 case OMP_CLAUSE_DEFAULTMAP:
11712 case OMP_CLAUSE_AUTO:
11713 case OMP_CLAUSE_SEQ:
11714 case OMP_CLAUSE_TILE:
11715 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11717 case OMP_CLAUSE_LASTPRIVATE:
11718 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11719 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11720 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11722 case OMP_CLAUSE_COLLAPSE:
11725 for (i = 0; i < 3; i++)
11726 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11727 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11730 case OMP_CLAUSE_LINEAR:
11731 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11732 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11733 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11734 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11736 case OMP_CLAUSE_ALIGNED:
11737 case OMP_CLAUSE_FROM:
11738 case OMP_CLAUSE_TO:
11739 case OMP_CLAUSE_MAP:
11740 case OMP_CLAUSE__CACHE_:
11741 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11742 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11743 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11745 case OMP_CLAUSE_REDUCTION:
11748 for (i = 0; i < 5; i++)
11749 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11750 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11754 gcc_unreachable ();
11762 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11763 But, we only want to walk once. */
11764 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11765 for (i = 0; i < len; ++i)
11766 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11767 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11771 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11772 defining. We only want to walk into these fields of a type in this
11773 case and not in the general case of a mere reference to the type.
11775 The criterion is as follows: if the field can be an expression, it
11776 must be walked only here. This should be in keeping with the fields
11777 that are directly gimplified in gimplify_type_sizes in order for the
11778 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11779 variable-sized types.
11781 Note that DECLs get walked as part of processing the BIND_EXPR. */
11782 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11784 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11785 if (TREE_CODE (*type_p) == ERROR_MARK)
11788 /* Call the function for the type. See if it returns anything or
11789 doesn't want us to continue. If we are to continue, walk both
11790 the normal fields and those for the declaration case. */
11791 result = (*func) (type_p, &walk_subtrees, data);
11792 if (result || !walk_subtrees)
11795 /* But do not walk a pointed-to type since it may itself need to
11796 be walked in the declaration case if it isn't anonymous. */
11797 if (!POINTER_TYPE_P (*type_p))
11799 result = walk_type_fields (*type_p, func, data, pset, lh);
11804 /* If this is a record type, also walk the fields. */
11805 if (RECORD_OR_UNION_TYPE_P (*type_p))
11809 for (field = TYPE_FIELDS (*type_p); field;
11810 field = DECL_CHAIN (field))
11812 /* We'd like to look at the type of the field, but we can
11813 easily get infinite recursion. So assume it's pointed
11814 to elsewhere in the tree. Also, ignore things that
11816 if (TREE_CODE (field) != FIELD_DECL)
11819 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11820 WALK_SUBTREE (DECL_SIZE (field));
11821 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11822 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11823 WALK_SUBTREE (DECL_QUALIFIER (field));
11827 /* Same for scalar types. */
11828 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11829 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11830 || TREE_CODE (*type_p) == INTEGER_TYPE
11831 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11832 || TREE_CODE (*type_p) == REAL_TYPE)
11834 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11835 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11838 WALK_SUBTREE (TYPE_SIZE (*type_p));
11839 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11844 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11848 /* Walk over all the sub-trees of this operand. */
11849 len = TREE_OPERAND_LENGTH (*tp);
11851 /* Go through the subtrees. We need to do this in forward order so
11852 that the scope of a FOR_EXPR is handled properly. */
11855 for (i = 0; i < len - 1; ++i)
11856 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11857 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11860 /* If this is a type, walk the needed fields in the type. */
11861 else if (TYPE_P (*tp))
11862 return walk_type_fields (*tp, func, data, pset, lh);
11866 /* We didn't find what we were looking for. */
11869 #undef WALK_SUBTREE_TAIL
11871 #undef WALK_SUBTREE
11873 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11876 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11881 hash_set<tree> pset;
11882 result = walk_tree_1 (tp, func, data, &pset, lh);
11888 tree_block (tree t)
11890 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11892 if (IS_EXPR_CODE_CLASS (c))
11893 return LOCATION_BLOCK (t->exp.locus);
11894 gcc_unreachable ();
11899 tree_set_block (tree t, tree b)
11901 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11903 if (IS_EXPR_CODE_CLASS (c))
11905 t->exp.locus = set_block (t->exp.locus, b);
11908 gcc_unreachable ();
11911 /* Create a nameless artificial label and put it in the current
11912 function context. The label has a location of LOC. Returns the
11913 newly created label. */
11916 create_artificial_label (location_t loc)
11918 tree lab = build_decl (loc,
11919 LABEL_DECL, NULL_TREE, void_type_node);
11921 DECL_ARTIFICIAL (lab) = 1;
11922 DECL_IGNORED_P (lab) = 1;
11923 DECL_CONTEXT (lab) = current_function_decl;
11927 /* Given a tree, try to return a useful variable name that we can use
11928 to prefix a temporary that is being assigned the value of the tree.
11929 I.E. given <temp> = &A, return A. */
11934 tree stripped_decl;
11937 STRIP_NOPS (stripped_decl);
11938 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11939 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11940 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11942 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11945 return IDENTIFIER_POINTER (name);
11949 switch (TREE_CODE (stripped_decl))
11952 return get_name (TREE_OPERAND (stripped_decl, 0));
11959 /* Return true if TYPE has a variable argument list. */
11962 stdarg_p (const_tree fntype)
11964 function_args_iterator args_iter;
11965 tree n = NULL_TREE, t;
11970 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11975 return n != NULL_TREE && n != void_type_node;
11978 /* Return true if TYPE has a prototype. */
11981 prototype_p (const_tree fntype)
11985 gcc_assert (fntype != NULL_TREE);
11987 t = TYPE_ARG_TYPES (fntype);
11988 return (t != NULL_TREE);
11991 /* If BLOCK is inlined from an __attribute__((__artificial__))
11992 routine, return pointer to location from where it has been
11995 block_nonartificial_location (tree block)
11997 location_t *ret = NULL;
11999 while (block && TREE_CODE (block) == BLOCK
12000 && BLOCK_ABSTRACT_ORIGIN (block))
12002 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
12004 while (TREE_CODE (ao) == BLOCK
12005 && BLOCK_ABSTRACT_ORIGIN (ao)
12006 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
12007 ao = BLOCK_ABSTRACT_ORIGIN (ao);
12009 if (TREE_CODE (ao) == FUNCTION_DECL)
12011 /* If AO is an artificial inline, point RET to the
12012 call site locus at which it has been inlined and continue
12013 the loop, in case AO's caller is also an artificial
12015 if (DECL_DECLARED_INLINE_P (ao)
12016 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
12017 ret = &BLOCK_SOURCE_LOCATION (block);
12021 else if (TREE_CODE (ao) != BLOCK)
12024 block = BLOCK_SUPERCONTEXT (block);
12030 /* If EXP is inlined from an __attribute__((__artificial__))
12031 function, return the location of the original call expression. */
12034 tree_nonartificial_location (tree exp)
12036 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
12041 return EXPR_LOCATION (exp);
12045 /* These are the hash table functions for the hash table of OPTIMIZATION_NODEq
12048 /* Return the hash code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
12051 cl_option_hasher::hash (tree x)
12053 const_tree const t = x;
12057 hashval_t hash = 0;
12059 if (TREE_CODE (t) == OPTIMIZATION_NODE)
12061 p = (const char *)TREE_OPTIMIZATION (t);
12062 len = sizeof (struct cl_optimization);
12065 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
12066 return cl_target_option_hash (TREE_TARGET_OPTION (t));
12069 gcc_unreachable ();
12071 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
12073 for (i = 0; i < len; i++)
12075 hash = (hash << 4) ^ ((i << 2) | p[i]);
12080 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
12081 TARGET_OPTION tree node) is the same as that given by *Y, which is the
12085 cl_option_hasher::equal (tree x, tree y)
12087 const_tree const xt = x;
12088 const_tree const yt = y;
12093 if (TREE_CODE (xt) != TREE_CODE (yt))
12096 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
12098 xp = (const char *)TREE_OPTIMIZATION (xt);
12099 yp = (const char *)TREE_OPTIMIZATION (yt);
12100 len = sizeof (struct cl_optimization);
12103 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
12105 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
12106 TREE_TARGET_OPTION (yt));
12110 gcc_unreachable ();
12112 return (memcmp (xp, yp, len) == 0);
12115 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
12118 build_optimization_node (struct gcc_options *opts)
12122 /* Use the cache of optimization nodes. */
12124 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
12127 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
12131 /* Insert this one into the hash table. */
12132 t = cl_optimization_node;
12135 /* Make a new node for next time round. */
12136 cl_optimization_node = make_node (OPTIMIZATION_NODE);
12142 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
12145 build_target_option_node (struct gcc_options *opts)
12149 /* Use the cache of optimization nodes. */
12151 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
12154 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
12158 /* Insert this one into the hash table. */
12159 t = cl_target_option_node;
12162 /* Make a new node for next time round. */
12163 cl_target_option_node = make_node (TARGET_OPTION_NODE);
12169 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
12170 so that they aren't saved during PCH writing. */
12173 prepare_target_option_nodes_for_pch (void)
12175 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
12176 for (; iter != cl_option_hash_table->end (); ++iter)
12177 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
12178 TREE_TARGET_GLOBALS (*iter) = NULL;
12181 /* Determine the "ultimate origin" of a block. The block may be an inlined
12182 instance of an inlined instance of a block which is local to an inline
12183 function, so we have to trace all of the way back through the origin chain
12184 to find out what sort of node actually served as the original seed for the
12188 block_ultimate_origin (const_tree block)
12190 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
12192 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
12193 we're trying to output the abstract instance of this function. */
12194 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
12197 if (immediate_origin == NULL_TREE)
12202 tree lookahead = immediate_origin;
12206 ret_val = lookahead;
12207 lookahead = (TREE_CODE (ret_val) == BLOCK
12208 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
12210 while (lookahead != NULL && lookahead != ret_val);
12212 /* The block's abstract origin chain may not be the *ultimate* origin of
12213 the block. It could lead to a DECL that has an abstract origin set.
12214 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
12215 will give us if it has one). Note that DECL's abstract origins are
12216 supposed to be the most distant ancestor (or so decl_ultimate_origin
12217 claims), so we don't need to loop following the DECL origins. */
12218 if (DECL_P (ret_val))
12219 return DECL_ORIGIN (ret_val);
12225 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
12229 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
12231 /* Do not strip casts into or out of differing address spaces. */
12232 if (POINTER_TYPE_P (outer_type)
12233 && TYPE_ADDR_SPACE (TREE_TYPE (outer_type)) != ADDR_SPACE_GENERIC)
12235 if (!POINTER_TYPE_P (inner_type)
12236 || (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
12237 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type))))
12240 else if (POINTER_TYPE_P (inner_type)
12241 && TYPE_ADDR_SPACE (TREE_TYPE (inner_type)) != ADDR_SPACE_GENERIC)
12243 /* We already know that outer_type is not a pointer with
12244 a non-generic address space. */
12248 /* Use precision rather then machine mode when we can, which gives
12249 the correct answer even for submode (bit-field) types. */
12250 if ((INTEGRAL_TYPE_P (outer_type)
12251 || POINTER_TYPE_P (outer_type)
12252 || TREE_CODE (outer_type) == OFFSET_TYPE)
12253 && (INTEGRAL_TYPE_P (inner_type)
12254 || POINTER_TYPE_P (inner_type)
12255 || TREE_CODE (inner_type) == OFFSET_TYPE))
12256 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
12258 /* Otherwise fall back on comparing machine modes (e.g. for
12259 aggregate types, floats). */
12260 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
12263 /* Return true iff conversion in EXP generates no instruction. Mark
12264 it inline so that we fully inline into the stripping functions even
12265 though we have two uses of this function. */
12268 tree_nop_conversion (const_tree exp)
12270 tree outer_type, inner_type;
12272 if (!CONVERT_EXPR_P (exp)
12273 && TREE_CODE (exp) != NON_LVALUE_EXPR)
12275 if (TREE_OPERAND (exp, 0) == error_mark_node)
12278 outer_type = TREE_TYPE (exp);
12279 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12284 return tree_nop_conversion_p (outer_type, inner_type);
12287 /* Return true iff conversion in EXP generates no instruction. Don't
12288 consider conversions changing the signedness. */
12291 tree_sign_nop_conversion (const_tree exp)
12293 tree outer_type, inner_type;
12295 if (!tree_nop_conversion (exp))
12298 outer_type = TREE_TYPE (exp);
12299 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
12301 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
12302 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
12305 /* Strip conversions from EXP according to tree_nop_conversion and
12306 return the resulting expression. */
12309 tree_strip_nop_conversions (tree exp)
12311 while (tree_nop_conversion (exp))
12312 exp = TREE_OPERAND (exp, 0);
12316 /* Strip conversions from EXP according to tree_sign_nop_conversion
12317 and return the resulting expression. */
12320 tree_strip_sign_nop_conversions (tree exp)
12322 while (tree_sign_nop_conversion (exp))
12323 exp = TREE_OPERAND (exp, 0);
12327 /* Avoid any floating point extensions from EXP. */
12329 strip_float_extensions (tree exp)
12331 tree sub, expt, subt;
12333 /* For floating point constant look up the narrowest type that can hold
12334 it properly and handle it like (type)(narrowest_type)constant.
12335 This way we can optimize for instance a=a*2.0 where "a" is float
12336 but 2.0 is double constant. */
12337 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
12339 REAL_VALUE_TYPE orig;
12342 orig = TREE_REAL_CST (exp);
12343 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
12344 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
12345 type = float_type_node;
12346 else if (TYPE_PRECISION (TREE_TYPE (exp))
12347 > TYPE_PRECISION (double_type_node)
12348 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
12349 type = double_type_node;
12351 return build_real_truncate (type, orig);
12354 if (!CONVERT_EXPR_P (exp))
12357 sub = TREE_OPERAND (exp, 0);
12358 subt = TREE_TYPE (sub);
12359 expt = TREE_TYPE (exp);
12361 if (!FLOAT_TYPE_P (subt))
12364 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
12367 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
12370 return strip_float_extensions (sub);
12373 /* Strip out all handled components that produce invariant
12377 strip_invariant_refs (const_tree op)
12379 while (handled_component_p (op))
12381 switch (TREE_CODE (op))
12384 case ARRAY_RANGE_REF:
12385 if (!is_gimple_constant (TREE_OPERAND (op, 1))
12386 || TREE_OPERAND (op, 2) != NULL_TREE
12387 || TREE_OPERAND (op, 3) != NULL_TREE)
12391 case COMPONENT_REF:
12392 if (TREE_OPERAND (op, 2) != NULL_TREE)
12398 op = TREE_OPERAND (op, 0);
12404 static GTY(()) tree gcc_eh_personality_decl;
12406 /* Return the GCC personality function decl. */
12409 lhd_gcc_personality (void)
12411 if (!gcc_eh_personality_decl)
12412 gcc_eh_personality_decl = build_personality_function ("gcc");
12413 return gcc_eh_personality_decl;
12416 /* TARGET is a call target of GIMPLE call statement
12417 (obtained by gimple_call_fn). Return true if it is
12418 OBJ_TYPE_REF representing an virtual call of C++ method.
12419 (As opposed to OBJ_TYPE_REF representing objc calls
12420 through a cast where middle-end devirtualization machinery
12424 virtual_method_call_p (const_tree target)
12426 if (TREE_CODE (target) != OBJ_TYPE_REF)
12428 tree t = TREE_TYPE (target);
12429 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
12431 if (TREE_CODE (t) == FUNCTION_TYPE)
12433 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
12434 /* If we do not have BINFO associated, it means that type was built
12435 without devirtualization enabled. Do not consider this a virtual
12437 if (!TYPE_BINFO (obj_type_ref_class (target)))
12442 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
12445 obj_type_ref_class (const_tree ref)
12447 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
12448 ref = TREE_TYPE (ref);
12449 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12450 ref = TREE_TYPE (ref);
12451 /* We look for type THIS points to. ObjC also builds
12452 OBJ_TYPE_REF with non-method calls, Their first parameter
12453 ID however also corresponds to class type. */
12454 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12455 || TREE_CODE (ref) == FUNCTION_TYPE);
12456 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12457 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12458 return TREE_TYPE (ref);
12461 /* Lookup sub-BINFO of BINFO of TYPE at offset POS. */
12464 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12467 tree base_binfo, b;
12469 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12470 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12471 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12473 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12478 /* Try to find a base info of BINFO that would have its field decl at offset
12479 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12480 found, return, otherwise return NULL_TREE. */
12483 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12485 tree type = BINFO_TYPE (binfo);
12489 HOST_WIDE_INT pos, size;
12493 if (types_same_for_odr (type, expected_type))
12498 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12500 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12503 pos = int_bit_position (fld);
12504 size = tree_to_uhwi (DECL_SIZE (fld));
12505 if (pos <= offset && (pos + size) > offset)
12508 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12511 /* Offset 0 indicates the primary base, whose vtable contents are
12512 represented in the binfo for the derived class. */
12513 else if (offset != 0)
12515 tree found_binfo = NULL, base_binfo;
12516 /* Offsets in BINFO are in bytes relative to the whole structure
12517 while POS is in bits relative to the containing field. */
12518 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12521 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12522 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12523 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12525 found_binfo = base_binfo;
12529 binfo = found_binfo;
12531 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12535 type = TREE_TYPE (fld);
12540 /* Returns true if X is a typedef decl. */
12543 is_typedef_decl (const_tree x)
12545 return (x && TREE_CODE (x) == TYPE_DECL
12546 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12549 /* Returns true iff TYPE is a type variant created for a typedef. */
12552 typedef_variant_p (const_tree type)
12554 return is_typedef_decl (TYPE_NAME (type));
12557 /* Warn about a use of an identifier which was marked deprecated. */
12559 warn_deprecated_use (tree node, tree attr)
12563 if (node == 0 || !warn_deprecated_decl)
12569 attr = DECL_ATTRIBUTES (node);
12570 else if (TYPE_P (node))
12572 tree decl = TYPE_STUB_DECL (node);
12574 attr = lookup_attribute ("deprecated",
12575 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12580 attr = lookup_attribute ("deprecated", attr);
12583 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12591 w = warning (OPT_Wdeprecated_declarations,
12592 "%qD is deprecated: %s", node, msg);
12594 w = warning (OPT_Wdeprecated_declarations,
12595 "%qD is deprecated", node);
12597 inform (DECL_SOURCE_LOCATION (node), "declared here");
12599 else if (TYPE_P (node))
12601 tree what = NULL_TREE;
12602 tree decl = TYPE_STUB_DECL (node);
12604 if (TYPE_NAME (node))
12606 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12607 what = TYPE_NAME (node);
12608 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12609 && DECL_NAME (TYPE_NAME (node)))
12610 what = DECL_NAME (TYPE_NAME (node));
12618 w = warning (OPT_Wdeprecated_declarations,
12619 "%qE is deprecated: %s", what, msg);
12621 w = warning (OPT_Wdeprecated_declarations,
12622 "%qE is deprecated", what);
12627 w = warning (OPT_Wdeprecated_declarations,
12628 "type is deprecated: %s", msg);
12630 w = warning (OPT_Wdeprecated_declarations,
12631 "type is deprecated");
12634 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12641 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12644 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12649 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12652 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12658 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12659 somewhere in it. */
12662 contains_bitfld_component_ref_p (const_tree ref)
12664 while (handled_component_p (ref))
12666 if (TREE_CODE (ref) == COMPONENT_REF
12667 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12669 ref = TREE_OPERAND (ref, 0);
12675 /* Try to determine whether a TRY_CATCH expression can fall through.
12676 This is a subroutine of block_may_fallthru. */
12679 try_catch_may_fallthru (const_tree stmt)
12681 tree_stmt_iterator i;
12683 /* If the TRY block can fall through, the whole TRY_CATCH can
12685 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12688 i = tsi_start (TREE_OPERAND (stmt, 1));
12689 switch (TREE_CODE (tsi_stmt (i)))
12692 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12693 catch expression and a body. The whole TRY_CATCH may fall
12694 through iff any of the catch bodies falls through. */
12695 for (; !tsi_end_p (i); tsi_next (&i))
12697 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12702 case EH_FILTER_EXPR:
12703 /* The exception filter expression only matters if there is an
12704 exception. If the exception does not match EH_FILTER_TYPES,
12705 we will execute EH_FILTER_FAILURE, and we will fall through
12706 if that falls through. If the exception does match
12707 EH_FILTER_TYPES, the stack unwinder will continue up the
12708 stack, so we will not fall through. We don't know whether we
12709 will throw an exception which matches EH_FILTER_TYPES or not,
12710 so we just ignore EH_FILTER_TYPES and assume that we might
12711 throw an exception which doesn't match. */
12712 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12715 /* This case represents statements to be executed when an
12716 exception occurs. Those statements are implicitly followed
12717 by a RESX statement to resume execution after the exception.
12718 So in this case the TRY_CATCH never falls through. */
12723 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12724 need not be 100% accurate; simply be conservative and return true if we
12725 don't know. This is used only to avoid stupidly generating extra code.
12726 If we're wrong, we'll just delete the extra code later. */
12729 block_may_fallthru (const_tree block)
12731 /* This CONST_CAST is okay because expr_last returns its argument
12732 unmodified and we assign it to a const_tree. */
12733 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12735 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12739 /* Easy cases. If the last statement of the block implies
12740 control transfer, then we can't fall through. */
12744 /* If SWITCH_LABELS is set, this is lowered, and represents a
12745 branch to a selected label and hence can not fall through.
12746 Otherwise SWITCH_BODY is set, and the switch can fall
12748 return SWITCH_LABELS (stmt) == NULL_TREE;
12751 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12753 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12756 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12758 case TRY_CATCH_EXPR:
12759 return try_catch_may_fallthru (stmt);
12761 case TRY_FINALLY_EXPR:
12762 /* The finally clause is always executed after the try clause,
12763 so if it does not fall through, then the try-finally will not
12764 fall through. Otherwise, if the try clause does not fall
12765 through, then when the finally clause falls through it will
12766 resume execution wherever the try clause was going. So the
12767 whole try-finally will only fall through if both the try
12768 clause and the finally clause fall through. */
12769 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12770 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12773 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12774 stmt = TREE_OPERAND (stmt, 1);
12780 /* Functions that do not return do not fall through. */
12781 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12783 case CLEANUP_POINT_EXPR:
12784 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12787 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12793 return lang_hooks.block_may_fallthru (stmt);
12797 /* True if we are using EH to handle cleanups. */
12798 static bool using_eh_for_cleanups_flag = false;
12800 /* This routine is called from front ends to indicate eh should be used for
12803 using_eh_for_cleanups (void)
12805 using_eh_for_cleanups_flag = true;
12808 /* Query whether EH is used for cleanups. */
12810 using_eh_for_cleanups_p (void)
12812 return using_eh_for_cleanups_flag;
12815 /* Wrapper for tree_code_name to ensure that tree code is valid */
12817 get_tree_code_name (enum tree_code code)
12819 const char *invalid = "<invalid tree code>";
12821 if (code >= MAX_TREE_CODES)
12824 return tree_code_name[code];
12827 /* Drops the TREE_OVERFLOW flag from T. */
12830 drop_tree_overflow (tree t)
12832 gcc_checking_assert (TREE_OVERFLOW (t));
12834 /* For tree codes with a sharing machinery re-build the result. */
12835 if (TREE_CODE (t) == INTEGER_CST)
12836 return wide_int_to_tree (TREE_TYPE (t), t);
12838 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12839 and drop the flag. */
12841 TREE_OVERFLOW (t) = 0;
12845 /* Given a memory reference expression T, return its base address.
12846 The base address of a memory reference expression is the main
12847 object being referenced. For instance, the base address for
12848 'array[i].fld[j]' is 'array'. You can think of this as stripping
12849 away the offset part from a memory address.
12851 This function calls handled_component_p to strip away all the inner
12852 parts of the memory reference until it reaches the base object. */
12855 get_base_address (tree t)
12857 while (handled_component_p (t))
12858 t = TREE_OPERAND (t, 0);
12860 if ((TREE_CODE (t) == MEM_REF
12861 || TREE_CODE (t) == TARGET_MEM_REF)
12862 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12863 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12865 /* ??? Either the alias oracle or all callers need to properly deal
12866 with WITH_SIZE_EXPRs before we can look through those. */
12867 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12873 /* Return a tree of sizetype representing the size, in bytes, of the element
12874 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12877 array_ref_element_size (tree exp)
12879 tree aligned_size = TREE_OPERAND (exp, 3);
12880 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
12881 location_t loc = EXPR_LOCATION (exp);
12883 /* If a size was specified in the ARRAY_REF, it's the size measured
12884 in alignment units of the element type. So multiply by that value. */
12887 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12888 sizetype from another type of the same width and signedness. */
12889 if (TREE_TYPE (aligned_size) != sizetype)
12890 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
12891 return size_binop_loc (loc, MULT_EXPR, aligned_size,
12892 size_int (TYPE_ALIGN_UNIT (elmt_type)));
12895 /* Otherwise, take the size from that of the element type. Substitute
12896 any PLACEHOLDER_EXPR that we have. */
12898 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
12901 /* Return a tree representing the lower bound of the array mentioned in
12902 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12905 array_ref_low_bound (tree exp)
12907 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12909 /* If a lower bound is specified in EXP, use it. */
12910 if (TREE_OPERAND (exp, 2))
12911 return TREE_OPERAND (exp, 2);
12913 /* Otherwise, if there is a domain type and it has a lower bound, use it,
12914 substituting for a PLACEHOLDER_EXPR as needed. */
12915 if (domain_type && TYPE_MIN_VALUE (domain_type))
12916 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
12918 /* Otherwise, return a zero of the appropriate type. */
12919 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
12922 /* Return a tree representing the upper bound of the array mentioned in
12923 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
12926 array_ref_up_bound (tree exp)
12928 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
12930 /* If there is a domain type and it has an upper bound, use it, substituting
12931 for a PLACEHOLDER_EXPR as needed. */
12932 if (domain_type && TYPE_MAX_VALUE (domain_type))
12933 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
12935 /* Otherwise fail. */
12939 /* Returns true if REF is an array reference to an array at the end of
12940 a structure. If this is the case, the array may be allocated larger
12941 than its upper bound implies. */
12944 array_at_struct_end_p (tree ref)
12946 if (TREE_CODE (ref) != ARRAY_REF
12947 && TREE_CODE (ref) != ARRAY_RANGE_REF)
12950 while (handled_component_p (ref))
12952 /* If the reference chain contains a component reference to a
12953 non-union type and there follows another field the reference
12954 is not at the end of a structure. */
12955 if (TREE_CODE (ref) == COMPONENT_REF
12956 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
12958 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
12959 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
12960 nextf = DECL_CHAIN (nextf);
12965 ref = TREE_OPERAND (ref, 0);
12968 /* If the reference is based on a declared entity, the size of the array
12969 is constrained by its given domain. (Do not trust commons PR/69368). */
12971 && !(flag_unconstrained_commons
12972 && TREE_CODE (ref) == VAR_DECL && DECL_COMMON (ref)))
12978 /* Return a tree representing the offset, in bytes, of the field referenced
12979 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
12982 component_ref_field_offset (tree exp)
12984 tree aligned_offset = TREE_OPERAND (exp, 2);
12985 tree field = TREE_OPERAND (exp, 1);
12986 location_t loc = EXPR_LOCATION (exp);
12988 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
12989 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
12991 if (aligned_offset)
12993 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
12994 sizetype from another type of the same width and signedness. */
12995 if (TREE_TYPE (aligned_offset) != sizetype)
12996 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
12997 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
12998 size_int (DECL_OFFSET_ALIGN (field)
13002 /* Otherwise, take the offset from that of the field. Substitute
13003 any PLACEHOLDER_EXPR that we have. */
13005 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
13008 /* Return the machine mode of T. For vectors, returns the mode of the
13009 inner type. The main use case is to feed the result to HONOR_NANS,
13010 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
13013 element_mode (const_tree t)
13017 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
13019 return TYPE_MODE (t);
13023 /* Veirfy that basic properties of T match TV and thus T can be a variant of
13024 TV. TV should be the more specified variant (i.e. the main variant). */
13027 verify_type_variant (const_tree t, tree tv)
13029 /* Type variant can differ by:
13031 - TYPE_QUALS: TYPE_READONLY, TYPE_VOLATILE, TYPE_ATOMIC, TYPE_RESTRICT,
13032 ENCODE_QUAL_ADDR_SPACE.
13033 - main variant may be TYPE_COMPLETE_P and variant types !TYPE_COMPLETE_P
13034 in this case some values may not be set in the variant types
13035 (see TYPE_COMPLETE_P checks).
13036 - it is possible to have TYPE_ARTIFICIAL variant of non-artifical type
13037 - by TYPE_NAME and attributes (i.e. when variant originate by typedef)
13038 - TYPE_CANONICAL (TYPE_ALIAS_SET is the same among variants)
13039 - by the alignment: TYPE_ALIGN and TYPE_USER_ALIGN
13040 - during LTO by TYPE_CONTEXT if type is TYPE_FILE_SCOPE_P
13041 this is necessary to make it possible to merge types form different TUs
13042 - arrays, pointers and references may have TREE_TYPE that is a variant
13043 of TREE_TYPE of their main variants.
13044 - aggregates may have new TYPE_FIELDS list that list variants of
13045 the main variant TYPE_FIELDS.
13046 - vector types may differ by TYPE_VECTOR_OPAQUE
13047 - TYPE_METHODS is always NULL for vairant types and maintained for
13051 /* Convenience macro for matching individual fields. */
13052 #define verify_variant_match(flag) \
13054 if (flag (tv) != flag (t)) \
13056 error ("type variant differs by " #flag "."); \
13062 /* tree_base checks. */
13064 verify_variant_match (TREE_CODE);
13065 /* FIXME: Ada builds non-artificial variants of artificial types. */
13066 if (TYPE_ARTIFICIAL (tv) && 0)
13067 verify_variant_match (TYPE_ARTIFICIAL);
13068 if (POINTER_TYPE_P (tv))
13069 verify_variant_match (TYPE_REF_CAN_ALIAS_ALL);
13070 /* FIXME: TYPE_SIZES_GIMPLIFIED may differs for Ada build. */
13071 verify_variant_match (TYPE_UNSIGNED);
13072 verify_variant_match (TYPE_ALIGN_OK);
13073 verify_variant_match (TYPE_PACKED);
13074 if (TREE_CODE (t) == REFERENCE_TYPE)
13075 verify_variant_match (TYPE_REF_IS_RVALUE);
13076 if (AGGREGATE_TYPE_P (t))
13077 verify_variant_match (TYPE_REVERSE_STORAGE_ORDER);
13079 verify_variant_match (TYPE_SATURATING);
13080 /* FIXME: This check trigger during libstdc++ build. */
13081 if (RECORD_OR_UNION_TYPE_P (t) && COMPLETE_TYPE_P (t) && 0)
13082 verify_variant_match (TYPE_FINAL_P);
13084 /* tree_type_common checks. */
13086 if (COMPLETE_TYPE_P (t))
13088 verify_variant_match (TYPE_SIZE);
13089 verify_variant_match (TYPE_MODE);
13090 if (TYPE_SIZE_UNIT (t) != TYPE_SIZE_UNIT (tv)
13091 /* FIXME: ideally we should compare pointer equality, but java FE
13092 produce variants where size is INTEGER_CST of different type (int
13093 wrt size_type) during libjava biuld. */
13094 && !operand_equal_p (TYPE_SIZE_UNIT (t), TYPE_SIZE_UNIT (tv), 0))
13096 error ("type variant has different TYPE_SIZE_UNIT");
13098 error ("type variant's TYPE_SIZE_UNIT");
13099 debug_tree (TYPE_SIZE_UNIT (tv));
13100 error ("type's TYPE_SIZE_UNIT");
13101 debug_tree (TYPE_SIZE_UNIT (t));
13105 verify_variant_match (TYPE_PRECISION);
13106 verify_variant_match (TYPE_NEEDS_CONSTRUCTING);
13107 if (RECORD_OR_UNION_TYPE_P (t))
13108 verify_variant_match (TYPE_TRANSPARENT_AGGR);
13109 else if (TREE_CODE (t) == ARRAY_TYPE)
13110 verify_variant_match (TYPE_NONALIASED_COMPONENT);
13111 /* During LTO we merge variant lists from diferent translation units
13112 that may differ BY TYPE_CONTEXT that in turn may point
13113 to TRANSLATION_UNIT_DECL.
13114 Ada also builds variants of types with different TYPE_CONTEXT. */
13115 if ((!in_lto_p || !TYPE_FILE_SCOPE_P (t)) && 0)
13116 verify_variant_match (TYPE_CONTEXT);
13117 verify_variant_match (TYPE_STRING_FLAG);
13118 if (TYPE_ALIAS_SET_KNOWN_P (t))
13120 error ("type variant with TYPE_ALIAS_SET_KNOWN_P");
13125 /* tree_type_non_common checks. */
13127 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13128 and dangle the pointer from time to time. */
13129 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_VFIELD (t) != TYPE_VFIELD (tv)
13130 && (in_lto_p || !TYPE_VFIELD (tv)
13131 || TREE_CODE (TYPE_VFIELD (tv)) != TREE_LIST))
13133 error ("type variant has different TYPE_VFIELD");
13137 if ((TREE_CODE (t) == ENUMERAL_TYPE && COMPLETE_TYPE_P (t))
13138 || TREE_CODE (t) == INTEGER_TYPE
13139 || TREE_CODE (t) == BOOLEAN_TYPE
13140 || TREE_CODE (t) == REAL_TYPE
13141 || TREE_CODE (t) == FIXED_POINT_TYPE)
13143 verify_variant_match (TYPE_MAX_VALUE);
13144 verify_variant_match (TYPE_MIN_VALUE);
13146 if (TREE_CODE (t) == METHOD_TYPE)
13147 verify_variant_match (TYPE_METHOD_BASETYPE);
13148 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_METHODS (t))
13150 error ("type variant has TYPE_METHODS");
13154 if (TREE_CODE (t) == OFFSET_TYPE)
13155 verify_variant_match (TYPE_OFFSET_BASETYPE);
13156 if (TREE_CODE (t) == ARRAY_TYPE)
13157 verify_variant_match (TYPE_ARRAY_MAX_SIZE);
13158 /* FIXME: Be lax and allow TYPE_BINFO to be missing in variant types
13159 or even type's main variant. This is needed to make bootstrap pass
13160 and the bug seems new in GCC 5.
13161 C++ FE should be updated to make this consistent and we should check
13162 that TYPE_BINFO is always NULL for !COMPLETE_TYPE_P and otherwise there
13163 is a match with main variant.
13165 Also disable the check for Java for now because of parser hack that builds
13166 first an dummy BINFO and then sometimes replace it by real BINFO in some
13168 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t) && TYPE_BINFO (tv)
13169 && TYPE_BINFO (t) != TYPE_BINFO (tv)
13170 /* FIXME: Java sometimes keep dump TYPE_BINFOs on variant types.
13171 Since there is no cheap way to tell C++/Java type w/o LTO, do checking
13172 at LTO time only. */
13173 && (in_lto_p && odr_type_p (t)))
13175 error ("type variant has different TYPE_BINFO");
13177 error ("type variant's TYPE_BINFO");
13178 debug_tree (TYPE_BINFO (tv));
13179 error ("type's TYPE_BINFO");
13180 debug_tree (TYPE_BINFO (t));
13184 /* Check various uses of TYPE_VALUES_RAW. */
13185 if (TREE_CODE (t) == ENUMERAL_TYPE)
13186 verify_variant_match (TYPE_VALUES);
13187 else if (TREE_CODE (t) == ARRAY_TYPE)
13188 verify_variant_match (TYPE_DOMAIN);
13189 /* Permit incomplete variants of complete type. While FEs may complete
13190 all variants, this does not happen for C++ templates in all cases. */
13191 else if (RECORD_OR_UNION_TYPE_P (t)
13192 && COMPLETE_TYPE_P (t)
13193 && TYPE_FIELDS (t) != TYPE_FIELDS (tv))
13197 /* Fortran builds qualified variants as new records with items of
13198 qualified type. Verify that they looks same. */
13199 for (f1 = TYPE_FIELDS (t), f2 = TYPE_FIELDS (tv);
13201 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13202 if (TREE_CODE (f1) != FIELD_DECL || TREE_CODE (f2) != FIELD_DECL
13203 || (TYPE_MAIN_VARIANT (TREE_TYPE (f1))
13204 != TYPE_MAIN_VARIANT (TREE_TYPE (f2))
13205 /* FIXME: gfc_nonrestricted_type builds all types as variants
13206 with exception of pointer types. It deeply copies the type
13207 which means that we may end up with a variant type
13208 referring non-variant pointer. We may change it to
13209 produce types as variants, too, like
13210 objc_get_protocol_qualified_type does. */
13211 && !POINTER_TYPE_P (TREE_TYPE (f1)))
13212 || DECL_FIELD_OFFSET (f1) != DECL_FIELD_OFFSET (f2)
13213 || DECL_FIELD_BIT_OFFSET (f1) != DECL_FIELD_BIT_OFFSET (f2))
13217 error ("type variant has different TYPE_FIELDS");
13219 error ("first mismatch is field");
13221 error ("and field");
13226 else if ((TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE))
13227 verify_variant_match (TYPE_ARG_TYPES);
13228 /* For C++ the qualified variant of array type is really an array type
13229 of qualified TREE_TYPE.
13230 objc builds variants of pointer where pointer to type is a variant, too
13231 in objc_get_protocol_qualified_type. */
13232 if (TREE_TYPE (t) != TREE_TYPE (tv)
13233 && ((TREE_CODE (t) != ARRAY_TYPE
13234 && !POINTER_TYPE_P (t))
13235 || TYPE_MAIN_VARIANT (TREE_TYPE (t))
13236 != TYPE_MAIN_VARIANT (TREE_TYPE (tv))))
13238 error ("type variant has different TREE_TYPE");
13240 error ("type variant's TREE_TYPE");
13241 debug_tree (TREE_TYPE (tv));
13242 error ("type's TREE_TYPE");
13243 debug_tree (TREE_TYPE (t));
13246 if (type_with_alias_set_p (t)
13247 && !gimple_canonical_types_compatible_p (t, tv, false))
13249 error ("type is not compatible with its vairant");
13251 error ("type variant's TREE_TYPE");
13252 debug_tree (TREE_TYPE (tv));
13253 error ("type's TREE_TYPE");
13254 debug_tree (TREE_TYPE (t));
13258 #undef verify_variant_match
13262 /* The TYPE_CANONICAL merging machinery. It should closely resemble
13263 the middle-end types_compatible_p function. It needs to avoid
13264 claiming types are different for types that should be treated
13265 the same with respect to TBAA. Canonical types are also used
13266 for IL consistency checks via the useless_type_conversion_p
13267 predicate which does not handle all type kinds itself but falls
13268 back to pointer-comparison of TYPE_CANONICAL for aggregates
13271 /* Return true if TYPE_UNSIGNED of TYPE should be ignored for canonical
13272 type calculation because we need to allow inter-operability between signed
13273 and unsigned variants. */
13276 type_with_interoperable_signedness (const_tree type)
13278 /* Fortran standard require C_SIGNED_CHAR to be interoperable with both
13279 signed char and unsigned char. Similarly fortran FE builds
13280 C_SIZE_T as signed type, while C defines it unsigned. */
13282 return tree_code_for_canonical_type_merging (TREE_CODE (type))
13284 && (TYPE_PRECISION (type) == TYPE_PRECISION (signed_char_type_node)
13285 || TYPE_PRECISION (type) == TYPE_PRECISION (size_type_node));
13288 /* Return true iff T1 and T2 are structurally identical for what
13290 This function is used both by lto.c canonical type merging and by the
13291 verifier. If TRUST_TYPE_CANONICAL we do not look into structure of types
13292 that have TYPE_CANONICAL defined and assume them equivalent. This is useful
13293 only for LTO because only in these cases TYPE_CANONICAL equivalence
13294 correspond to one defined by gimple_canonical_types_compatible_p. */
13297 gimple_canonical_types_compatible_p (const_tree t1, const_tree t2,
13298 bool trust_type_canonical)
13300 /* Type variants should be same as the main variant. When not doing sanity
13301 checking to verify this fact, go to main variants and save some work. */
13302 if (trust_type_canonical)
13304 t1 = TYPE_MAIN_VARIANT (t1);
13305 t2 = TYPE_MAIN_VARIANT (t2);
13308 /* Check first for the obvious case of pointer identity. */
13312 /* Check that we have two types to compare. */
13313 if (t1 == NULL_TREE || t2 == NULL_TREE)
13316 /* We consider complete types always compatible with incomplete type.
13317 This does not make sense for canonical type calculation and thus we
13318 need to ensure that we are never called on it.
13320 FIXME: For more correctness the function probably should have three modes
13321 1) mode assuming that types are complete mathcing their structure
13322 2) mode allowing incomplete types but producing equivalence classes
13323 and thus ignoring all info from complete types
13324 3) mode allowing incomplete types to match complete but checking
13325 compatibility between complete types.
13327 1 and 2 can be used for canonical type calculation. 3 is the real
13328 definition of type compatibility that can be used i.e. for warnings during
13329 declaration merging. */
13331 gcc_assert (!trust_type_canonical
13332 || (type_with_alias_set_p (t1) && type_with_alias_set_p (t2)));
13333 /* If the types have been previously registered and found equal
13336 if (TYPE_CANONICAL (t1) && TYPE_CANONICAL (t2)
13337 && trust_type_canonical)
13339 /* Do not use TYPE_CANONICAL of pointer types. For LTO streamed types
13340 they are always NULL, but they are set to non-NULL for types
13341 constructed by build_pointer_type and variants. In this case the
13342 TYPE_CANONICAL is more fine grained than the equivalnce we test (where
13343 all pointers are considered equal. Be sure to not return false
13345 gcc_checking_assert (canonical_type_used_p (t1)
13346 && canonical_type_used_p (t2));
13347 return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2);
13350 /* Can't be the same type if the types don't have the same code. */
13351 enum tree_code code = tree_code_for_canonical_type_merging (TREE_CODE (t1));
13352 if (code != tree_code_for_canonical_type_merging (TREE_CODE (t2)))
13355 /* Qualifiers do not matter for canonical type comparison purposes. */
13357 /* Void types and nullptr types are always the same. */
13358 if (TREE_CODE (t1) == VOID_TYPE
13359 || TREE_CODE (t1) == NULLPTR_TYPE)
13362 /* Can't be the same type if they have different mode. */
13363 if (TYPE_MODE (t1) != TYPE_MODE (t2))
13366 /* Non-aggregate types can be handled cheaply. */
13367 if (INTEGRAL_TYPE_P (t1)
13368 || SCALAR_FLOAT_TYPE_P (t1)
13369 || FIXED_POINT_TYPE_P (t1)
13370 || TREE_CODE (t1) == VECTOR_TYPE
13371 || TREE_CODE (t1) == COMPLEX_TYPE
13372 || TREE_CODE (t1) == OFFSET_TYPE
13373 || POINTER_TYPE_P (t1))
13375 /* Can't be the same type if they have different recision. */
13376 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2))
13379 /* In some cases the signed and unsigned types are required to be
13381 if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2)
13382 && !type_with_interoperable_signedness (t1))
13385 /* Fortran's C_SIGNED_CHAR is !TYPE_STRING_FLAG but needs to be
13386 interoperable with "signed char". Unless all frontends are revisited
13387 to agree on these types, we must ignore the flag completely. */
13389 /* Fortran standard define C_PTR type that is compatible with every
13390 C pointer. For this reason we need to glob all pointers into one.
13391 Still pointers in different address spaces are not compatible. */
13392 if (POINTER_TYPE_P (t1))
13394 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
13395 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
13399 /* Tail-recurse to components. */
13400 if (TREE_CODE (t1) == VECTOR_TYPE
13401 || TREE_CODE (t1) == COMPLEX_TYPE)
13402 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
13404 trust_type_canonical);
13409 /* Do type-specific comparisons. */
13410 switch (TREE_CODE (t1))
13413 /* Array types are the same if the element types are the same and
13414 the number of elements are the same. */
13415 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13416 trust_type_canonical)
13417 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
13418 || TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2)
13419 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
13423 tree i1 = TYPE_DOMAIN (t1);
13424 tree i2 = TYPE_DOMAIN (t2);
13426 /* For an incomplete external array, the type domain can be
13427 NULL_TREE. Check this condition also. */
13428 if (i1 == NULL_TREE && i2 == NULL_TREE)
13430 else if (i1 == NULL_TREE || i2 == NULL_TREE)
13434 tree min1 = TYPE_MIN_VALUE (i1);
13435 tree min2 = TYPE_MIN_VALUE (i2);
13436 tree max1 = TYPE_MAX_VALUE (i1);
13437 tree max2 = TYPE_MAX_VALUE (i2);
13439 /* The minimum/maximum values have to be the same. */
13442 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
13443 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
13444 || operand_equal_p (min1, min2, 0))))
13447 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
13448 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
13449 || operand_equal_p (max1, max2, 0)))))
13457 case FUNCTION_TYPE:
13458 /* Function types are the same if the return type and arguments types
13460 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2),
13461 trust_type_canonical))
13464 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
13468 tree parms1, parms2;
13470 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
13472 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
13474 if (!gimple_canonical_types_compatible_p
13475 (TREE_VALUE (parms1), TREE_VALUE (parms2),
13476 trust_type_canonical))
13480 if (parms1 || parms2)
13488 case QUAL_UNION_TYPE:
13492 /* Don't try to compare variants of an incomplete type, before
13493 TYPE_FIELDS has been copied around. */
13494 if (!COMPLETE_TYPE_P (t1) && !COMPLETE_TYPE_P (t2))
13498 if (TYPE_REVERSE_STORAGE_ORDER (t1) != TYPE_REVERSE_STORAGE_ORDER (t2))
13501 /* For aggregate types, all the fields must be the same. */
13502 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
13504 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
13506 /* Skip non-fields and zero-sized fields. */
13507 while (f1 && (TREE_CODE (f1) != FIELD_DECL
13509 && integer_zerop (DECL_SIZE (f1)))))
13510 f1 = TREE_CHAIN (f1);
13511 while (f2 && (TREE_CODE (f2) != FIELD_DECL
13513 && integer_zerop (DECL_SIZE (f2)))))
13514 f2 = TREE_CHAIN (f2);
13517 /* The fields must have the same name, offset and type. */
13518 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
13519 || !gimple_compare_field_offset (f1, f2)
13520 || !gimple_canonical_types_compatible_p
13521 (TREE_TYPE (f1), TREE_TYPE (f2),
13522 trust_type_canonical))
13526 /* If one aggregate has more fields than the other, they
13527 are not the same. */
13535 /* Consider all types with language specific trees in them mutually
13536 compatible. This is executed only from verify_type and false
13537 positives can be tolerated. */
13538 gcc_assert (!in_lto_p);
13543 /* Verify type T. */
13546 verify_type (const_tree t)
13548 bool error_found = false;
13549 tree mv = TYPE_MAIN_VARIANT (t);
13552 error ("Main variant is not defined");
13553 error_found = true;
13555 else if (mv != TYPE_MAIN_VARIANT (mv))
13557 error ("TYPE_MAIN_VARIANT has different TYPE_MAIN_VARIANT");
13559 error_found = true;
13561 else if (t != mv && !verify_type_variant (t, mv))
13562 error_found = true;
13564 tree ct = TYPE_CANONICAL (t);
13567 else if (TYPE_CANONICAL (t) != ct)
13569 error ("TYPE_CANONICAL has different TYPE_CANONICAL");
13571 error_found = true;
13573 /* Method and function types can not be used to address memory and thus
13574 TYPE_CANONICAL really matters only for determining useless conversions.
13576 FIXME: C++ FE produce declarations of builtin functions that are not
13577 compatible with main variants. */
13578 else if (TREE_CODE (t) == FUNCTION_TYPE)
13581 /* FIXME: gimple_canonical_types_compatible_p can not compare types
13582 with variably sized arrays because their sizes possibly
13583 gimplified to different variables. */
13584 && !variably_modified_type_p (ct, NULL)
13585 && !gimple_canonical_types_compatible_p (t, ct, false))
13587 error ("TYPE_CANONICAL is not compatible");
13589 error_found = true;
13592 if (COMPLETE_TYPE_P (t) && TYPE_CANONICAL (t)
13593 && TYPE_MODE (t) != TYPE_MODE (TYPE_CANONICAL (t)))
13595 error ("TYPE_MODE of TYPE_CANONICAL is not compatible");
13597 error_found = true;
13599 /* FIXME: this is violated by the C++ FE as discussed in PR70029, when
13600 FUNCTION_*_QUALIFIED flags are set. */
13601 if (0 && TYPE_MAIN_VARIANT (t) == t && ct && TYPE_MAIN_VARIANT (ct) != ct)
13603 error ("TYPE_CANONICAL of main variant is not main variant");
13605 debug_tree (TYPE_MAIN_VARIANT (ct));
13606 error_found = true;
13610 /* Check various uses of TYPE_MINVAL. */
13611 if (RECORD_OR_UNION_TYPE_P (t))
13613 /* FIXME: C FE uses TYPE_VFIELD to record C_TYPE_INCOMPLETE_VARS
13614 and danagle the pointer from time to time. */
13615 if (TYPE_VFIELD (t)
13616 && TREE_CODE (TYPE_VFIELD (t)) != FIELD_DECL
13617 && TREE_CODE (TYPE_VFIELD (t)) != TREE_LIST)
13619 error ("TYPE_VFIELD is not FIELD_DECL nor TREE_LIST");
13620 debug_tree (TYPE_VFIELD (t));
13621 error_found = true;
13624 else if (TREE_CODE (t) == POINTER_TYPE)
13626 if (TYPE_NEXT_PTR_TO (t)
13627 && TREE_CODE (TYPE_NEXT_PTR_TO (t)) != POINTER_TYPE)
13629 error ("TYPE_NEXT_PTR_TO is not POINTER_TYPE");
13630 debug_tree (TYPE_NEXT_PTR_TO (t));
13631 error_found = true;
13634 else if (TREE_CODE (t) == REFERENCE_TYPE)
13636 if (TYPE_NEXT_REF_TO (t)
13637 && TREE_CODE (TYPE_NEXT_REF_TO (t)) != REFERENCE_TYPE)
13639 error ("TYPE_NEXT_REF_TO is not REFERENCE_TYPE");
13640 debug_tree (TYPE_NEXT_REF_TO (t));
13641 error_found = true;
13644 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13645 || TREE_CODE (t) == FIXED_POINT_TYPE)
13647 /* FIXME: The following check should pass:
13648 useless_type_conversion_p (const_cast <tree> (t),
13649 TREE_TYPE (TYPE_MIN_VALUE (t))
13650 but does not for C sizetypes in LTO. */
13652 /* Java uses TYPE_MINVAL for TYPE_ARGUMENT_SIGNATURE. */
13653 else if (TYPE_MINVAL (t)
13654 && ((TREE_CODE (t) != METHOD_TYPE && TREE_CODE (t) != FUNCTION_TYPE)
13657 error ("TYPE_MINVAL non-NULL");
13658 debug_tree (TYPE_MINVAL (t));
13659 error_found = true;
13662 /* Check various uses of TYPE_MAXVAL. */
13663 if (RECORD_OR_UNION_TYPE_P (t))
13665 if (TYPE_METHODS (t) && TREE_CODE (TYPE_METHODS (t)) != FUNCTION_DECL
13666 && TREE_CODE (TYPE_METHODS (t)) != TEMPLATE_DECL
13667 && TYPE_METHODS (t) != error_mark_node)
13669 error ("TYPE_METHODS is not FUNCTION_DECL, TEMPLATE_DECL nor error_mark_node");
13670 debug_tree (TYPE_METHODS (t));
13671 error_found = true;
13674 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13676 if (TYPE_METHOD_BASETYPE (t)
13677 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != RECORD_TYPE
13678 && TREE_CODE (TYPE_METHOD_BASETYPE (t)) != UNION_TYPE)
13680 error ("TYPE_METHOD_BASETYPE is not record nor union");
13681 debug_tree (TYPE_METHOD_BASETYPE (t));
13682 error_found = true;
13685 else if (TREE_CODE (t) == OFFSET_TYPE)
13687 if (TYPE_OFFSET_BASETYPE (t)
13688 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != RECORD_TYPE
13689 && TREE_CODE (TYPE_OFFSET_BASETYPE (t)) != UNION_TYPE)
13691 error ("TYPE_OFFSET_BASETYPE is not record nor union");
13692 debug_tree (TYPE_OFFSET_BASETYPE (t));
13693 error_found = true;
13696 else if (INTEGRAL_TYPE_P (t) || TREE_CODE (t) == REAL_TYPE
13697 || TREE_CODE (t) == FIXED_POINT_TYPE)
13699 /* FIXME: The following check should pass:
13700 useless_type_conversion_p (const_cast <tree> (t),
13701 TREE_TYPE (TYPE_MAX_VALUE (t))
13702 but does not for C sizetypes in LTO. */
13704 else if (TREE_CODE (t) == ARRAY_TYPE)
13706 if (TYPE_ARRAY_MAX_SIZE (t)
13707 && TREE_CODE (TYPE_ARRAY_MAX_SIZE (t)) != INTEGER_CST)
13709 error ("TYPE_ARRAY_MAX_SIZE not INTEGER_CST");
13710 debug_tree (TYPE_ARRAY_MAX_SIZE (t));
13711 error_found = true;
13714 else if (TYPE_MAXVAL (t))
13716 error ("TYPE_MAXVAL non-NULL");
13717 debug_tree (TYPE_MAXVAL (t));
13718 error_found = true;
13721 /* Check various uses of TYPE_BINFO. */
13722 if (RECORD_OR_UNION_TYPE_P (t))
13724 if (!TYPE_BINFO (t))
13726 else if (TREE_CODE (TYPE_BINFO (t)) != TREE_BINFO)
13728 error ("TYPE_BINFO is not TREE_BINFO");
13729 debug_tree (TYPE_BINFO (t));
13730 error_found = true;
13732 /* FIXME: Java builds invalid empty binfos that do not have
13734 else if (TREE_TYPE (TYPE_BINFO (t)) != TYPE_MAIN_VARIANT (t) && 0)
13736 error ("TYPE_BINFO type is not TYPE_MAIN_VARIANT");
13737 debug_tree (TREE_TYPE (TYPE_BINFO (t)));
13738 error_found = true;
13741 else if (TYPE_LANG_SLOT_1 (t) && in_lto_p)
13743 error ("TYPE_LANG_SLOT_1 (binfo) field is non-NULL");
13744 debug_tree (TYPE_LANG_SLOT_1 (t));
13745 error_found = true;
13748 /* Check various uses of TYPE_VALUES_RAW. */
13749 if (TREE_CODE (t) == ENUMERAL_TYPE)
13750 for (tree l = TYPE_VALUES (t); l; l = TREE_CHAIN (l))
13752 tree value = TREE_VALUE (l);
13753 tree name = TREE_PURPOSE (l);
13755 /* C FE porduce INTEGER_CST of INTEGER_TYPE, while C++ FE uses
13756 CONST_DECL of ENUMERAL TYPE. */
13757 if (TREE_CODE (value) != INTEGER_CST && TREE_CODE (value) != CONST_DECL)
13759 error ("Enum value is not CONST_DECL or INTEGER_CST");
13760 debug_tree (value);
13762 error_found = true;
13764 if (TREE_CODE (TREE_TYPE (value)) != INTEGER_TYPE
13765 && !useless_type_conversion_p (const_cast <tree> (t), TREE_TYPE (value)))
13767 error ("Enum value type is not INTEGER_TYPE nor convertible to the enum");
13768 debug_tree (value);
13770 error_found = true;
13772 if (TREE_CODE (name) != IDENTIFIER_NODE)
13774 error ("Enum value name is not IDENTIFIER_NODE");
13775 debug_tree (value);
13777 error_found = true;
13780 else if (TREE_CODE (t) == ARRAY_TYPE)
13782 if (TYPE_DOMAIN (t) && TREE_CODE (TYPE_DOMAIN (t)) != INTEGER_TYPE)
13784 error ("Array TYPE_DOMAIN is not integer type");
13785 debug_tree (TYPE_DOMAIN (t));
13786 error_found = true;
13789 else if (RECORD_OR_UNION_TYPE_P (t))
13791 if (TYPE_FIELDS (t) && !COMPLETE_TYPE_P (t) && in_lto_p)
13793 error ("TYPE_FIELDS defined in incomplete type");
13794 error_found = true;
13796 for (tree fld = TYPE_FIELDS (t); fld; fld = TREE_CHAIN (fld))
13798 /* TODO: verify properties of decls. */
13799 if (TREE_CODE (fld) == FIELD_DECL)
13801 else if (TREE_CODE (fld) == TYPE_DECL)
13803 else if (TREE_CODE (fld) == CONST_DECL)
13805 else if (TREE_CODE (fld) == VAR_DECL)
13807 else if (TREE_CODE (fld) == TEMPLATE_DECL)
13809 else if (TREE_CODE (fld) == USING_DECL)
13813 error ("Wrong tree in TYPE_FIELDS list");
13815 error_found = true;
13819 else if (TREE_CODE (t) == INTEGER_TYPE
13820 || TREE_CODE (t) == BOOLEAN_TYPE
13821 || TREE_CODE (t) == OFFSET_TYPE
13822 || TREE_CODE (t) == REFERENCE_TYPE
13823 || TREE_CODE (t) == NULLPTR_TYPE
13824 || TREE_CODE (t) == POINTER_TYPE)
13826 if (TYPE_CACHED_VALUES_P (t) != (TYPE_CACHED_VALUES (t) != NULL))
13828 error ("TYPE_CACHED_VALUES_P is %i while TYPE_CACHED_VALUES is %p",
13829 TYPE_CACHED_VALUES_P (t), (void *)TYPE_CACHED_VALUES (t));
13830 error_found = true;
13832 else if (TYPE_CACHED_VALUES_P (t) && TREE_CODE (TYPE_CACHED_VALUES (t)) != TREE_VEC)
13834 error ("TYPE_CACHED_VALUES is not TREE_VEC");
13835 debug_tree (TYPE_CACHED_VALUES (t));
13836 error_found = true;
13838 /* Verify just enough of cache to ensure that no one copied it to new type.
13839 All copying should go by copy_node that should clear it. */
13840 else if (TYPE_CACHED_VALUES_P (t))
13843 for (i = 0; i < TREE_VEC_LENGTH (TYPE_CACHED_VALUES (t)); i++)
13844 if (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)
13845 && TREE_TYPE (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i)) != t)
13847 error ("wrong TYPE_CACHED_VALUES entry");
13848 debug_tree (TREE_VEC_ELT (TYPE_CACHED_VALUES (t), i));
13849 error_found = true;
13854 else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
13855 for (tree l = TYPE_ARG_TYPES (t); l; l = TREE_CHAIN (l))
13857 /* C++ FE uses TREE_PURPOSE to store initial values. */
13858 if (TREE_PURPOSE (l) && in_lto_p)
13860 error ("TREE_PURPOSE is non-NULL in TYPE_ARG_TYPES list");
13862 error_found = true;
13864 if (!TYPE_P (TREE_VALUE (l)))
13866 error ("Wrong entry in TYPE_ARG_TYPES list");
13868 error_found = true;
13871 else if (!is_lang_specific (t) && TYPE_VALUES_RAW (t))
13873 error ("TYPE_VALUES_RAW field is non-NULL");
13874 debug_tree (TYPE_VALUES_RAW (t));
13875 error_found = true;
13877 if (TREE_CODE (t) != INTEGER_TYPE
13878 && TREE_CODE (t) != BOOLEAN_TYPE
13879 && TREE_CODE (t) != OFFSET_TYPE
13880 && TREE_CODE (t) != REFERENCE_TYPE
13881 && TREE_CODE (t) != NULLPTR_TYPE
13882 && TREE_CODE (t) != POINTER_TYPE
13883 && TYPE_CACHED_VALUES_P (t))
13885 error ("TYPE_CACHED_VALUES_P is set while it should not");
13886 error_found = true;
13888 if (TYPE_STRING_FLAG (t)
13889 && TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != INTEGER_TYPE)
13891 error ("TYPE_STRING_FLAG is set on wrong type code");
13892 error_found = true;
13894 else if (TYPE_STRING_FLAG (t))
13897 if (TREE_CODE (b) == ARRAY_TYPE)
13899 /* Java builds arrays with TYPE_STRING_FLAG of promoted_char_type
13901 if (TREE_CODE (b) != INTEGER_TYPE)
13903 error ("TYPE_STRING_FLAG is set on type that does not look like "
13904 "char nor array of chars");
13905 error_found = true;
13909 /* ipa-devirt makes an assumption that TYPE_METHOD_BASETYPE is always
13910 TYPE_MAIN_VARIANT and it would be odd to add methods only to variatns
13912 if (TREE_CODE (t) == METHOD_TYPE
13913 && TYPE_MAIN_VARIANT (TYPE_METHOD_BASETYPE (t)) != TYPE_METHOD_BASETYPE (t))
13915 error ("TYPE_METHOD_BASETYPE is not main variant");
13916 error_found = true;
13921 debug_tree (const_cast <tree> (t));
13922 internal_error ("verify_type failed");
13927 /* Return true if ARG is marked with the nonnull attribute in the
13928 current function signature. */
13931 nonnull_arg_p (const_tree arg)
13933 tree t, attrs, fntype;
13934 unsigned HOST_WIDE_INT arg_num;
13936 gcc_assert (TREE_CODE (arg) == PARM_DECL
13937 && (POINTER_TYPE_P (TREE_TYPE (arg))
13938 || TREE_CODE (TREE_TYPE (arg)) == OFFSET_TYPE));
13940 /* The static chain decl is always non null. */
13941 if (arg == cfun->static_chain_decl)
13944 /* THIS argument of method is always non-NULL. */
13945 if (TREE_CODE (TREE_TYPE (cfun->decl)) == METHOD_TYPE
13946 && arg == DECL_ARGUMENTS (cfun->decl)
13947 && flag_delete_null_pointer_checks)
13950 /* Values passed by reference are always non-NULL. */
13951 if (TREE_CODE (TREE_TYPE (arg)) == REFERENCE_TYPE
13952 && flag_delete_null_pointer_checks)
13955 fntype = TREE_TYPE (cfun->decl);
13956 for (attrs = TYPE_ATTRIBUTES (fntype); attrs; attrs = TREE_CHAIN (attrs))
13958 attrs = lookup_attribute ("nonnull", attrs);
13960 /* If "nonnull" wasn't specified, we know nothing about the argument. */
13961 if (attrs == NULL_TREE)
13964 /* If "nonnull" applies to all the arguments, then ARG is non-null. */
13965 if (TREE_VALUE (attrs) == NULL_TREE)
13968 /* Get the position number for ARG in the function signature. */
13969 for (arg_num = 1, t = DECL_ARGUMENTS (cfun->decl);
13971 t = DECL_CHAIN (t), arg_num++)
13977 gcc_assert (t == arg);
13979 /* Now see if ARG_NUM is mentioned in the nonnull list. */
13980 for (t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
13982 if (compare_tree_int (TREE_VALUE (t), arg_num) == 0)
13990 /* Given location LOC, strip away any packed range information
13991 or ad-hoc information. */
13994 get_pure_location (location_t loc)
13996 if (IS_ADHOC_LOC (loc))
13998 = line_table->location_adhoc_data_map.data[loc & MAX_SOURCE_LOCATION].locus;
14000 if (loc >= LINEMAPS_MACRO_LOWEST_LOCATION (line_table))
14003 if (loc < RESERVED_LOCATION_COUNT)
14006 const line_map *map = linemap_lookup (line_table, loc);
14007 const line_map_ordinary *ordmap = linemap_check_ordinary (map);
14009 return loc & ~((1 << ordmap->m_range_bits) - 1);
14012 /* Combine LOC and BLOCK to a combined adhoc loc, retaining any range
14016 set_block (location_t loc, tree block)
14018 location_t pure_loc = get_pure_location (loc);
14019 source_range src_range = get_range_from_loc (line_table, loc);
14020 return COMBINE_LOCATION_DATA (line_table, pure_loc, src_range, block);
14024 set_source_range (tree expr, location_t start, location_t finish)
14026 source_range src_range;
14027 src_range.m_start = start;
14028 src_range.m_finish = finish;
14029 return set_source_range (expr, src_range);
14033 set_source_range (tree expr, source_range src_range)
14035 if (!EXPR_P (expr))
14036 return UNKNOWN_LOCATION;
14038 location_t pure_loc = get_pure_location (EXPR_LOCATION (expr));
14039 location_t adhoc = COMBINE_LOCATION_DATA (line_table,
14043 SET_EXPR_LOCATION (expr, adhoc);
14048 make_location (location_t caret, location_t start, location_t finish)
14050 location_t pure_loc = get_pure_location (caret);
14051 source_range src_range;
14052 src_range.m_start = start;
14053 src_range.m_finish = finish;
14054 location_t combined_loc = COMBINE_LOCATION_DATA (line_table,
14058 return combined_loc;
14061 /* Return the name of combined function FN, for debugging purposes. */
14064 combined_fn_name (combined_fn fn)
14066 if (builtin_fn_p (fn))
14068 tree fndecl = builtin_decl_explicit (as_builtin_fn (fn));
14069 return IDENTIFIER_POINTER (DECL_NAME (fndecl));
14072 return internal_fn_name (as_internal_fn (fn));
14075 #include "gt-tree.h"