1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2019, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
32 #include "stringpool.h"
34 #include "diagnostic.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
44 #include "common/common-target.h"
45 #include "langhooks.h"
46 #include "tree-dump.h"
47 #include "tree-inline.h"
60 /* If nonzero, pretend we are allocating at global level. */
/* NOTE(review): the declaration this comment documents (original line 61,
   presumably "int force_global;" -- the variable is read below in
   global_bindings_p and gnat_pushdecl) is missing from this dump: the
   embedded line numbering jumps from 60 to 63.  */
63 /* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66 int double_float_alignment;
68 /* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71 int double_scalar_alignment;
73 /* True if floating-point arithmetics may use wider intermediate results. */
74 bool fp_arith_may_widen = true;
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82 /* Likewise, but with extra info for each of the possible raise reasons. */
83 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_stack_protect_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_noicf_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_noipa_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
101 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
102 static tree handle_flatten_attribute (tree *, tree, tree, int, bool *);
103 static tree handle_used_attribute (tree *, tree, tree, int, bool *);
104 static tree handle_cold_attribute (tree *, tree, tree, int, bool *);
105 static tree handle_hot_attribute (tree *, tree, tree, int, bool *);
106 static tree handle_target_attribute (tree *, tree, tree, int, bool *);
107 static tree handle_target_clones_attribute (tree *, tree, tree, int, bool *);
108 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
109 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
/* Exclusion table: declares "cold" and "hot" mutually exclusive; it is
   referenced by the "cold" and "hot" rows of gnat_internal_attribute_table
   below.  NOTE(review): the braces opening and closing this initializer
   (original lines 112 and 116) are missing from this dump.  */
111 static const struct attribute_spec::exclusions attr_cold_hot_exclusions[] =
113 { "cold", true, true, true },
114 { "hot" , true, true, true },
115 { NULL , false, false, false }
118 /* Fake handler for attributes we don't properly support, typically because
119 they'd require dragging a lot of the common-c front-end circuitry. */
120 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
122 /* Table of machine-independent internal attributes for Ada. We support
123 this minimal set of attributes to accommodate the needs of builtins. */
124 const struct attribute_spec gnat_internal_attribute_table[] =
126 /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
127 affects_type_identity, handler, exclude } */
128 { "const", 0, 0, true, false, false, false,
129 handle_const_attribute, NULL },
130 { "nothrow", 0, 0, true, false, false, false,
131 handle_nothrow_attribute, NULL },
132 { "pure", 0, 0, true, false, false, false,
133 handle_pure_attribute, NULL },
134 { "no vops", 0, 0, true, false, false, false,
135 handle_novops_attribute, NULL },
136 { "nonnull", 0, -1, false, true, true, false,
137 handle_nonnull_attribute, NULL },
138 { "sentinel", 0, 1, false, true, true, false,
139 handle_sentinel_attribute, NULL },
140 { "noreturn", 0, 0, true, false, false, false,
141 handle_noreturn_attribute, NULL },
142 { "stack_protect",0, 0, true, false, false, false,
143 handle_stack_protect_attribute, NULL },
144 { "noinline", 0, 0, true, false, false, false,
145 handle_noinline_attribute, NULL },
146 { "noclone", 0, 0, true, false, false, false,
147 handle_noclone_attribute, NULL },
148 { "no_icf", 0, 0, true, false, false, false,
149 handle_noicf_attribute, NULL },
150 { "noipa", 0, 0, true, false, false, false,
151 handle_noipa_attribute, NULL },
152 { "leaf", 0, 0, true, false, false, false,
153 handle_leaf_attribute, NULL },
154 { "always_inline",0, 0, true, false, false, false,
155 handle_always_inline_attribute, NULL },
156 { "malloc", 0, 0, true, false, false, false,
157 handle_malloc_attribute, NULL },
158 { "type generic", 0, 0, false, true, true, false,
159 handle_type_generic_attribute, NULL },
161 { "flatten", 0, 0, true, false, false, false,
162 handle_flatten_attribute, NULL },
163 { "used", 0, 0, true, false, false, false,
164 handle_used_attribute, NULL },
165 { "cold", 0, 0, true, false, false, false,
166 handle_cold_attribute, attr_cold_hot_exclusions },
167 { "hot", 0, 0, true, false, false, false,
168 handle_hot_attribute, attr_cold_hot_exclusions },
169 { "target", 1, -1, true, false, false, false,
170 handle_target_attribute, NULL },
171 { "target_clones",1, -1, true, false, false, false,
172 handle_target_clones_attribute, NULL },
174 { "vector_size", 1, 1, false, true, false, false,
175 handle_vector_size_attribute, NULL },
176 { "vector_type", 0, 0, false, true, false, false,
177 handle_vector_type_attribute, NULL },
/* NOTE(review): the handler half of the "may_alias" row (original lines
   179-180) is missing from this dump -- the numbering jumps 178 to 181.  */
178 { "may_alias", 0, 0, false, true, false, false,
181 /* ??? format and format_arg are heavy and not supported, which actually
182 prevents support for stdio builtins, which we however declare as part
183 of the common builtins.def contents. */
184 { "format", 3, 3, false, true, true, false,
185 fake_attribute_handler, NULL },
186 { "format_arg", 1, 1, false, true, true, false,
187 fake_attribute_handler, NULL },
/* Terminating sentinel row -- NOTE(review): its second line and the closing
   brace of the table (original lines 190-192) are missing from this dump.  */
189 { NULL, 0, 0, false, false, false, false,
193 /* Associates a GNAT tree node to a GCC tree node. It is used in
194 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
195 of `save_gnu_tree' for more info. */
/* The array is allocated with max_gnat_nodes entries (see init_gnat_utils)
   and indexed by (GNAT_ENTITY - First_Node_Id); the GTY length marker tells
   the garbage collector how many slots to scan.  */
196 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
198 #define GET_GNU_TREE(GNAT_ENTITY) \
199 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
201 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
202 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
204 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
205 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
207 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
208 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
210 #define GET_DUMMY_NODE(GNAT_ENTITY) \
211 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
213 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
214 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
216 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
217 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
219 /* This variable keeps a table for types for each precision so that we only
220 allocate each of them once. Signed and unsigned types are kept separate.
222 Note that these types are only used when fold-const requests something
223 special. Perhaps we should NOT share these types; we'll see how it
225 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
227 /* Likewise for float types, but record these by mode. */
228 static GTY(()) tree float_types[NUM_MACHINE_MODES];
230 /* For each binding contour we allocate a binding_level structure to indicate
231 the binding depth. */
/* NOTE(review): the field declarations for the BLOCK node and the setjmp
   buffer decl (original lines 237 and 240-242, referenced below as ->block
   and ->jmpbuf_decl by gnat_pushlevel and friends) are missing from this
   dump, along with the struct's closing brace.  */
233 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
234 /* The binding level containing this one (the enclosing binding level). */
235 struct gnat_binding_level *chain;
236 /* The BLOCK node for this level. */
238 /* If nonzero, the setjmp buffer that needs to be updated for any
239 variable-sized definition within this context. */
243 /* The binding level currently in effect. */
244 static GTY(()) struct gnat_binding_level *current_binding_level;
246 /* A chain of gnat_binding_level structures awaiting reuse. */
247 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
249 /* The context to be used for global declarations. */
250 static GTY(()) tree global_context;
252 /* An array of global declarations. */
253 static GTY(()) vec<tree, va_gc> *global_decls;
255 /* An array of builtin function declarations. */
256 static GTY(()) vec<tree, va_gc> *builtin_decls;
258 /* A chain of unused BLOCK nodes. */
259 static GTY((deletable)) tree free_block_chain;
261 /* A hash table of padded types. It is modelled on the generic type
262 hash table in tree.c, which must thus be used as a reference. */
/* NOTE(review): the members of pad_type_hash (original lines 265-269;
   the hasher below reads t->hash and keep_cache_entry reads t->type, so
   at least those two fields exist) are missing from this dump, as are the
   braces around the hasher's member list.  */
264 struct GTY((for_user)) pad_type_hash
270 struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
272 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
273 static bool equal (pad_type_hash *a, pad_type_hash *b);
276 keep_cache_entry (pad_type_hash *&t)
278 return ggc_marked_p (t->type);
282 static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;
284 static tree merge_sizes (tree, tree, tree, bool, bool);
285 static tree fold_bit_position (const_tree);
286 static tree compute_related_constant (tree, tree);
287 static tree split_plus (tree, tree *);
288 static tree float_type_for_precision (int, machine_mode);
289 static tree convert_to_fat_pointer (tree, tree);
290 static unsigned int scale_by_factor_of (tree, unsigned int);
291 static bool potential_alignment_gap (tree, tree, tree);
293 /* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
294 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
/* NOTE(review): the declarations for the DECL field, the force_global value
   and the types list (original lines 298-299, 302, 304-305, 307-308) are
   missing from this dump -- only their comments remain below.  */
295 struct deferred_decl_context_node
297 /* The ..._DECL node to work on. */
300 /* The corresponding entity's Scope. */
301 Entity_Id gnat_scope;
303 /* The value of force_global when DECL was pushed. */
306 /* The list of ..._TYPE nodes to propagate the context to. */
309 /* The next queue item. */
310 struct deferred_decl_context_node *next;
313 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
315 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
316 feed it with the elaboration of GNAT_SCOPE. */
317 static struct deferred_decl_context_node *
318 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
320 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
321 feed it with the DECL_CONTEXT computed as part of N as soon as it is
323 static void add_deferred_type_context (struct deferred_decl_context_node *n,
326 /* Initialize data structures of the utils.c module. */
/* Allocates the two GNAT-node -> GCC-tree maps (max_gnat_nodes slots each,
   zero-filled) and the padded-type hash table.  NOTE(review): the "void"
   return-type line and the function's braces (original lines 327-328, 330)
   are missing from this dump.  */
329 init_gnat_utils (void)
331 /* Initialize the association of GNAT nodes to GCC trees. */
332 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
334 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
335 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
337 /* Initialize the hash table of padded types. */
338 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
341 /* Destroy data structures of the utils.c module. */
/* Inverse of init_gnat_utils: frees both node maps and empties the
   padded-type hash table, nulling each pointer so the GC drops them.
   NOTE(review): the return-type line and braces are missing from this
   dump (numbering jumps 341 to 344).  */
344 destroy_gnat_utils (void)
346 /* Destroy the association of GNAT nodes to GCC trees. */
347 ggc_free (associate_gnat_to_gnu);
348 associate_gnat_to_gnu = NULL;
350 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
351 ggc_free (dummy_node_table);
352 dummy_node_table = NULL;
354 /* Destroy the hash table of padded types. */
355 pad_type_hash_table->empty ();
356 pad_type_hash_table = NULL;
359 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
360 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
361 If NO_CHECK is true, the latter check is suppressed.
363 If GNU_DECL is zero, reset a previous association. */
366 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
368 /* Check that GNAT_ENTITY is not already defined and that it is being set
369 to something which is a decl. If that is not the case, this usually
370 means GNAT_ENTITY is defined twice, but occasionally is due to some
/* NOTE(review): the tail of the comment above (original line 371) is
   missing from this dump.  The assert rejects re-association and
   non-DECL values (unless NO_CHECK); a null GNU_DECL always passes and
   simply clears the slot.  */
372 gcc_assert (!(gnu_decl
373 && (PRESENT_GNU_TREE (gnat_entity)
374 || (!no_check && !DECL_P (gnu_decl)))));
376 SET_GNU_TREE (gnat_entity, gnu_decl);
379 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
380 that was associated with it. If there is no such tree node, abort.
382 In some cases, such as delayed elaboration or expressions that need to
383 be elaborated only once, GNAT_ENTITY is really not an entity. */
386 get_gnu_tree (Entity_Id gnat_entity)
388 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
389 return GET_GNU_TREE (gnat_entity);
392 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
/* Thin predicate over PRESENT_GNU_TREE; unlike get_gnu_tree it never
   aborts.  */
395 present_gnu_tree (Entity_Id gnat_entity)
397 return PRESENT_GNU_TREE (gnat_entity);
400 /* Make a dummy type corresponding to GNAT_TYPE. */
403 make_dummy_type (Entity_Id gnat_type)
405 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
406 tree gnu_type, debug_type;
408 /* If there was no equivalent type (can only happen when just annotating
409 types) or underlying type, go back to the original type. */
411 gnat_equiv = gnat_type;
413 /* If there is already a dummy type, use that one. Else make one. */
414 if (PRESENT_DUMMY_NODE (gnat_equiv))
415 return GET_DUMMY_NODE (gnat_equiv);
417 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
/* NOTE(review): the "else" arm of this make_node call (original line 421,
   the non-record tree code) is missing from this dump.  */
419 gnu_type = make_node (Is_Record_Type (gnat_equiv)
420 ? tree_code_for_record_type (gnat_equiv)
422 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
423 TYPE_DUMMY_P (gnu_type) = 1;
424 TYPE_STUB_DECL (gnu_type)
425 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
426 if (Is_By_Reference_Type (gnat_equiv))
427 TYPE_BY_REFERENCE_P (gnu_type) = 1;
429 SET_DUMMY_NODE (gnat_equiv, gnu_type);
431 /* Create a debug type so that debuggers only see an unspecified type. */
432 if (Needs_Debug_Info (gnat_type))
434 debug_type = make_node (LANG_TYPE);
435 TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
436 TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
437 SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);
/* NOTE(review): the function's return statement and closing brace
   (original lines 438-442) are missing from this dump.  */
443 /* Return the dummy type that was made for GNAT_TYPE, if any. */
446 get_dummy_type (Entity_Id gnat_type)
448 return GET_DUMMY_NODE (gnat_type);
451 /* Build dummy fat and thin pointer types whose designated type is specified
452 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
455 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
457 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
458 tree gnu_fat_type, fields, gnu_object_type;
/* The XUB record stands for the bounds template of the unconstrained
   array.  */
460 gnu_template_type = make_node (RECORD_TYPE);
461 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
462 TYPE_DUMMY_P (gnu_template_type) = 1;
463 gnu_ptr_template = build_pointer_type (gnu_template_type);
/* NOTE(review): ENUMERAL_TYPE here is deliberate-looking (the node is
   marked TYPE_DUMMY_P and named XUA), presumably a placeholder for the
   actual array type -- confirm against the full upstream source, since
   the comment that precedes it (original line 464) is missing from this
   dump.  */
465 gnu_array_type = make_node (ENUMERAL_TYPE);
466 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
467 TYPE_DUMMY_P (gnu_array_type) = 1;
468 gnu_ptr_array = build_pointer_type (gnu_array_type);
470 gnu_fat_type = make_node (RECORD_TYPE);
471 /* Build a stub DECL to trigger the special processing for fat pointer types
473 TYPE_NAME (gnu_fat_type)
474 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
/* The fat pointer pairs a pointer to the array data (P_ARRAY) with a
   pointer to its bounds template (P_BOUNDS).  NOTE(review): the line
   chaining the second field onto FIELDS (original line 478) is missing
   from this dump.  */
476 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
477 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
479 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
480 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
481 finish_fat_pointer_type (gnu_fat_type, fields);
482 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
483 /* Suppress debug info until after the type is completed. */
484 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
486 gnu_object_type = make_node (RECORD_TYPE);
487 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
488 TYPE_DUMMY_P (gnu_object_type) = 1;
490 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
491 TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
492 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
495 /* Return true if we are in the global binding level. */
/* True when forced global or when no function is being compiled.
   NOTE(review): the return-type line and braces (original lines 496-497,
   499) are missing from this dump.  */
498 global_bindings_p (void)
500 return force_global || !current_function_decl;
503 /* Enter a new binding level. */
/* Recycles a gnat_binding_level struct and a BLOCK node from the free
   lists when available, otherwise allocates fresh ones, then pushes the
   new level onto current_binding_level.  NOTE(review): several structural
   lines (braces, "else" keywords -- original lines 507, 509, 512, 515-516,
   518, 521, 525-526, 528, 532, 536, 541) are missing from this dump.  */
506 gnat_pushlevel (void)
508 struct gnat_binding_level *newlevel = NULL;
510 /* Reuse a struct for this binding level, if there is one. */
511 if (free_binding_level)
513 newlevel = free_binding_level;
514 free_binding_level = free_binding_level->chain;
517 newlevel = ggc_alloc<gnat_binding_level> ();
519 /* Use a free BLOCK, if any; otherwise, allocate one. */
520 if (free_block_chain)
522 newlevel->block = free_block_chain;
523 free_block_chain = BLOCK_CHAIN (free_block_chain);
524 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
527 newlevel->block = make_node (BLOCK);
529 /* Point the BLOCK we just made to its parent. */
530 if (current_binding_level)
531 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
533 BLOCK_VARS (newlevel->block) = NULL_TREE;
534 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
535 TREE_USED (newlevel->block) = 1;
537 /* Add this level to the front of the chain (stack) of active levels. */
538 newlevel->chain = current_binding_level;
539 newlevel->jmpbuf_decl = NULL_TREE;
540 current_binding_level = newlevel;
543 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
544 and point FNDECL to this BLOCK. */
547 set_current_block_context (tree fndecl)
549 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
550 DECL_INITIAL (fndecl) = current_binding_level->block;
551 set_block_for_group (current_binding_level->block);
554 /* Set the jmpbuf_decl for the current binding level to DECL. */
/* Simple setter; the buffer is consulted when variable-sized objects are
   created in this contour (see the gnat_binding_level comment above).  */
557 set_block_jmpbuf_decl (tree decl)
559 current_binding_level->jmpbuf_decl = decl;
562 /* Get the jmpbuf_decl, if any, for the current binding level. */
565 get_block_jmpbuf_decl (void)
567 return current_binding_level->jmpbuf_decl;
570 /* Exit a binding level. Set any BLOCK into the current code group. */
/* NOTE(review): the function header itself (original lines 571-574,
   presumably "void gnat_poplevel (void) {" -- this is the pop matching
   gnat_pushlevel above) is missing from this dump, as are various braces
   and the "else" introducing the final arm.  */
575 struct gnat_binding_level *level = current_binding_level;
576 tree block = level->block;
578 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
579 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
581 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
582 are no variables free the block and merge its subblocks into those of its
583 parent block. Otherwise, add it to the list of its parent. */
584 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
586 else if (!BLOCK_VARS (block))
588 BLOCK_SUBBLOCKS (level->chain->block)
589 = block_chainon (BLOCK_SUBBLOCKS (block),
590 BLOCK_SUBBLOCKS (level->chain->block));
591 BLOCK_CHAIN (block) = free_block_chain;
592 free_block_chain = block;
596 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
597 BLOCK_SUBBLOCKS (level->chain->block) = block;
598 TREE_USED (block) = 1;
599 set_block_for_group (block);
602 /* Free this binding structure. */
603 current_binding_level = level->chain;
604 level->chain = free_binding_level;
605 free_binding_level = level;
608 /* Exit a binding level and discard the associated BLOCK. */
/* NOTE(review): the function header (original lines 609-612, presumably
   "void gnat_zaplevel (void) {") is missing from this dump.  Unlike
   gnat_poplevel, the BLOCK goes straight onto the free chain and is
   never attached to the parent.  */
613 struct gnat_binding_level *level = current_binding_level;
614 tree block = level->block;
616 BLOCK_CHAIN (block) = free_block_chain;
617 free_block_chain = block;
619 /* Free this binding structure. */
620 current_binding_level = level->chain;
621 level->chain = free_binding_level;
622 free_binding_level = level;
625 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
628 gnat_set_type_context (tree type, tree context)
630 tree decl = TYPE_STUB_DECL (type);
632 TYPE_CONTEXT (type) = context;
/* Walk the chain of parallel types hanging off successive stub decls.  */
634 while (decl && DECL_PARALLEL_TYPE (decl))
636 tree parallel_type = DECL_PARALLEL_TYPE (decl);
638 /* Give a context to the parallel types and their stub decl, if any.
639 Some parallel types seem to be present in multiple parallel type
640 chains, so don't mess with their context if they already have one. */
641 if (!TYPE_CONTEXT (parallel_type))
643 if (TYPE_STUB_DECL (parallel_type))
644 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
645 TYPE_CONTEXT (parallel_type) = context;
648 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
652 /* Return the innermost scope, starting at GNAT_NODE, we are interested in
653 the debug info, or Empty if there is no such scope. If not NULL, set
654 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
/* NOTE(review): this function is heavily truncated in this dump -- the
   return-type line, braces, several case labels of the switch, the "else"
   assigning gnat_entity from a non-defining node, and the final return
   (original lines 655-656, 658, 660-661, 663, 667-669, 671, 673-675, 678,
   683, 685-687, 689-691, 693-696, 698-700) are all missing.  Only the
   surviving skeleton is shown.  */
657 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
659 Entity_Id gnat_entity;
662 *is_subprogram = false;
664 if (Nkind (gnat_node) == N_Defining_Identifier
665 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
666 gnat_entity = Scope (gnat_node);
670 while (Present (gnat_entity))
672 switch (Ekind (gnat_entity))
676 if (Present (Protected_Body_Subprogram (gnat_entity)))
677 gnat_entity = Protected_Body_Subprogram (gnat_entity);
679 /* If the scope is a subprogram, then just rely on
680 current_function_decl, so that we don't have to defer
681 anything. This is needed because other places rely on the
682 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
684 *is_subprogram = true;
688 case E_Record_Subtype:
692 /* By default, we are not interested in this particular scope: go to
697 gnat_entity = Scope (gnat_entity);
703 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
/* NOTE(review): the tail of the comment above and the function header /
   "if (n)" guard (original lines 704-706, 709-710, 712) are missing from
   this dump; only the two alternative statements survive.  */
707 defer_or_set_type_context (tree type, tree context,
708 struct deferred_decl_context_node *n)
711 add_deferred_type_context (n, type);
713 gnat_set_type_context (type, context);
716 /* Return global_context, but create it first if need be. */
/* Lazily builds the TRANSLATION_UNIT_DECL from main_input_filename and
   registers it with the debug back-end on first use.  NOTE(review): the
   guard testing global_context and the start of the assignment (original
   lines 720-723) are missing from this dump.  */
719 get_global_context (void)
724 = build_translation_unit_decl (get_identifier (main_input_filename));
725 debug_hooks->register_main_translation_unit (global_context);
728 return global_context;
731 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
732 for location information and flag propagation. */
/* NOTE(review): this long function is truncated in many places in this
   dump (missing braces, "else" keywords and a few statement fragments,
   e.g. original lines 733-734, 736, 739, 743, 749, 755, 762, 765, 767,
   770-771, 774, 777, 784, 788, 791, 793, 797, 799, 807, 817-818, 821-823,
   830, 832, 839, 848, 852, 855, 859, 865-866, 870-873, 878, 883, 885, 888,
   890-893).  The surviving lines are kept byte-identical below.  */
735 gnat_pushdecl (tree decl, Node_Id gnat_node)
737 tree context = NULL_TREE;
738 struct deferred_decl_context_node *deferred_decl_context = NULL;
740 /* If explicitely asked to make DECL global or if it's an imported nested
741 object, short-circuit the regular Scope-based context computation. */
742 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
744 /* Rely on the GNAT scope, or fallback to the current_function_decl if
745 the GNAT scope reached the global scope, if it reached a subprogram
746 or the declaration is a subprogram or a variable (for them we skip
747 intermediate context types because the subprogram body elaboration
748 machinery and the inliner both expect a subprogram context).
750 Falling back to current_function_decl is necessary for implicit
751 subprograms created by gigi, such as the elaboration subprograms. */
752 bool context_is_subprogram = false;
753 const Entity_Id gnat_scope
754 = get_debug_scope (gnat_node, &context_is_subprogram);
756 if (Present (gnat_scope)
757 && !context_is_subprogram
758 && TREE_CODE (decl) != FUNCTION_DECL
759 && TREE_CODE (decl) != VAR_DECL)
760 /* Always assume the scope has not been elaborated, thus defer the
761 context propagation to the time its elaboration will be
763 deferred_decl_context
764 = add_deferred_decl_context (decl, gnat_scope, force_global);
766 /* External declarations (when force_global > 0) may not be in a
768 else if (current_function_decl && force_global == 0)
769 context = current_function_decl;
772 /* If either we are forced to be in global mode or if both the GNAT scope and
773 the current_function_decl did not help in determining the context, use the
775 if (!deferred_decl_context && !context)
776 context = get_global_context ();
778 /* Functions imported in another function are not really nested.
779 For really nested functions mark them initially as needing
780 a static chain for uses of that flag before unnesting;
781 lower_nested_functions will then recompute it. */
782 if (TREE_CODE (decl) == FUNCTION_DECL
783 && !TREE_PUBLIC (decl)
785 && (TREE_CODE (context) == FUNCTION_DECL
786 || decl_function_context (context)))
787 DECL_STATIC_CHAIN (decl) = 1;
789 if (!deferred_decl_context)
790 DECL_CONTEXT (decl) = context;
792 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
794 /* Set the location of DECL and emit a declaration for it. */
795 if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
796 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
798 add_decl_expr (decl, gnat_node);
800 /* Put the declaration on the list. The list of declarations is in reverse
801 order. The list will be reversed later. Put global declarations in the
802 globals list and local ones in the current block. But skip TYPE_DECLs
803 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
804 with the debugger and aren't needed anyway. */
805 if (!(TREE_CODE (decl) == TYPE_DECL
806 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
808 /* External declarations must go to the binding level they belong to.
809 This will make sure corresponding imported entities are available in
810 the debugger at the proper time. */
811 if (DECL_EXTERNAL (decl)
812 && TREE_CODE (decl) == FUNCTION_DECL
813 && fndecl_built_in_p (decl))
814 vec_safe_push (builtin_decls, decl);
815 else if (global_bindings_p ())
816 vec_safe_push (global_decls, decl);
819 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
820 BLOCK_VARS (current_binding_level->block) = decl;
824 /* For the declaration of a type, set its name either if it isn't already
825 set or if the previous type name was not derived from a source name.
826 We'd rather have the type named with a real name and all the pointer
827 types to the same object have the same node, except when the names are
828 both derived from source names. */
829 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
831 tree t = TREE_TYPE (decl);
833 /* Array and pointer types aren't tagged types in the C sense so we need
834 to generate a typedef in DWARF for them and make sure it is preserved,
835 unless the type is artificial. */
836 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
837 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
838 || DECL_ARTIFICIAL (decl)))
840 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
841 generate the typedef in DWARF. Also do that for fat pointer types
842 because, even though they are tagged types in the C sense, they are
843 still XUP types attached to the base array type at this point. */
844 else if (!DECL_ARTIFICIAL (decl)
845 && (TREE_CODE (t) == ARRAY_TYPE
846 || TREE_CODE (t) == POINTER_TYPE
847 || TYPE_IS_FAT_POINTER_P (t)))
849 tree tt = build_variant_type_copy (t);
850 TYPE_NAME (tt) = decl;
851 defer_or_set_type_context (tt,
853 deferred_decl_context);
854 TREE_TYPE (decl) = tt;
856 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
857 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
858 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
860 DECL_ORIGINAL_TYPE (decl) = t;
861 /* Array types need to have a name so that they can be related to
862 their GNAT encodings. */
863 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
864 TYPE_NAME (t) = DECL_NAME (decl);
867 else if (TYPE_NAME (t)
868 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
869 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
874 /* Propagate the name to all the variants, this is needed for the type
875 qualifiers machinery to work properly (see check_qualified_type).
876 Also propagate the context to them. Note that it will be propagated
877 to all parallel types too thanks to gnat_set_type_context. */
879 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
880 /* ??? Because of the previous kludge, we can have variants of fat
881 pointer types with different names. */
882 if (!(TYPE_IS_FAT_POINTER_P (t)
884 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
886 TYPE_NAME (t) = decl;
887 defer_or_set_type_context (t,
889 deferred_decl_context);
894 /* Create a record type that contains a SIZE bytes long field of TYPE with a
895 starting bit position so that it is aligned to ALIGN bits, and leaving at
896 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
897 record is guaranteed to get. GNAT_NODE is used for the position of the
898 associated TYPE_DECL. */
/* NOTE(review): truncated in this dump -- missing (by the embedded
   numbering) the return-type line and braces, the record_addr_st
   declaration line (910), the bitsize_unit_node factor of the POS product
   (946-947), the tail arguments of create_field_decl (962), the second
   factor of the TYPE_SIZE product (971), and the trailing statements
   including the return (983-986).  */
901 make_aligning_type (tree type, unsigned int align, tree size,
902 unsigned int base_align, int room, Node_Id gnat_node)
904 /* We will be crafting a record type with one field at a position set to be
905 the next multiple of ALIGN past record'address + room bytes. We use a
906 record placeholder to express record'address. */
907 tree record_type = make_node (RECORD_TYPE);
908 tree record = build0 (PLACEHOLDER_EXPR, record_type);
911 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
913 /* The diagram below summarizes the shape of what we manipulate:
915 <--------- pos ---------->
916 { +------------+-------------+-----------------+
917 record =>{ |############| ... | field (type) |
918 { +------------+-------------+-----------------+
919 |<-- room -->|<- voffset ->|<---- size ----->|
922 record_addr vblock_addr
924 Every length is in sizetype bytes there, except "pos" which has to be
925 set as a bit position in the GCC tree for the record. */
926 tree room_st = size_int (room);
927 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
928 tree voffset_st, pos, field;
930 tree name = TYPE_IDENTIFIER (type);
932 name = concat_name (name, "ALIGN");
933 TYPE_NAME (record_type) = name;
935 /* Compute VOFFSET and then POS. The next byte position multiple of some
936 alignment after some address is obtained by "and"ing the alignment minus
937 1 with the two's complement of the address. */
938 voffset_st = size_binop (BIT_AND_EXPR,
939 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
940 size_int ((align / BITS_PER_UNIT) - 1));
942 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
943 pos = size_binop (MULT_EXPR,
944 convert (bitsizetype,
945 size_binop (PLUS_EXPR, room_st, voffset_st)),
948 /* Craft the GCC record representation. We exceptionally do everything
949 manually here because 1) our generic circuitry is not quite ready to
950 handle the complex position/size expressions we are setting up, 2) we
951 have a strong simplifying factor at hand: we know the maximum possible
952 value of voffset, and 3) we have to set/reset at least the sizes in
953 accordance with this maximum value anyway, as we need them to convey
954 what should be "alloc"ated for this type.
956 Use -1 as the 'addressable' indication for the field to prevent the
957 creation of a bitfield. We don't need one, it would have damaging
958 consequences on the alignment computation, and create_field_decl would
959 make one without this special argument, for instance because of the
960 complex position expression. */
961 field = create_field_decl (get_identifier ("F"), type, record_type, size,
963 TYPE_FIELDS (record_type) = field;
965 SET_TYPE_ALIGN (record_type, base_align);
966 TYPE_USER_ALIGN (record_type) = 1;
968 TYPE_SIZE (record_type)
969 = size_binop (PLUS_EXPR,
970 size_binop (MULT_EXPR, convert (bitsizetype, size),
972 bitsize_int (align + room * BITS_PER_UNIT));
973 TYPE_SIZE_UNIT (record_type)
974 = size_binop (PLUS_EXPR, size,
975 size_int (room + align / BITS_PER_UNIT));
977 SET_TYPE_MODE (record_type, BLKmode);
978 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
980 /* Declare it now since it will never be declared otherwise. This is
981 necessary to ensure that its subtrees are properly marked. */
982 create_type_decl (name, record_type, true, false, gnat_node);
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view (e.g. the early returns).  */
987 /* TYPE is an ARRAY_TYPE that is being used as the type of a field in a packed
988 record. See if we can rewrite it as a type that has non-BLKmode, which we
989 can pack tighter in the packed record. If so, return the new type; if not,
990 return the original type. */
993 make_packable_array_type (tree type)
995 const unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
996 unsigned HOST_WIDE_INT new_size;
997 unsigned int new_align;
999 /* No point in doing anything if the size is either zero or too large for an
1000 integral mode, or if the type already has non-BLKmode. */
1001 if (size == 0 || size > MAX_FIXED_MODE_SIZE || TYPE_MODE (type) != BLKmode)
1004 /* Punt if the component type is an aggregate type for now. */
1005 if (AGGREGATE_TYPE_P (TREE_TYPE (type)))
1008 tree new_type = copy_type (type);
/* Round the size up to a power of 2 so mode_for_size below can find an
   integral mode of exactly that width.  */
1010 new_size = ceil_pow2 (size);
1011 new_align = MIN (new_size, BIGGEST_ALIGNMENT);
1012 SET_TYPE_ALIGN (new_type, new_align);
1014 TYPE_SIZE (new_type) = bitsize_int (new_size);
1015 TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
1017 SET_TYPE_MODE (new_type, mode_for_size (new_size, MODE_INT, 1).else_blk ());
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view (braces, else branches and
   early returns among them).  */
1022 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
1023 as the type of a field in a packed record if IN_RECORD is true, or as
1024 the component type of a packed array if IN_RECORD is false. See if we
1025 can rewrite it either as a type that has non-BLKmode, which we can pack
1026 tighter in the packed record case, or as a smaller type with at most
1027 MAX_ALIGN alignment if the value is non-zero. If so, return the new
1028 type; if not, return the original type. */
1031 make_packable_type (tree type, bool in_record, unsigned int max_align)
1033 const unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
1034 const unsigned int align = TYPE_ALIGN (type);
1035 unsigned HOST_WIDE_INT new_size;
1036 unsigned int new_align;
1038 /* No point in doing anything if the size is zero. */
1042 tree new_type = make_node (TREE_CODE (type));
1044 /* Copy the name and flags from the old type to that of the new.
1045 Note that we rely on the pointer equality created here for
1046 TYPE_NAME to look through conversions in various places. */
1047 TYPE_NAME (new_type) = TYPE_NAME (type);
1048 TYPE_PACKED (new_type) = 1;
1049 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
1050 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
1051 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
1052 if (TREE_CODE (type) == RECORD_TYPE)
1053 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
1055 /* If we are in a record and have a small size, set the alignment to
1056 try for an integral mode. Otherwise set it to try for a smaller
1057 type with BLKmode. */
1058 if (in_record && size <= MAX_FIXED_MODE_SIZE)
1060 new_size = ceil_pow2 (size);
1061 new_align = MIN (new_size, BIGGEST_ALIGNMENT);
1062 SET_TYPE_ALIGN (new_type, new_align);
1066 tree type_size = TYPE_ADA_SIZE (type);
1067 /* Do not try to shrink the size if the RM size is not constant. */
1068 if (TYPE_CONTAINS_TEMPLATE_P (type)
1069 || !tree_fits_uhwi_p (type_size))
1072 /* Round the RM size up to a unit boundary to get the minimal size
1073 for a BLKmode record. Give up if it's already the size and we
1074 don't need to lower the alignment. */
1075 new_size = tree_to_uhwi (type_size);
1076 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
1077 if (new_size == size && (max_align == 0 || align <= max_align))
/* new_size & -new_size isolates the lowest set bit, i.e. the largest
   power of 2 that divides the rounded size.  */
1080 new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
1081 if (max_align > 0 && new_align > max_align)
1082 new_align = max_align;
1083 SET_TYPE_ALIGN (new_type, MIN (align, new_align));
1086 TYPE_USER_ALIGN (new_type) = 1;
1088 /* Now copy the fields, keeping the position and size as we don't want
1089 to change the layout by propagating the packedness downwards. */
1090 tree new_field_list = NULL_TREE;
1091 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1093 tree new_field_type = TREE_TYPE (field);
1094 tree new_field, new_field_size;
1096 if (AGGREGATE_TYPE_P (new_field_type)
1097 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
1099 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1100 && !TYPE_FAT_POINTER_P (new_field_type)
1102 = make_packable_type (new_field_type, true, max_align);
1105 && max_align < BITS_PER_UNIT
1106 && TREE_CODE (new_field_type) == ARRAY_TYPE)
1107 new_field_type = make_packable_array_type (new_field_type);
1110 /* However, for the last field in a not already packed record type
1111 that is of an aggregate type, we need to use the RM size in the
1112 packable version of the record type, see finish_record_type. */
1113 if (!DECL_CHAIN (field)
1114 && !TYPE_PACKED (type)
1115 && RECORD_OR_UNION_TYPE_P (new_field_type)
1116 && !TYPE_FAT_POINTER_P (new_field_type)
1117 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1118 && TYPE_ADA_SIZE (new_field_type))
1119 new_field_size = TYPE_ADA_SIZE (new_field_type);
1121 new_field_size = DECL_SIZE (field);
1123 /* This is a layout with full representation, alignment and size clauses
1124 so we simply pass 0 as PACKED like gnat_to_gnu_field in this case. */
1126 = create_field_decl (DECL_NAME (field), new_field_type, new_type,
1127 new_field_size, bit_position (field), 0,
1128 !DECL_NONADDRESSABLE_P (field));
1130 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
1131 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
1132 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1133 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);
/* Fields are prepended here and put back in order by the nreverse call
   before finish_record_type below.  */
1135 DECL_CHAIN (new_field) = new_field_list;
1136 new_field_list = new_field;
1139 /* If this is a padding record, we never want to make the size smaller
1140 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1141 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1143 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1144 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1149 TYPE_SIZE (new_type) = bitsize_int (new_size);
1150 TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
1153 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1154 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1156 finish_record_type (new_type, nreverse (new_field_list), 2, false);
1157 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
1158 if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1159 SET_TYPE_DEBUG_TYPE (new_type, TYPE_DEBUG_TYPE (type));
1160 else if (TYPE_STUB_DECL (type))
1161 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1162 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
1164 /* Try harder to get a packable type if necessary, for example
1165 in case the record itself contains a BLKmode field. */
1166 if (in_record && TYPE_MODE (new_type) == BLKmode)
1167 SET_TYPE_MODE (new_type,
1168 mode_for_size_tree (TYPE_SIZE (new_type),
1169 MODE_INT, 1).else_blk ())
1171 /* If neither mode nor size nor alignment shrunk, return the old type. */
1172 if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view (the return statements).  */
1178 /* Return true if TYPE has an unsigned representation. This needs to be used
1179 when the representation of types whose precision is not equal to their size
1180 is manipulated based on the RM size. */
1183 type_unsigned_for_rm (tree type)
1185 /* This is the common case. */
1186 if (TYPE_UNSIGNED (type))
1189 /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
/* A nominally signed subtype still has an unsigned representation when its
   lower bound is a compile-time constant that is >= 0.  */
1190 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1191 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view (case labels, returns and
   the default switch arm among them).  */
1197 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1198 If TYPE is the best type, return it. Otherwise, make a new type. We
1199 only support new integral and pointer types. FOR_BIASED is true if
1200 we are making a biased type. */
1203 make_type_from_size (tree type, tree size_tree, bool for_biased)
1205 unsigned HOST_WIDE_INT size;
1209 /* If size indicates an error, just return TYPE to avoid propagating
1210 the error. Likewise if it's too large to represent. */
1211 if (!size_tree || !tree_fits_uhwi_p (size_tree))
1214 size = tree_to_uhwi (size_tree);
1216 switch (TREE_CODE (type))
1219 /* Do not mess with boolean types that have foreign convention. */
1220 if (TYPE_PRECISION (type) == 1 && TYPE_SIZE (type) == size_tree)
1223 /* ... fall through ... */
1227 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1228 && TYPE_BIASED_REPRESENTATION_P (type));
1230 /* Integer types with precision 0 are forbidden. */
1234 /* Only do something if the type isn't a packed array type and doesn't
1235 already have the proper size and the size isn't too large. */
1236 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1237 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1238 || size > LONG_LONG_TYPE_SIZE)
1241 biased_p |= for_biased;
1243 /* The type should be an unsigned type if the original type is unsigned
1244 or if the lower bound is constant and non-negative or if the type is
1245 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1246 if (type_unsigned_for_rm (type) || biased_p)
1247 new_type = make_unsigned_type (size);
1249 new_type = make_signed_type (size);
1250 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1251 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1252 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
1253 /* Copy the name to show that it's essentially the same type and
1254 not a subrange type. */
1255 TYPE_NAME (new_type) = TYPE_NAME (type);
1256 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1257 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1261 /* Do something if this is a fat pointer, in which case we
1262 may need to return the thin pointer. */
1263 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1265 scalar_int_mode p_mode;
1266 if (!int_mode_for_size (size, 0).exists (&p_mode)
1267 || !targetm.valid_pointer_mode (p_mode))
1270 build_pointer_type_for_mode
1271 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1277 /* Only do something if this is a thin pointer, in which case we
1278 may need to return the fat pointer. */
1279 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1281 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view (the returns and the local
   variable declarations for type1/type2).  */
1291 /* Return true iff the padded types are equivalent. */
1294 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
/* Cheap fast path: different cached hashes cannot be equal.  */
1298 if (t1->hash != t2->hash)
1304 /* We consider that the padded types are equivalent if they pad the same type
1305 and have the same size, alignment, RM size and storage order. Taking the
1306 mode into account is redundant since it is determined by the others. */
1308 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1309 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1310 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1311 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1312 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view (declaration and return).  */
1315 /* Compute the hash value for the padded TYPE. */
1318 hash_pad_type (tree type)
/* Mix in the inner (padded) type, the size, the alignment and the RM size;
   these match the fields compared by pad_type_hasher::equal in this file.  */
1323 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1324 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1325 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1326 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view (slot hit/miss handling).  */
1331 /* Look up the padded TYPE in the hash table and return its canonical version
1332 if it exists; otherwise, insert it into the hash table. */
1335 canonicalize_pad_type (tree type)
1337 const hashval_t hashcode = hash_pad_type (type);
1338 struct pad_type_hash in, *h, **slot;
/* INSERT: on a miss a fresh GC-allocated entry is installed in the slot.  */
1342 slot = pad_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
1346 h = ggc_alloc<pad_type_hash> ();
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view (early returns, labels and
   several condition lines among them).  */
1355 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1356 if needed. We have already verified that SIZE and ALIGN are large enough.
1357 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1358 IS_COMPONENT_TYPE is true if this is being done for the component type of
1359 an array. IS_USER_TYPE is true if the original type needs to be completed.
1360 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1361 the RM size of the resulting type is to be set to SIZE too; in this case,
1362 the padded type is canonicalized before being returned. */
1365 maybe_pad_type (tree type, tree size, unsigned int align,
1366 Entity_Id gnat_entity, bool is_component_type,
1367 bool is_user_type, bool definition, bool set_rm_size)
1369 tree orig_size = TYPE_SIZE (type);
1370 unsigned int orig_align = TYPE_ALIGN (type);
1373 /* If TYPE is a padded type, see if it agrees with any size and alignment
1374 we were given. If so, return the original type. Otherwise, strip
1375 off the padding, since we will either be returning the inner type
1376 or repadding it. If no size or alignment is specified, use that of
1377 the original padded type. */
1378 if (TYPE_IS_PADDING_P (type))
1381 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1382 && (align == 0 || align == orig_align))
/* Strip the padding: work from now on with the inner type.  */
1390 type = TREE_TYPE (TYPE_FIELDS (type));
1391 orig_size = TYPE_SIZE (type);
1392 orig_align = TYPE_ALIGN (type);
1395 /* If the size is either not being changed or is being made smaller (which
1396 is not done here and is only valid for bitfields anyway), show the size
1397 isn't changing. Likewise, clear the alignment if it isn't being
1398 changed. Then return if we aren't doing anything. */
1400 && (operand_equal_p (size, orig_size, 0)
1401 || (TREE_CODE (orig_size) == INTEGER_CST
1402 && tree_int_cst_lt (size, orig_size))))
1405 if (align == orig_align)
1408 if (align == 0 && !size)
1411 /* If requested, complete the original type and give it a name. */
1413 create_type_decl (get_entity_name (gnat_entity), type,
1414 !Comes_From_Source (gnat_entity),
1416 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1417 && DECL_IGNORED_P (TYPE_NAME (type))),
1420 /* We used to modify the record in place in some cases, but that could
1421 generate incorrect debugging information. So make a new record
1423 record = make_node (RECORD_TYPE);
1424 TYPE_PADDING_P (record) = 1;
1426 /* ??? Padding types around packed array implementation types will be
1427 considered as root types in the array descriptor language hook (see
1428 gnat_get_array_descr_info). Give them the original packed array type
1429 name so that the one coming from sources appears in the debugging
1431 if (TYPE_IMPL_PACKED_ARRAY_P (type)
1432 && TYPE_ORIGINAL_PACKED_ARRAY (type)
1433 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1434 TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
1435 else if (Present (gnat_entity))
1436 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD")
1438 SET_TYPE_ALIGN (record, align ? align : orig_align);
1439 TYPE_SIZE (record) = size ? size : orig_size;
1440 TYPE_SIZE_UNIT (record)
1441 = convert (sizetype,
1442 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1443 bitsize_unit_node));
1445 /* If we are changing the alignment and the input type is a record with
1446 BLKmode and a small constant size, try to make a form that has an
1447 integral mode. This might allow the padding record to also have an
1448 integral mode, which will be much more efficient. There is no point
1449 in doing so if a size is specified unless it is also a small constant
1450 size and it is incorrect to do so if we cannot guarantee that the mode
1451 will be naturally aligned since the field must always be addressable.
1453 ??? This might not always be a win when done for a stand-alone object:
1454 since the nominal and the effective type of the object will now have
1455 different modes, a VIEW_CONVERT_EXPR will be required for converting
1456 between them and it might be hard to overcome afterwards, including
1457 at the RTL level when the stand-alone object is accessed as a whole. */
1459 && RECORD_OR_UNION_TYPE_P (type)
1460 && TYPE_MODE (type) == BLKmode
1461 && !TYPE_BY_REFERENCE_P (type)
1462 && TREE_CODE (orig_size) == INTEGER_CST
1463 && !TREE_OVERFLOW (orig_size)
1464 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1466 || (TREE_CODE (size) == INTEGER_CST
1467 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1469 tree packable_type = make_packable_type (type, true, align);
1470 if (TYPE_MODE (packable_type) != BLKmode
1471 && align >= TYPE_ALIGN (packable_type))
1472 type = packable_type;
1475 /* Now create the field with the original size. */
1476 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1477 bitsize_zero_node, 0, 1);
1478 DECL_INTERNAL_P (field) = 1;
1480 /* We will output additional debug info manually below. */
1481 finish_record_type (record, field, 1, false);
1483 /* Set the RM size if requested. */
1486 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1488 /* If the padded type is complete and has constant size, we canonicalize
1489 it by means of the hash table. This is consistent with the language
1490 semantics and ensures that gigi and the middle-end have a common view
1491 of these padded types. */
1492 if (TREE_CONSTANT (TYPE_SIZE (record)))
1494 tree canonical = canonicalize_pad_type (record);
1495 if (canonical != record)
1503 if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1504 SET_TYPE_DEBUG_TYPE (record, maybe_debug_type (type));
1506 /* Unless debugging information isn't being written for the input type,
1507 write a record that shows what we are a subtype of and also make a
1508 variable that indicates our size, if still variable. */
1509 if (TREE_CODE (orig_size) != INTEGER_CST
1510 && TYPE_NAME (record)
1512 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1513 && DECL_IGNORED_P (TYPE_NAME (type))))
1515 tree name = TYPE_IDENTIFIER (record);
1516 tree size_unit = TYPE_SIZE_UNIT (record);
1518 /* A variable that holds the size is required even with no encoding since
1519 it will be referenced by debugging information attributes. At global
1520 level, we need a single variable across all translation units. */
1522 && TREE_CODE (size) != INTEGER_CST
1523 && (definition || global_bindings_p ()))
1525 /* Whether or not gnat_entity comes from source, this XVZ variable is
1526 is a compilation artifact. */
1528 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1529 size_unit, true, global_bindings_p (),
1530 !definition && global_bindings_p (), false,
1531 false, true, true, NULL, gnat_entity);
1532 TYPE_SIZE_UNIT (record) = size_unit;
1535 /* There is no need to show what we are a subtype of when outputting as
1536 few encodings as possible: regular debugging infomation makes this
1538 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1540 tree marker = make_node (RECORD_TYPE);
1541 tree orig_name = TYPE_IDENTIFIER (type);
1543 TYPE_NAME (marker) = concat_name (name, "XVS");
1544 finish_record_type (marker,
1545 create_field_decl (orig_name,
1546 build_reference_type (type),
1547 marker, NULL_TREE, NULL_TREE,
1550 TYPE_SIZE_UNIT (marker) = size_unit;
1552 add_parallel_type (record, marker);
1557 /* If a simple size was explicitly given, maybe issue a warning. */
1559 || TREE_CODE (size) == COND_EXPR
1560 || TREE_CODE (size) == MAX_EXPR
1561 || No (gnat_entity))
1564 /* But don't do it if we are just annotating types and the type is tagged or
1565 concurrent, since these types aren't fully laid out in this mode. */
1566 if (type_annotate_only)
1570 ? Component_Type (gnat_entity) : Etype (gnat_entity);
1572 if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1576 /* Take the original size as the maximum size of the input if there was an
1577 unconstrained record involved and round it up to the specified alignment,
1578 if one was specified, but only for aggregate types. */
1579 if (CONTAINS_PLACEHOLDER_P (orig_size))
1580 orig_size = max_size (orig_size, true);
1582 if (align && AGGREGATE_TYPE_P (type))
1583 orig_size = round_up (orig_size, align);
1585 if (!operand_equal_p (size, orig_size, 0)
1586 && !(TREE_CODE (size) == INTEGER_CST
1587 && TREE_CODE (orig_size) == INTEGER_CST
1588 && (TREE_OVERFLOW (size)
1589 || TREE_OVERFLOW (orig_size)
1590 || tree_int_cst_lt (size, orig_size))))
1592 Node_Id gnat_error_node;
1594 /* For a packed array, post the message on the original array type. */
1595 if (Is_Packed_Array_Impl_Type (gnat_entity))
1596 gnat_entity = Original_Array_Type (gnat_entity);
1598 if ((Ekind (gnat_entity) == E_Component
1599 || Ekind (gnat_entity) == E_Discriminant)
1600 && Present (Component_Clause (gnat_entity)))
1601 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1602 else if (Has_Size_Clause (gnat_entity))
1603 gnat_error_node = Expression (Size_Clause (gnat_entity));
1604 else if (Has_Object_Size_Clause (gnat_entity))
1605 gnat_error_node = Expression (Object_Size_Clause (gnat_entity));
1607 gnat_error_node = Empty;
1609 /* Generate message only for entities that come from source, since
1610 if we have an entity created by expansion, the message will be
1611 generated for some other corresponding source entity. */
1612 if (Comes_From_Source (gnat_entity))
1614 if (is_component_type)
1615 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1616 gnat_entity, gnat_entity,
1617 size_diffop (size, orig_size));
1618 else if (Present (gnat_error_node))
1619 post_error_ne_tree ("{^ }bits of & unused?",
1620 gnat_error_node, gnat_entity,
1621 size_diffop (size, orig_size));
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view (an early return and the
   initialization of IN).  */
1628 /* Return true if padded TYPE was built with an RM size. */
1631 pad_type_has_rm_size (tree type)
1633 /* This is required for the lookup. */
1634 if (!TREE_CONSTANT (TYPE_SIZE (type)))
1637 const hashval_t hashcode = hash_pad_type (type);
1638 struct pad_type_hash in, *h;
/* NO_INSERT-style probe: a NULL result means TYPE was never canonicalized. */
1642 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1644 /* The types built with an RM size are the canonicalized ones. */
1645 return h && h->type == type;
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view (the flag_checking guard
   around the assertions, presumably -- confirm against full source).  */
1648 /* Return a copy of the padded TYPE but with reverse storage order. */
1651 set_reverse_storage_order_on_pad_type (tree type)
1655 /* If the inner type is not scalar then the function does nothing. */
1656 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1657 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1658 && !VECTOR_TYPE_P (inner_type));
1661 /* This is required for the canonicalization. */
1662 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
/* Copy both the record and its single field so the original padded type
   is left untouched, then flip the storage order on the copy.  */
1664 tree field = copy_node (TYPE_FIELDS (type));
1665 type = copy_type (type);
1666 DECL_CONTEXT (field) = type;
1667 TYPE_FIELDS (type) = field;
1668 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1669 return canonicalize_pad_type (type);
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view (switch/case braces, the
   default arm, and the function's closing lines).  */
1672 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1673 If this is a multi-dimensional array type, do this recursively.
1676 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1677 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1678 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1681 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1683 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1684 of a one-dimensional array, since the padding has the same alias set
1685 as the field type, but if it's a multi-dimensional array, we need to
1686 see the inner types. */
1687 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1688 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1689 || TYPE_PADDING_P (gnu_old_type)))
1690 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1692 /* Unconstrained array types are deemed incomplete and would thus be given
1693 alias set 0. Retrieve the underlying array type. */
1694 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1696 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1697 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1699 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
/* Recurse on the element types of multi-dimensional arrays.  */
1701 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1702 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1703 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1704 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1708 case ALIAS_SET_COPY:
1709 /* The alias set shouldn't be copied between array types with different
1710 aliasing settings because this can break the aliasing relationship
1711 between the array type and its element type. */
1712 if (flag_checking || flag_strict_aliasing)
1713 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1714 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1715 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1716 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1718 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1721 case ALIAS_SET_SUBSET:
1722 case ALIAS_SET_SUPERSET:
1724 alias_set_type old_set = get_alias_set (gnu_old_type);
1725 alias_set_type new_set = get_alias_set (gnu_new_type);
1727 /* Do nothing if the alias sets conflict. This ensures that we
1728 never call record_alias_subset several times for the same pair
1729 or at all for alias set 0. */
1730 if (!alias_sets_conflict_p (old_set, new_set))
1732 if (op == ALIAS_SET_SUBSET)
1733 record_alias_subset (old_set, new_set);
1735 record_alias_subset (new_set, old_set);
1744 record_component_aliases (gnu_new_type);
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view.  */
1747 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1748 ARTIFICIAL_P is true if the type was generated by the compiler. */
1751 record_builtin_type (const char *name, tree type, bool artificial_p)
1753 tree type_decl = build_decl (input_location,
1754 TYPE_DECL, get_identifier (name), type);
1755 DECL_ARTIFICIAL (type_decl) = artificial_p;
1756 TYPE_ARTIFICIAL (type) = artificial_p;
1757 gnat_pushdecl (type_decl, Empty);
/* Notify the debug back-end only if it registered a type_decl hook.  */
1759 if (debug_hooks->type_decl)
1760 debug_hooks->type_decl (type_decl, false);
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view (the early return for the
   unsigned case among them).  */
1763 /* Finish constructing the character type CHAR_TYPE.
1765 In Ada character types are enumeration types and, as a consequence, are
1766 represented in the front-end by integral types holding the positions of
1767 the enumeration values as defined by the language, which means that the
1768 integral types are unsigned.
1770 Unfortunately the signedness of 'char' in C is implementation-defined
1771 and GCC even has the option -f[un]signed-char to toggle it at run time.
1772 Since GNAT's philosophy is to be compatible with C by default, to wit
1773 Interfaces.C.char is defined as a mere copy of Character, we may need
1774 to declare character types as signed types in GENERIC and generate the
1775 necessary adjustments to make them behave as unsigned types.
1777 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1778 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1779 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1780 types. The idea is to ensure that the bit pattern contained in the
1781 Esize'd objects is not changed, even though the numerical value will
1782 be interpreted differently depending on the signedness. */
1785 finish_character_type (tree char_type)
/* Already unsigned: nothing to adjust (the early return is not visible in
   this excerpt).  */
1787 if (TYPE_UNSIGNED (char_type))
1790 /* Make a copy of a generic unsigned version since we'll modify it. */
1791 tree unsigned_char_type
1792 = (char_type == char_type_node
1793 ? unsigned_char_type_node
1794 : copy_type (gnat_unsigned_type_for (char_type)));
1796 /* Create an unsigned version of the type and set it as debug type. */
1797 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1798 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1799 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1800 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1802 /* If this is a subtype, make the debug type a subtype of the debug type
1803 of the base type and convert literal RM bounds to unsigned. */
1804 if (TREE_TYPE (char_type))
1806 tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
1807 tree min_value = TYPE_RM_MIN_VALUE (char_type);
1808 tree max_value = TYPE_RM_MAX_VALUE (char_type);
1810 if (TREE_CODE (min_value) == INTEGER_CST)
1811 min_value = fold_convert (base_unsigned_char_type, min_value);
1812 if (TREE_CODE (max_value) == INTEGER_CST)
1813 max_value = fold_convert (base_unsigned_char_type, max_value);
1815 TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1816 SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1817 SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1820 /* Adjust the RM bounds of the original type to unsigned; that's especially
1821 important for types since they are implicit in this case. */
1822 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1823 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
/* NOTE(review): numbered-listing excerpt; gaps in the numbering below are
   original source lines missing from this view.  */
1826 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1827 finish constructing the record type as a fat pointer type. */
1830 finish_fat_pointer_type (tree record_type, tree field_list)
1832 /* Make sure we can put it into a register. */
1833 if (STRICT_ALIGNMENT)
1834 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1836 /* Show what it really is. */
1837 TYPE_FAT_POINTER_P (record_type) = 1;
1839 /* Do not emit debug info for it since the types of its fields may still be
1840 incomplete at this point. */
1841 finish_record_type (record_type, field_list, 0, false);
1843 /* Force type_contains_placeholder_p to return true on it. Although the
1844 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1845 type but the representation of the unconstrained array. */
1846 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1849 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1850 finish constructing the record or union type. If REP_LEVEL is zero, this
1851 record has no representation clause and so will be entirely laid out here.
1852 If REP_LEVEL is one, this record has a representation clause and has been
1853 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1854 this record is derived from a parent record and thus inherits its layout;
1855 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1856 additional debug info needs to be output for this type. */
1859 finish_record_type (tree record_type, tree field_list, int rep_level,
1862 const enum tree_code orig_code = TREE_CODE (record_type);
/* Record which layout properties were already set on entry (e.g. by a rep
   clause or an earlier pass) so the code below can avoid overriding them.  */
1863 const bool had_size = TYPE_SIZE (record_type) != NULL_TREE;
1864 const bool had_size_unit = TYPE_SIZE_UNIT (record_type) != NULL_TREE;
1865 const bool had_align = TYPE_ALIGN (record_type) > 0;
1866 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1867 out just like a UNION_TYPE, since the size will be fixed. */
1868 const enum tree_code code
1869 = (orig_code == QUAL_UNION_TYPE && rep_level > 0 && had_size
1870 ? UNION_TYPE : orig_code);
1871 tree name = TYPE_IDENTIFIER (record_type);
1872 tree ada_size = bitsize_zero_node;
1873 tree size = bitsize_zero_node;
1876 TYPE_FIELDS (record_type) = field_list;
1878 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1879 generate debug info and have a parallel type. */
1880 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1882 /* Globally initialize the record first. If this is a rep'ed record,
1883 that just means some initializations; otherwise, layout the record. */
1886 if (TYPE_ALIGN (record_type) < BITS_PER_UNIT)
1887 SET_TYPE_ALIGN (record_type, BITS_PER_UNIT);
1890 TYPE_SIZE (record_type) = bitsize_zero_node;
1893 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1897 /* Ensure there isn't a size already set. There can be in an error
1898 case where there is a rep clause but all fields have errors and
1899 no longer have a position. */
1900 TYPE_SIZE (record_type) = NULL_TREE;
1902 /* Ensure we use the traditional GCC layout for bitfields when we need
1903 to pack the record type or have a representation clause. The other
1904 possible layout (Microsoft C compiler), if available, would prevent
1905 efficient packing in almost all cases. */
1906 #ifdef TARGET_MS_BITFIELD_LAYOUT
1907 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1908 decl_attributes (&record_type,
1909 tree_cons (get_identifier ("gcc_struct"),
1910 NULL_TREE, NULL_TREE),
1911 ATTR_FLAG_TYPE_IN_PLACE);
1914 layout_type (record_type);
1917 /* At this point, the position and size of each field is known. It was
1918 either set before entry by a rep clause, or by laying out the type above.
1920 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1921 to compute the Ada size; the GCC size and alignment (for rep'ed records
1922 that are not padding types); and the mode (for rep'ed records). We also
1923 clear the DECL_BIT_FIELD indication for the cases we know have not been
1924 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1926 if (code == QUAL_UNION_TYPE)
1927 field_list = nreverse (field_list);
1929 for (field = field_list; field; field = DECL_CHAIN (field))
1931 tree type = TREE_TYPE (field);
1932 tree pos = bit_position (field);
1933 tree this_size = DECL_SIZE (field);
/* For a nested record/union (other than a fat pointer or a template-bearing
   type), prefer its Ada size over its GCC size when accumulating.  */
1936 if (RECORD_OR_UNION_TYPE_P (type)
1937 && !TYPE_FAT_POINTER_P (type)
1938 && !TYPE_CONTAINS_TEMPLATE_P (type)
1939 && TYPE_ADA_SIZE (type))
1940 this_ada_size = TYPE_ADA_SIZE (type);
1942 this_ada_size = this_size;
1944 const bool variant_part = (TREE_CODE (type) == QUAL_UNION_TYPE);
1945 const bool variant_part_at_zero = variant_part && integer_zerop (pos);
1947 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1948 if (DECL_BIT_FIELD (field)
1949 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1951 const unsigned int align = TYPE_ALIGN (type);
1953 /* In the general case, type alignment is required. */
1954 if (value_factor_p (pos, align))
1956 /* The enclosing record type must be sufficiently aligned.
1957 Otherwise, if no alignment was specified for it and it
1958 has been laid out already, bump its alignment to the
1959 desired one if this is compatible with its size and
1960 maximum alignment, if any. */
1961 if (TYPE_ALIGN (record_type) >= align)
1963 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1964 DECL_BIT_FIELD (field) = 0;
1968 && value_factor_p (TYPE_SIZE (record_type), align)
1969 && (!TYPE_MAX_ALIGN (record_type)
1970 || TYPE_MAX_ALIGN (record_type) >= align))
1972 SET_TYPE_ALIGN (record_type, align);
1973 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1974 DECL_BIT_FIELD (field) = 0;
1978 /* In the non-strict alignment case, only byte alignment is. */
1979 if (!STRICT_ALIGNMENT
1980 && DECL_BIT_FIELD (field)
1981 && value_factor_p (pos, BITS_PER_UNIT))
1982 DECL_BIT_FIELD (field) = 0;
1985 /* Clear DECL_BIT_FIELD_TYPE for a variant part at offset 0, it's simply
1986 not supported by the DECL_BIT_FIELD_REPRESENTATIVE machinery because
1987 the variant part is always the last field in the list. */
1988 if (variant_part_at_zero)
1989 DECL_BIT_FIELD_TYPE (field) = NULL_TREE;
1991 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1992 field is technically not addressable. Except that it can actually
1993 be addressed if it is BLKmode and happens to be properly aligned. */
1994 if (DECL_BIT_FIELD (field)
1995 && !(DECL_MODE (field) == BLKmode
1996 && value_factor_p (pos, BITS_PER_UNIT)))
1997 DECL_NONADDRESSABLE_P (field) = 1;
1999 /* A type must be as aligned as its most aligned field that is not
2000 a bit-field. But this is already enforced by layout_type. */
2001 if (rep_level > 0 && !DECL_BIT_FIELD (field))
2002 SET_TYPE_ALIGN (record_type,
2003 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
2008 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
2009 size = size_binop (MAX_EXPR, size, this_size);
/* For a QUAL_UNION_TYPE, the size of each variant is selected by its
   qualifier, hence the COND_EXPRs built here.  */
2012 case QUAL_UNION_TYPE:
2014 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
2015 this_ada_size, ada_size);
2016 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
2021 /* Since we know here that all fields are sorted in order of
2022 increasing bit position, the size of the record is one
2023 higher than the ending bit of the last field processed,
2024 unless we have a variant part at offset 0, since in this
2025 case we might have a field outside the variant part that
2026 has a higher ending position; so use a MAX in this case.
2027 Also, if this field is a QUAL_UNION_TYPE, we need to take
2028 into account the previous size in the case of empty variants. */
2030 = merge_sizes (ada_size, pos, this_ada_size, variant_part,
2031 variant_part_at_zero);
2033 = merge_sizes (size, pos, this_size, variant_part,
2034 variant_part_at_zero);
/* Put the fields back in their original declaration order.  */
2042 if (code == QUAL_UNION_TYPE)
2043 nreverse (field_list);
2045 /* We need to set the regular sizes if REP_LEVEL is one. */
2048 /* If this is a padding record, we never want to make the size smaller
2049 than what was specified in it, if any. */
2050 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
2051 size = TYPE_SIZE (record_type);
2053 tree size_unit = had_size_unit
2054 ? TYPE_SIZE_UNIT (record_type)
2055 : convert (sizetype,
2056 size_binop (CEIL_DIV_EXPR, size,
2057 bitsize_unit_node));
2058 const unsigned int align = TYPE_ALIGN (record_type);
/* Round the sizes up to the alignment before installing them.  */
2060 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
2061 TYPE_SIZE_UNIT (record_type)
2062 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
2065 /* We need to set the Ada size if REP_LEVEL is zero or one. */
2068 /* Now set any of the values we've just computed that apply. */
2069 if (!TYPE_FAT_POINTER_P (record_type)
2070 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
2071 SET_TYPE_ADA_SIZE (record_type, ada_size);
2074 /* We need to set the mode if REP_LEVEL is one or two. */
2077 compute_record_mode (record_type);
2078 finish_bitfield_layout (record_type);
2081 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
2082 TYPE_MAX_ALIGN (record_type) = 0;
2085 rest_of_record_type_compilation (record_type);
2088 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
2089 PARALLEL_TYPE has no context and its computation is not deferred yet, also
2090 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
2091 moment TYPE will get a context. */
2094 add_parallel_type (tree type, tree parallel_type)
2096 tree decl = TYPE_STUB_DECL (type);
/* Parallel types are chained off the stub decls; walk to the end of the
   existing chain and append the new one there.  */
2098 while (DECL_PARALLEL_TYPE (decl))
2099 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
2101 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
2103 /* If PARALLEL_TYPE already has a context, we are done. */
2104 if (TYPE_CONTEXT (parallel_type))
2107 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
2108 it to PARALLEL_TYPE. */
2109 if (TYPE_CONTEXT (type))
2110 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
2112 /* Otherwise TYPE has no context yet. We know it will have one thanks to
2113 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
2114 so we have nothing to do in this case. */
2117 /* Return true if TYPE has a parallel type. */
2120 has_parallel_type (tree type)
2122 tree decl = TYPE_STUB_DECL (type);
/* Parallel types hang off the type's stub decl (see add_parallel_type).  */
2124 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
2127 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
2128 associated with it. It need not be invoked directly in most cases as
2129 finish_record_type takes care of doing so. */
2132 rest_of_record_type_compilation (tree record_type)
2134 bool var_size = false;
2137 /* If this is a padded type, the bulk of the debug info has already been
2138 generated for the field's type. */
2139 if (TYPE_IS_PADDING_P (record_type))
2142 /* If the type already has a parallel type (XVS type), then we're done. */
2143 if (has_parallel_type (record_type))
2146 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
2148 /* We need to make an XVE/XVU record if any field has variable size,
2149 whether or not the record does. For example, if we have a union,
2150 it may be that all fields, rounded up to the alignment, have the
2151 same size, in which case we'll use that size. But the debug
2152 output routines (except Dwarf2) won't be able to output the fields,
2153 so we need to make the special record. */
2154 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
2155 /* If a field has a non-constant qualifier, the record will have
2156 variable size too. */
2157 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
2158 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
2165 /* If this record type is of variable size, make a parallel record type that
2166 will tell the debugger how the former is laid out (see exp_dbug.ads). */
2167 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
2169 tree new_record_type
2170 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
2171 ? UNION_TYPE : TREE_CODE (record_type))
2172 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
2173 tree last_pos = bitsize_zero_node;
2174 tree old_field, prev_old_field = NULL_TREE;
2177 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
2179 TYPE_NAME (new_record_type) = new_name;
2180 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
2181 TYPE_STUB_DECL (new_record_type)
2182 = create_type_stub_decl (new_name, new_record_type);
2183 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
2184 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
2185 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
2186 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
2187 TYPE_SIZE_UNIT (new_record_type)
2188 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2190 /* Now scan all the fields, replacing each field with a new field
2191 corresponding to the new encoding. */
2192 for (old_field = TYPE_FIELDS (record_type); old_field;
2193 old_field = DECL_CHAIN (old_field))
2195 tree field_type = TREE_TYPE (old_field);
2196 tree field_name = DECL_NAME (old_field);
2197 tree curpos = fold_bit_position (old_field);
2198 tree pos, new_field;
2200 unsigned int align = 0;
2202 /* See how the position was modified from the last position.
2204 There are two basic cases we support: a value was added
2205 to the last position or the last position was rounded to
2206 a boundary and then something was added. Check for the
2207 first case first. If not, see if there is any evidence
2208 of rounding. If so, round the last position and retry.
2210 If this is a union, the position can be taken as zero. */
2211 if (TREE_CODE (new_record_type) == UNION_TYPE)
2212 pos = bitsize_zero_node;
2214 pos = compute_related_constant (curpos, last_pos);
2217 && TREE_CODE (curpos) == MULT_EXPR
2218 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
2220 tree offset = TREE_OPERAND (curpos, 0);
2221 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2222 align = scale_by_factor_of (offset, align);
2223 last_pos = round_up (last_pos, align);
2224 pos = compute_related_constant (curpos, last_pos);
2227 && TREE_CODE (curpos) == PLUS_EXPR
2228 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
2229 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
2231 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
2233 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2234 unsigned HOST_WIDE_INT addend
2235 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2237 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
2238 align = scale_by_factor_of (offset, align);
/* ADDEND & -ADDEND isolates the lowest set bit, i.e. the largest
   power of 2 that divides the addend.  */
2239 align = MIN (align, addend & -addend);
2240 last_pos = round_up (last_pos, align);
2241 pos = compute_related_constant (curpos, last_pos);
2243 else if (potential_alignment_gap (prev_old_field, old_field, pos))
2245 align = TYPE_ALIGN (field_type);
2246 last_pos = round_up (last_pos, align);
2247 pos = compute_related_constant (curpos, last_pos);
2250 /* If we can't compute a position, set it to zero.
2252 ??? We really should abort here, but it's too much work
2253 to get this correct for all cases. */
2255 pos = bitsize_zero_node;
2257 /* See if this type is variable-sized and make a pointer type
2258 and indicate the indirection if so. Beware that the debug
2259 back-end may adjust the position computed above according
2260 to the alignment of the field type, i.e. the pointer type
2261 in this case, if we don't preventively counter that. */
2262 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2264 field_type = build_pointer_type (field_type);
2265 if (align != 0 && TYPE_ALIGN (field_type) > align)
2267 field_type = copy_type (field_type);
2268 SET_TYPE_ALIGN (field_type, align);
2273 /* Make a new field name, if necessary. */
2274 if (var || align != 0)
2279 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2280 align / BITS_PER_UNIT);
2282 strcpy (suffix, "XVL");
2284 field_name = concat_name (field_name, suffix);
2288 = create_field_decl (field_name, field_type, new_record_type,
2289 DECL_SIZE (old_field), pos, 0, 0);
/* Fields are prepended here, i.e. chained in reverse order, and the
   chain is flipped back after the loop.  */
2290 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2291 TYPE_FIELDS (new_record_type) = new_field;
2293 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2294 zero. The only time it's not the last field of the record
2295 is when there are other components at fixed positions after
2296 it (meaning there was a rep clause for every field) and we
2297 want to be able to encode them. */
2298 last_pos = size_binop (PLUS_EXPR, curpos,
2299 (TREE_CODE (TREE_TYPE (old_field))
2302 : DECL_SIZE (old_field));
2303 prev_old_field = old_field;
2306 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2308 add_parallel_type (record_type, new_record_type);
2312 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2313 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2314 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2315 replace a value of zero with the old size. If MAX is true, we take the
2316 MAX of the end position of this field with LAST_SIZE. In all other cases,
2317 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2320 merge_sizes (tree last_size, tree first_bit, tree size, bool special, bool max)
2322 tree type = TREE_TYPE (last_size);
2325 if (!special || TREE_CODE (size) != COND_EXPR)
2327 new_size = size_binop (PLUS_EXPR, first_bit, size);
2329 new_size = size_binop (MAX_EXPR, last_size, new_size);
/* SIZE is a COND_EXPR coming from a variant part: recurse on each arm,
   mapping a zero arm (empty variant) back to the previous size.  */
2333 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2334 integer_zerop (TREE_OPERAND (size, 1))
2335 ? last_size : merge_sizes (last_size, first_bit,
2336 TREE_OPERAND (size, 1),
2338 integer_zerop (TREE_OPERAND (size, 2))
2339 ? last_size : merge_sizes (last_size, first_bit,
2340 TREE_OPERAND (size, 2),
2343 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
2344 when fed through SUBSTITUTE_IN_EXPR) into thinking that a constant
2345 size is not constant. */
2346 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2347 new_size = TREE_OPERAND (new_size, 0);
2352 /* Return the bit position of FIELD, in bits from the start of the record,
2353 and fold it as much as possible. This is a tree of type bitsizetype. */
2356 fold_bit_position (const_tree field)
2358 tree offset = DECL_FIELD_OFFSET (field);
/* Push the conversion to bitsizetype into the operands of a top-level
   MULT/PLUS so that size_binop can fold constant operands individually.  */
2359 if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
2360 offset = size_binop (TREE_CODE (offset),
2361 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
2362 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
2364 offset = fold_convert (bitsizetype, offset);
/* Bit position = byte offset scaled to bits, plus the bit offset.  */
2365 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2366 size_binop (MULT_EXPR, offset, bitsize_unit_node));
2369 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2370 related by the addition of a constant. Return that constant if so. */
2373 compute_related_constant (tree op0, tree op1)
2375 tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
/* If both operands are multiples of the same constant, strip the factor,
   compare the inner operands and scale the difference back below.  */
2377 if (TREE_CODE (op0) == MULT_EXPR
2378 && TREE_CODE (op1) == MULT_EXPR
2379 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2380 && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2382 factor = TREE_OPERAND (op0, 1);
2383 op0 = TREE_OPERAND (op0, 0);
2384 op1 = TREE_OPERAND (op1, 0);
/* Split each operand into constant + variable part; the operands are
   related iff the variable parts are identical.  */
2389 op0_cst = split_plus (op0, &op0_var);
2390 op1_cst = split_plus (op1, &op1_var);
2391 result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2393 if (operand_equal_p (op0_var, op1_var, 0))
2394 return factor ? size_binop (MULT_EXPR, factor, result) : result;
2399 /* Utility function of above to split a tree OP which may be a sum, into a
2400 constant part, which is returned, and a variable part, which is stored
2401 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2405 split_plus (tree in, tree *pvar)
2407 /* Strip conversions in order to ease the tree traversal and maximize the
2408 potential for constant or plus/minus discovery. We need to be careful
2409 to always return and set *pvar to bitsizetype trees, but it's worth
2411 in = remove_conversions (in, false);
/* Default: the whole expression is the variable part, constant part 0.  */
2413 *pvar = convert (bitsizetype, in);
2415 if (TREE_CODE (in) == INTEGER_CST)
2417 *pvar = bitsize_zero_node;
2418 return convert (bitsizetype, in);
2420 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2422 tree lhs_var, rhs_var;
2423 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2424 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
/* If neither operand simplified, keep the default *PVAR set above and
   report a zero constant part.  */
2426 if (lhs_var == TREE_OPERAND (in, 0)
2427 && rhs_var == TREE_OPERAND (in, 1))
2428 return bitsize_zero_node;
2430 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2431 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2434 return bitsize_zero_node;
2437 /* Return a copy of TYPE but safe to modify in any way. */
2440 copy_type (tree type)
2442 tree new_type = copy_node (type);
2444 /* Unshare the language-specific data. */
2445 if (TYPE_LANG_SPECIFIC (type))
2447 TYPE_LANG_SPECIFIC (new_type) = NULL;
2448 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2451 /* And the contents of the language-specific slot if needed. */
2452 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2453 && TYPE_RM_VALUES (type))
2455 TYPE_RM_VALUES (new_type) = NULL_TREE;
2456 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2457 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2458 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2461 /* copy_node clears this field instead of copying it, because it is
2462 aliased with TREE_CHAIN. */
2463 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
/* The copy starts with no pointer/reference types to it and is its own
   main variant and canonical type.  */
2465 TYPE_POINTER_TO (new_type) = NULL_TREE;
2466 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2467 TYPE_MAIN_VARIANT (new_type) = new_type;
2468 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2469 TYPE_CANONICAL (new_type) = new_type;
2474 /* Return a subtype of sizetype with range MIN to MAX and whose
2475 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2476 of the associated TYPE_DECL. */
2479 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2481 /* First build a type for the desired range. */
2482 tree type = build_nonshared_range_type (sizetype, min, max);
2484 /* Then set the index type. */
2485 SET_TYPE_INDEX_TYPE (type, index);
/* Declare it: artificial, no debug info, positioned at GNAT_NODE.  */
2486 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2491 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2492 sizetype is used. */
2495 create_range_type (tree type, tree min, tree max)
2502 /* First build a type with the base range. */
2503 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2504 TYPE_MAX_VALUE (type));
2506 /* Then set the actual range. */
/* The GCC bounds keep the base range; the Ada (RM) bounds carry the
   requested MIN/MAX.  */
2507 SET_TYPE_RM_MIN_VALUE (range_type, min);
2508 SET_TYPE_RM_MAX_VALUE (range_type, max);
2513 \f/* Return an extra subtype of TYPE with range MIN to MAX. */
2516 create_extra_subtype (tree type, tree min, tree max)
2518 const bool uns = TYPE_UNSIGNED (type);
2519 const unsigned prec = TYPE_PRECISION (type);
/* Mirror the signedness and precision of the base type.  */
2520 tree subtype = uns ? make_unsigned_type (prec) : make_signed_type (prec);
/* TREE_TYPE links the subtype back to its base type.  */
2522 TREE_TYPE (subtype) = type;
2523 TYPE_EXTRA_SUBTYPE_P (subtype) = 1;
/* The Ada (RM) bounds carry the requested range.  */
2525 SET_TYPE_RM_MIN_VALUE (subtype, min);
2526 SET_TYPE_RM_MAX_VALUE (subtype, max);
2531 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2532 NAME gives the name of the type to be used in the declaration. */
2535 create_type_stub_decl (tree name, tree type)
2537 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
/* Stub decls are always compiler-generated, hence artificial.  */
2538 DECL_ARTIFICIAL (type_decl) = 1;
2539 TYPE_ARTIFICIAL (type) = 1;
2543 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2544 used in the declaration. ARTIFICIAL_P is true if the declaration was
2545 generated by the compiler. DEBUG_INFO_P is true if we need to write
2546 debug information about this type. GNAT_NODE is used for the position
2550 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2553 enum tree_code code = TREE_CODE (type);
/* Whether TYPE already carries a proper TYPE_DECL name.  */
2555 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2558 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2559 gcc_assert (!TYPE_IS_DUMMY_P (type));
2561 /* If the type hasn't been named yet, we're naming it; preserve an existing
2562 TYPE_STUB_DECL that has been attached to it for some purpose. */
2563 if (!is_named && TYPE_STUB_DECL (type))
2565 type_decl = TYPE_STUB_DECL (type);
2566 DECL_NAME (type_decl) = name;
2569 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2571 DECL_ARTIFICIAL (type_decl) = artificial_p;
2572 TYPE_ARTIFICIAL (type) = artificial_p;
2574 /* Add this decl to the current binding level. */
2575 gnat_pushdecl (type_decl, gnat_node);
2577 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2578 causes the name to be also viewed as a "tag" by the debug back-end, with
2579 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2582 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2583 from multiple contexts, and "type_decl" references a copy of it: in such a
2584 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2585 with the mechanism above. */
2586 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2587 TYPE_STUB_DECL (type) = type_decl;
2589 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2590 back-end doesn't support, and for others if we don't need to. */
2591 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2592 DECL_IGNORED_P (type_decl) = 1;
2597 /* Return a VAR_DECL or CONST_DECL node.
2599 NAME gives the name of the variable. ASM_NAME is its assembler name
2600 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2601 the GCC tree for an optional initial expression; NULL_TREE if none.
2603 CONST_FLAG is true if this variable is constant, in which case we might
2604 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2606 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2607 definition to be made visible outside of the current compilation unit, for
2608 instance variable definitions in a package specification.
2610 EXTERN_FLAG is true when processing an external variable declaration (as
2611 opposed to a definition: no storage is to be allocated for the variable).
2613 STATIC_FLAG is only relevant when not at top level and indicates whether
2614 to always allocate storage to the variable.
2616 VOLATILE_FLAG is true if this variable is declared as volatile.
2618 ARTIFICIAL_P is true if the variable was generated by the compiler.
2620 DEBUG_INFO_P is true if we need to write debug information for it.
2622 ATTR_LIST is the list of attributes to be attached to the variable.
2624 GNAT_NODE is used for the position of the decl. */
2627 create_var_decl (tree name, tree asm_name, tree type, tree init,
2628 bool const_flag, bool public_flag, bool extern_flag,
2629 bool static_flag, bool volatile_flag, bool artificial_p,
2630 bool debug_info_p, struct attrib *attr_list,
2631 Node_Id gnat_node, bool const_decl_allowed_p)
2633 /* Whether the object has static storage duration, either explicitly or by
2634 virtue of being declared at the global level. */
2635 const bool static_storage = static_flag || global_bindings_p ();
2637 /* Whether the initializer is constant: for an external object or an object
2638 with static storage duration, we check that the initializer is a valid
2639 constant expression for initializing a static variable; otherwise, we
2640 only check that it is constant. */
2641 const bool init_const
2643 && gnat_types_compatible_p (type, TREE_TYPE (init))
2644 && (extern_flag || static_storage
2645 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2647 : TREE_CONSTANT (init)));
2649 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2650 case the initializer may be used in lieu of the DECL node (as done in
2651 Identifier_to_gnu). This is useful to prevent the need of elaboration
2652 code when an identifier for which such a DECL is made is in turn used
2653 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2654 but extra constraints apply to this choice (see below) and they are not
2655 relevant to the distinction we wish to make. */
2656 const bool constant_p = const_flag && init_const;
2658 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2659 and may be used for scalars in general but not for aggregates. */
2661 = build_decl (input_location,
2663 && const_decl_allowed_p
2664 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2667 /* Detect constants created by the front-end to hold 'reference to function
2668 calls for stabilization purposes. This is needed for renaming. */
2669 if (const_flag && init && POINTER_TYPE_P (type))
/* Peel a COMPOUND_EXPR and conversions to reach the underlying ADDR_EXPR.  */
2672 if (TREE_CODE (inner) == COMPOUND_EXPR)
2673 inner = TREE_OPERAND (inner, 1);
2674 inner = remove_conversions (inner, true);
2675 if (TREE_CODE (inner) == ADDR_EXPR
2676 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2677 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2678 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2679 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2680 DECL_RETURN_VALUE_P (var_decl) = 1;
2683 /* If this is external, throw away any initializations (they will be done
2684 elsewhere) unless this is a constant for which we would like to remain
2685 able to get the initializer. If we are defining a global here, leave a
2686 constant initialization and save any variable elaborations for the
2687 elaboration routine. If we are just annotating types, throw away the
2688 initialization if it isn't a constant. */
2689 if ((extern_flag && !constant_p)
2690 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2693 /* At the global level, a non-constant initializer generates elaboration
2694 statements. Check that such statements are allowed, that is to say,
2695 not violating a No_Elaboration_Code restriction. */
2696 if (init && !init_const && global_bindings_p ())
2697 Check_Elaboration_Code_Allowed (gnat_node);
2699 /* Attach the initializer, if any. */
2700 DECL_INITIAL (var_decl) = init;
2702 /* Directly set some flags. */
2703 DECL_ARTIFICIAL (var_decl) = artificial_p;
2704 DECL_EXTERNAL (var_decl) = extern_flag;
2706 TREE_CONSTANT (var_decl) = constant_p;
2707 TREE_READONLY (var_decl) = const_flag;
2709 /* The object is public if it is external or if it is declared public
2710 and has static storage duration. */
2711 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2713 /* We need to allocate static storage for an object with static storage
2714 duration if it isn't external. */
2715 TREE_STATIC (var_decl) = !extern_flag && static_storage;
/* A volatile object (either by declaration or via its type) is treated as
   having side effects and is forced into memory below.  */
2717 TREE_SIDE_EFFECTS (var_decl)
2718 = TREE_THIS_VOLATILE (var_decl)
2719 = TYPE_VOLATILE (type) | volatile_flag;
2721 if (TREE_SIDE_EFFECTS (var_decl))
2722 TREE_ADDRESSABLE (var_decl) = 1;
2724 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2725 try to fiddle with DECL_COMMON. However, on platforms that don't
2726 support global BSS sections, uninitialized global variables would
2727 go in DATA instead, thus increasing the size of the executable. */
2729 && TREE_CODE (var_decl) == VAR_DECL
2730 && TREE_PUBLIC (var_decl)
2731 && !have_global_bss_p ())
2732 DECL_COMMON (var_decl) = 1;
2734 /* Do not emit debug info if not requested, or for an external constant whose
2735 initializer is not absolute because this would require a global relocation
2736 in a read-only section which runs afoul of the PE-COFF run-time relocation
2742 && initializer_constant_valid_p (init, TREE_TYPE (init))
2743 != null_pointer_node))
2744 DECL_IGNORED_P (var_decl) = 1;
2746 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2747 if (TREE_CODE (var_decl) == VAR_DECL)
2748 process_attributes (&var_decl, &attr_list, true, gnat_node);
2750 /* Add this decl to the current binding level. */
2751 gnat_pushdecl (var_decl, gnat_node);
2753 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2755 /* Let the target mangle the name if this isn't a verbatim asm. */
2756 if (*IDENTIFIER_POINTER (asm_name) != '*')
2757 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2759 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2765 /* Return true if TYPE, an aggregate type, contains (or is) an array.
2766 If SELF_REFERENTIAL is true, then an additional requirement on the
2767 array is that it be self-referential. */
2770 aggregate_type_contains_array_p (tree type, bool self_referential)
2772 switch (TREE_CODE (type))
2776 case QUAL_UNION_TYPE:
/* For record/union kinds: scan the fields and recurse into nested
   aggregates.  */
2779 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2780 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2781 && aggregate_type_contains_array_p (TREE_TYPE (field),
/* TYPE itself is an array here; when SELF_REFERENTIAL, additionally
   require that it contain a placeholder.  */
2788 return self_referential ? type_contains_placeholder_p (type) : true;
/* Return a FIELD_DECL node.  NAME is the field's name, TYPE is its type and
   RECORD_TYPE is the type of the enclosing record.  If SIZE is nonzero, it
   is the specified size of the field.  If POS is nonzero, it is the bit
   position.  PACKED is 1 if the enclosing record is packed, -1 if it has
   Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
		   int packed, int addressable)
  tree field_decl = build_decl (input_location, FIELD_DECL, name, type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (type);

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
    size = convert (bitsizetype, size);
  else if (packed == 1)
      /* The rm_size is the Ada "representation" size of the type; for a
	 BLKmode type, round it up to a whole byte.  */
      size = rm_size (type);
      if (TYPE_MODE (type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);

  /* If we may, according to ADDRESSABLE, then make a bitfield when the size
     is specified for two reasons: first, when it differs from the natural
     size; second, when the alignment is insufficient.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	  || !tree_int_cst_equal (size, TYPE_SIZE (type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
	  || (TYPE_ALIGN (record_type)
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* With neither packing nor an explicit position, cap the alignment
	 to the record's alignment when the latter is smaller.  */
      if (!packed && !pos)
	  if (TYPE_ALIGN (record_type)
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (type) == BLKmode
		 && AGGREGATE_TYPE_P (type)
		 && aggregate_type_contains_array_p (type, false))))
    SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such considerations apply.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
    const unsigned int field_align
      = DECL_BIT_FIELD (field_decl)
    if (field_align > DECL_ALIGN (field_decl))
      SET_DECL_ALIGN (field_decl, field_align);
    else if (!field_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
	SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
	DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);

      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (tree_fits_uhwi_p (pos))
	known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     tree_fits_uhwi_p (pos)
			     ? BIGGEST_ALIGNMENT : BITS_PER_UNIT);
      /* Split POS into the byte offset and the bit offset within it.  */
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (type))

  /* Note that there is a trade-off in making a field nonaddressable because
     this will cause type-based alias analysis to use the same alias set for
     accesses to the field as for accesses to the whole record: while doing
     so will make it more likely to disambiguate accesses to other objects
     and accesses to the field, it will make it less likely to disambiguate
     accesses to the other fields of the record and accesses to the field.
     If the record is fully static, then the trade-off is irrelevant since
     the fields of the record can always be disambiguated by their offsets
     but, if the record is dynamic, then it can become problematic.  */
  DECL_NONADDRESSABLE_P (field_decl) = !addressable;
/* Return a PARM_DECL node with NAME and TYPE.  */

create_param_decl (tree name, tree type)
  tree param_decl = build_decl (input_location, PARM_DECL, name, type);

  /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
     can lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (NULL_TREE)
      && INTEGRAL_TYPE_P (type)
      && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	    = make_unsigned_type (TYPE_PRECISION (integer_type_node));
	  /* The biased subtype keeps the original bounds but is based on
	     integer_type_node so the promoted ABI width is used.  */
	  TREE_TYPE (subtype) = integer_type_node;
	  TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
	  SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
	  SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
	type = integer_type_node;

  /* Record the type to be used when the parameter is actually passed.  */
  DECL_ARG_TYPE (param_decl) = type;
/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
   a TYPE.  If IN_PLACE is true, the tree pointed to by NODE should not be
   changed.  GNAT_NODE is used for the position of error messages.  */

process_attributes (tree *node, struct attrib **attr_list, bool in_place,
  struct attrib *attr;

  /* Walk the whole attribute list and dispatch on each attribute kind.  */
  for (attr = *attr_list; attr; attr = attr->next)
      case ATTR_MACHINE_ATTRIBUTE:
	/* Point diagnostics at the GNAT node before invoking the generic
	   machine-attribute machinery.  */
	Sloc_to_locus (Sloc (gnat_node), &input_location);
	decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
			 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);

      case ATTR_LINK_ALIAS:
	if (!DECL_EXTERNAL (*node))
	    TREE_STATIC (*node) = 1;
	    assemble_alias (*node, attr->name);

      case ATTR_WEAK_EXTERNAL:
	  declare_weak (*node);
	  /* Fall back to a warning when the target has no weak support.  */
	  post_error ("?weak declarations not supported on this target",

      case ATTR_LINK_SECTION:
	if (targetm_common.have_named_sections)
	    set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
	    DECL_COMMON (*node) = 0;
	  post_error ("?section attributes are not supported for this target",

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;

      case ATTR_THREAD_LOCAL_STORAGE:
	set_decl_tls_model (*node, decl_default_tls_model (*node));
	DECL_COMMON (*node) = 0;
/* Return true if VALUE is known to be a multiple of FACTOR, which must be
   a power of 2.  */

value_factor_p (tree value, unsigned HOST_WIDE_INT factor)
  gcc_checking_assert (pow2p_hwi (factor));

  /* FACTOR being a power of 2, a constant is a multiple of it exactly when
     the bits below FACTOR are all zero.  */
  if (tree_fits_uhwi_p (value))
    return (tree_to_uhwi (value) & (factor - 1)) == 0;

  /* A product is a multiple of FACTOR if either operand is.  */
  if (TREE_CODE (value) == MULT_EXPR)
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
	    || value_factor_p (TREE_OPERAND (value, 1), factor));
/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */

static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
  struct deferred_decl_context_node *new_node;

    = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
  new_node->decl = decl;
  new_node->gnat_scope = gnat_scope;
  new_node->force_global = force_global;
  /* Start with room for one deferred type; more can be pushed later via
     add_deferred_type_context.  */
  new_node->types.create (1);
  /* Link the new node at the head of the global queue.  */
  new_node->next = deferred_decl_context_queue;
  deferred_decl_context_queue = new_node;
/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */

add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
  /* Simply record TYPE on N; process_deferred_decl_context will propagate
     the context to it later.  */
  n->types.safe_push (type);
/* Get the GENERIC node corresponding to GNAT_SCOPE, if available.  Return
   NULL_TREE if it is not available.  */

compute_deferred_decl_context (Entity_Id gnat_scope)
  if (present_gnu_tree (gnat_scope))
    context = get_gnu_tree (gnat_scope);

  if (TREE_CODE (context) == TYPE_DECL)
      const tree context_type = TREE_TYPE (context);

      /* Skip dummy types: only the final ones can appear in the context
	 chain.  */
      if (TYPE_DUMMY_P (context_type))
      /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
	 chain, so use the decl's type instead.  */
	context = context_type;
/* Try to process all deferred nodes in the queue.  Keep in the queue the ones
   that cannot be processed yet, remove the other ones.  If FORCE is true,
   force the processing for all nodes, use the global context when nodes don't
   have a GNU translation.  */

process_deferred_decl_context (bool force)
  struct deferred_decl_context_node **it = &deferred_decl_context_queue;
  struct deferred_decl_context_node *node;

      bool processed = false;
      tree context = NULL_TREE;
      Entity_Id gnat_scope;

      /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
	 get the first scope.  */
      gnat_scope = node->gnat_scope;
      while (Present (gnat_scope))
	  context = compute_deferred_decl_context (gnat_scope);
	  if (!force || context)
	  /* Climb to the enclosing scope and retry.  */
	  gnat_scope = get_debug_scope (gnat_scope, NULL);

      /* Imported declarations must not be in a local context (i.e. not inside
	 a function).  */
      if (context && node->force_global > 0)
	      gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
	      ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);

      /* If FORCE, we want to get rid of all nodes in the queue: in case there
	 was no elaborated scope, use the global context.  */
      if (force && !context)
	context = get_global_context ();

	  DECL_CONTEXT (node->decl) = context;

	  /* Propagate it to the TYPE_CONTEXT attributes of the requested
	     types.  */
	  FOR_EACH_VEC_ELT (node->types, i, t)
	      gnat_set_type_context (t, context);

      /* If this node has been successfully processed, remove it from the
	 queue.  Then move to the next node.  */
	  node->types.release ();
/* Return VALUE scaled by the biggest power-of-2 factor of EXPR.  */

scale_by_factor_of (tree expr, unsigned int value)
  unsigned HOST_WIDE_INT addend = 0;
  unsigned HOST_WIDE_INT factor = 1;

  /* Peel conversions around EXPR and try to extract bodies from function
     calls: it is possible to get the scale factor from size functions.  */
  expr = remove_conversions (expr, true);
  if (TREE_CODE (expr) == CALL_EXPR)
    expr = maybe_inline_call_in_expr (expr);

  /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
     multiple of the scale factor we are looking for.  */
  if (TREE_CODE (expr) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
      && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
      addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      expr = TREE_OPERAND (expr, 0);

  /* An expression which is a bitwise AND with a mask has a power-of-2 factor
     corresponding to the number of trailing zeros of the mask.  */
  if (TREE_CODE (expr) == BIT_AND_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
      unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      /* Count the trailing zero bits of the mask to derive the factor.  */
      while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)

  /* If the addend is not a multiple of the factor we found, give up.  In
     theory we could find a smaller common factor but it's useless for our
     needs.  This situation arises when dealing with a field F1 with no
     alignment requirement but that is following a field F2 with such
     requirements.  As long as we have F2's offset, we don't need alignment
     information to compute F1's.  */
  if (addend % factor != 0)

  return factor * value;
/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exist between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
   is the distance in bits between the end of PREV_FIELD and the starting
   position of CURR_FIELD.  It is ignored if null.  */

potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
  /* If this is the first field of the record, there cannot be any gap */

  /* If the previous field is a union type, then return false: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && tree_fits_uhwi_p (offset))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
      && tree_fits_uhwi_p (bit_position (prev_field)))
    return ((tree_to_uhwi (bit_position (prev_field))
	     + tree_to_uhwi (DECL_SIZE (prev_field)))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))

  /* Fallback, return that there may be a potential gap */
/* Return a LABEL_DECL with NAME.  GNAT_NODE is used for the position of
   the decl.  */

create_label_decl (tree name, Node_Id gnat_node)
    = build_decl (input_location, LABEL_DECL, name, void_type_node);

  /* A label is not an object, so it carries no machine mode.  */
  SET_DECL_MODE (label_decl, VOIDmode);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (label_decl, gnat_node);
/* Return a FUNCTION_DECL node.  NAME is the name of the subprogram, ASM_NAME
   its assembler name, TYPE its type (a FUNCTION_TYPE or METHOD_TYPE node),
   PARAM_DECL_LIST the list of its parameters (a list of PARM_DECL nodes
   chained through the DECL_CHAIN field).

   INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit.

   EXTERN_FLAG is true when processing an external subprogram declaration.

   ARTIFICIAL_P is true if the subprogram was generated by the compiler.

   DEBUG_INFO_P is true if we need to write debug information for it.

   DEFINITION is true if the subprogram is to be considered as a definition.

   ATTR_LIST is the list of attributes to be attached to the subprogram.

   GNAT_NODE is used for the position of the decl.  */

create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
		     enum inline_status_t inline_status, bool public_flag,
		     bool extern_flag, bool artificial_p, bool debug_info_p,
		     bool definition, struct attrib *attr_list,
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  DECL_ARTIFICIAL (subprog_decl) = artificial_p;
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  DECL_FUNCTION_IS_DEF (subprog_decl) = definition;
  DECL_IGNORED_P (subprog_decl) = !debug_info_p;
  TREE_PUBLIC (subprog_decl) = public_flag;

  /* Translate INLINE_STATUS into the corresponding decl flags.  */
  switch (inline_status)
      DECL_UNINLINABLE (subprog_decl) = 1;

      if (Back_End_Inlining)
	  decl_attributes (&subprog_decl,
			   tree_cons (get_identifier ("always_inline"),
				      NULL_TREE, NULL_TREE),
			   ATTR_FLAG_TYPE_IN_PLACE);

	  /* Inline_Always guarantees that every direct call is inlined and
	     that there is no indirect reference to the subprogram, so the
	     instance in the original package (as well as its clones in the
	     client packages created for inter-unit inlining) can be made
	     private, which causes the out-of-line body to be eliminated.  */
	  TREE_PUBLIC (subprog_decl) = 0;

      /* ... fall through ... */

      DECL_DISREGARD_INLINE_LIMITS (subprog_decl) = 1;

      /* ... fall through ... */

      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      if (!Debug_Generated_Code)
	DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Once everything is processed, finish the subprogram declaration.  */
  finish_subprog_decl (subprog_decl, asm_name, type);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
/* Given a subprogram declaration DECL, its assembler name and its type,
   finish constructing the subprogram declaration from ASM_NAME and TYPE.  */

finish_subprog_decl (tree decl, tree asm_name, tree type)
    = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,

  /* Set up the RESULT_DECL that will hold the function's return value.  */
  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_CONTEXT (result_decl) = decl;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
  DECL_RESULT (decl) = result_decl;

  /* Propagate the "const" property.  */
  TREE_READONLY (decl) = TYPE_READONLY (type);

  /* Propagate the "pure" property.  */
  DECL_PURE_P (decl) = TYPE_RESTRICT (type);

  /* Propagate the "noreturn" property.  */
  TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);

      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (decl) = main_identifier_node;
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

begin_subprog_body (tree subprog_decl)
  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  /* The failure of this assertion will likely come from a wrong context for
     the subprogram body, e.g. another procedure for a procedure declared at
     library level.  */
  gcc_assert (current_function_decl == decl_function_context (subprog_decl));

  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;
/* Finish translating the current subprogram and set its BODY.  */

end_subprog_body (tree body)
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the context of the enclosing function, if any.  */
  current_function_decl = decl_function_context (fndecl);
/* Wrap up compilation of SUBPROG_DECL, a subprogram body.  */

rest_of_subprog_body_compilation (tree subprog_decl)
  /* We cannot track the location of errors past this point.  */
  Current_Error_Node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, subprog_decl);

  /* A function with no enclosing function is handed to the cgraph for
     compilation directly.  */
  if (!decl_function_context (subprog_decl))
    cgraph_node::finalize_function (subprog_decl, false);
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node::get_create (subprog_decl);
/* Record DECL as a builtin function by pushing it onto the current binding
   level.  NOTE(review): surrounding lines are not visible here; presumably
   this is the builtin_function langhook and returns DECL — confirm against
   the full source.  */
gnat_builtin_function (tree decl)
  gnat_pushdecl (decl, Empty);
/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  */

gnat_type_for_size (unsigned precision, int unsignedp)
  /* Serve the request from the cache when possible.  */
  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp])
    return signed_and_unsigned_types[precision][unsignedp];

    t = make_unsigned_type (precision);
    t = make_signed_type (precision);
  TYPE_ARTIFICIAL (t) = 1;

  /* Cache the newly created type for later requests.  */
  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  /* Give the type a name of the form [UN]SIGNED_<precision>.  */
  sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
  TYPE_NAME (t) = get_identifier (type_name);
/* Likewise for floating-point types.  */

float_type_for_precision (int precision, machine_mode mode)
  /* Serve the request from the per-mode cache when possible.  */
  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;

  /* Laying out the type must have picked the requested machine mode.  */
  gcc_assert (TYPE_MODE (t) == mode);

  /* Give the type a name of the form FLOAT_<precision>.  */
  sprintf (type_name, "FLOAT_%d", precision);
  TYPE_NAME (t) = get_identifier (type_name);
/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
   an unsigned type; otherwise a signed type is returned.  */

gnat_type_for_mode (machine_mode mode, int unsignedp)
  /* No type can be associated with BLKmode.  */
  if (mode == BLKmode)

  if (mode == VOIDmode)
    return void_type_node;

  if (COMPLEX_MODE_P (mode))

  scalar_float_mode float_mode;
  if (is_a <scalar_float_mode> (mode, &float_mode))
    return float_type_for_precision (GET_MODE_PRECISION (float_mode),

  scalar_int_mode int_mode;
  if (is_a <scalar_int_mode> (mode, &int_mode))
    return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);

  if (VECTOR_MODE_P (mode))
      /* Build the vector type from the type of the inner mode.  */
      machine_mode inner_mode = GET_MODE_INNER (mode);
      tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
	return build_vector_type_for_mode (inner_type, mode);
/* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
   signedness being specified by UNSIGNEDP.  */

gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
  if (type_node == char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;

  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);

  /* For a modular type, preserve the link to the original type so that
     the modulus can still be retrieved from the result.  */
  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
      type = copy_type (type);
      TREE_TYPE (type) = type_node;
  else if (TREE_TYPE (type_node)
	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
      type = copy_type (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

gnat_types_compatible_p (tree t1, tree t2)
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))

  /* Array types are also compatible if they are constrained and have the same
     domain(s), the same component type and the same scalar storage order.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
      && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
/* Return true if EXPR is a useless type conversion.  */

gnat_useless_type_conversion (tree expr)
  /* A conversion is useless when it converts between compatible types.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return gnat_types_compatible_p (TREE_TYPE (expr),
				    TREE_TYPE (TREE_OPERAND (expr, 0)));
/* Return true if T, a {FUNCTION,METHOD}_TYPE, has the specified flags.  */

fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
		     bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
  /* TREE_ADDRESSABLE on a function type encodes return-by-invisible-ref.  */
  return TYPE_CI_CO_LIST (t) == cico_list
	 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
	 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
	 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   Note that the expression may have already been gimplified, in which case
   COND_EXPRs have VOID_TYPE and no operands, and this must be handled.  */

max_size (tree exp, bool max_p)
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    case tcc_declaration:

    case tcc_exceptional:
      gcc_assert (code == SSA_NAME);

      if (code == CALL_EXPR)
	  /* Try to substitute the body of the call and recurse on it.  */
	  t = maybe_inline_call_in_expr (exp);
	    return max_size (t, max_p);

	  /* Otherwise recurse on each argument of the call.  */
	  n = call_expr_nargs (exp);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);

      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	  tree base_type = get_base_type (TREE_TYPE (TREE_OPERAND (exp, 1)));
	    = fold_convert (base_type,
			    ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));

	  /* Walk down the extra subtypes to get more restrictive bounds.  */
	  while (TYPE_IS_EXTRA_SUBTYPE_P (type))
	      type = TREE_TYPE (type);
		val = fold_build2 (MIN_EXPR, base_type, val,
				   fold_convert (base_type,
						 TYPE_MAX_VALUE (type)));
		val = fold_build2 (MAX_EXPR, base_type, val,
				   fold_convert (base_type,
						 TYPE_MIN_VALUE (type)));

	  return fold_convert (type, max_size (val, max_p));

    case tcc_comparison:
      /* A comparison's extreme values are simply true (1) or false (0).  */
      return build_int_cst (type, max_p ? 1 : 0);

      op0 = TREE_OPERAND (exp, 0);

      if (code == NON_LVALUE_EXPR)
	return max_size (op0, max_p);

      if (VOID_TYPE_P (TREE_TYPE (op0)))
	return max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type);

      /* Negation flips which extreme of the operand is wanted.  */
      op0 = max_size (op0, code == NEGATE_EXPR ? !max_p : max_p);

      if (op0 == TREE_OPERAND (exp, 0))

      return fold_build1 (code, type, op0);

      op0 = TREE_OPERAND (exp, 0);
      op1 = TREE_OPERAND (exp, 1);

      /* If we have a multiply-add with a "negative" value in an unsigned
	 type, do a multiply-subtract with the negated value, in order to
	 avoid creating a spurious overflow below.  */
      if (code == PLUS_EXPR
	  && TREE_CODE (op0) == MULT_EXPR
	  && TYPE_UNSIGNED (type)
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (op0, 1))
	  && tree_int_cst_sign_bit (TREE_OPERAND (op0, 1)))
	  op1 = build2 (MULT_EXPR, type, TREE_OPERAND (op0, 0),
			fold_build1 (NEGATE_EXPR, type,
				     TREE_OPERAND (op0, 1)));

      op0 = max_size (op0, max_p);
      /* For a subtraction, the second operand wants the opposite extreme.  */
      op1 = max_size (op1, code == MINUS_EXPR ? !max_p : max_p);

      if ((code == MINUS_EXPR || code == PLUS_EXPR))
	  /* If the op0 has overflowed and the op1 is a variable,
	     propagate the overflow by returning the op0.  */
	  if (TREE_CODE (op0) == INTEGER_CST
	      && TREE_OVERFLOW (op0)
	      && TREE_CODE (op1) != INTEGER_CST)

	  /* If we have a "negative" value in an unsigned type, do the
	     opposite operation on the negated value, in order to avoid
	     creating a spurious overflow below.  */
	  if (TYPE_UNSIGNED (type)
	      && TREE_CODE (op1) == INTEGER_CST
	      && !TREE_OVERFLOW (op1)
	      && tree_int_cst_sign_bit (op1))
	      op1 = fold_build1 (NEGATE_EXPR, type, op1);
	      code = (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR);

      if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))

      /* We need to detect overflows so we call size_binop here.  */
      return size_binop (code, op0, op1);

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	  if (code == SAVE_EXPR)

	  op0 = max_size (TREE_OPERAND (exp, 0),
			  code == TRUTH_NOT_EXPR ? !max_p : max_p);

	  if (op0 == TREE_OPERAND (exp, 0))

	  return fold_build1 (code, type, op0);

	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  op0 = max_size (TREE_OPERAND (exp, 0), max_p);
	  op1 = max_size (TREE_OPERAND (exp, 1), max_p);

	  if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))

	  return fold_build2 (code, type, op0, op1);

	  if (code == COND_EXPR)
	      op0 = TREE_OPERAND (exp, 0);
	      op1 = TREE_OPERAND (exp, 1);
	      op2 = TREE_OPERAND (exp, 2);

	      op1 = max_size (op1, max_p);
	      op2 = max_size (op2, max_p);

	      /* If we have the MAX of a "negative" value in an unsigned type
		 and zero for a length expression, just return zero.  */
		  && TREE_CODE (op0) == LE_EXPR
		  && TYPE_UNSIGNED (type)
		  && TREE_CODE (op1) == INTEGER_CST
		  && !TREE_OVERFLOW (op1)
		  && tree_int_cst_sign_bit (op1)
		  && integer_zerop (op2))

	      /* A COND_EXPR's extreme is the extreme of its two arms.  */
	      return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type, op1, op2);

      /* Other tree classes cannot happen.  */
3940 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3941 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3942 Return a constructor for the template. */
3945 build_template (tree template_type, tree array_type, tree expr)
3947 vec<constructor_elt, va_gc> *template_elts = NULL;
3948 tree bound_list = NULL_TREE;
3951 while (TREE_CODE (array_type) == RECORD_TYPE
3952 && (TYPE_PADDING_P (array_type)
3953 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3954 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3956 if (TREE_CODE (array_type) == ARRAY_TYPE
3957 || (TREE_CODE (array_type) == INTEGER_TYPE
3958 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3959 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3961 /* First make the list for a CONSTRUCTOR for the template. Go down
3962 the field list of the template instead of the type chain because
3963 this array might be an Ada array of array and we can't tell where
3964 the nested array stop being the underlying object. */
3965 for (field = TYPE_FIELDS (template_type);
3967 field = DECL_CHAIN (DECL_CHAIN (field)))
3969 tree bounds, min, max;
3971 /* If we have a bound list, get the bounds from there. Likewise
3972 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3973 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the
3974 template, but this will only give us a maximum range. */
3977 bounds = TREE_VALUE (bound_list);
3978 bound_list = TREE_CHAIN (bound_list);
3980 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3982 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3983 array_type = TREE_TYPE (array_type);
3985 else if (expr && TREE_CODE (expr) == PARM_DECL
3986 && DECL_BY_COMPONENT_PTR_P (expr))
3987 bounds = TREE_TYPE (field);
3991 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3992 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3994 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3995 substitute it from OBJECT. */
3996 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3997 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3999 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
4000 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
4003 return gnat_build_constructor (template_type, template_elts);
4006 /* Return true if TYPE is suitable for the element type of a vector. */
4009 type_for_vector_element_p (tree type)
4013 if (!INTEGRAL_TYPE_P (type)
4014 && !SCALAR_FLOAT_TYPE_P (type)
4015 && !FIXED_POINT_TYPE_P (type))
4018 mode = TYPE_MODE (type);
4019 if (GET_MODE_CLASS (mode) != MODE_INT
4020 && !SCALAR_FLOAT_MODE_P (mode)
4021 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
4027 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
4028 this is not possible. If ATTRIBUTE is non-zero, we are processing the
4029 attribute declaration and want to issue error messages on failure. */
4032 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
4034 unsigned HOST_WIDE_INT size_int, inner_size_int;
4037 /* Silently punt on variable sizes. We can't make vector types for them,
4038 need to ignore them on front-end generated subtypes of unconstrained
4039 base types, and this attribute is for binding implementors, not end
4040 users, so we should never get there from legitimate explicit uses. */
4041 if (!tree_fits_uhwi_p (size))
4043 size_int = tree_to_uhwi (size);
4045 if (!type_for_vector_element_p (inner_type))
4048 error ("invalid element type for attribute %qs",
4049 IDENTIFIER_POINTER (attribute));
4052 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
4054 if (size_int % inner_size_int)
4057 error ("vector size not an integral multiple of component size");
4064 error ("zero vector size");
4068 nunits = size_int / inner_size_int;
4069 if (nunits & (nunits - 1))
4072 error ("number of components of vector not a power of two");
4076 return build_vector_type (inner_type, nunits);
4079 /* Return a vector type whose representative array type is ARRAY_TYPE, or
4080 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
4081 processing the attribute and want to issue error messages on failure. */
4084 build_vector_type_for_array (tree array_type, tree attribute)
4086 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
4087 TYPE_SIZE_UNIT (array_type),
4092 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
4096 /* Build a type to be used to represent an aliased object whose nominal type
4097 is an unconstrained array. This consists of a RECORD_TYPE containing a
4098 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4099 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4100 an arbitrary unconstrained object. Use NAME as the name of the record.
4101 DEBUG_INFO_P is true if we need to write debug information for the type. */
4104 build_unc_object_type (tree template_type, tree object_type, tree name,
4108 tree type = make_node (RECORD_TYPE);
4110 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4111 NULL_TREE, NULL_TREE, 0, 1);
4113 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4114 NULL_TREE, NULL_TREE, 0, 1);
4116 TYPE_NAME (type) = name;
4117 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
4118 DECL_CHAIN (template_field) = array_field;
4119 finish_record_type (type, template_field, 0, true);
4121 /* Declare it now since it will never be declared otherwise. This is
4122 necessary to ensure that its subtrees are properly marked. */
4123 decl = create_type_decl (name, type, true, debug_info_p, Empty);
4125 /* template_type will not be used elsewhere than here, so to keep the debug
4126 info clean and in order to avoid scoping issues, make decl its
4128 gnat_set_type_context (template_type, decl);
4133 /* Same, taking a thin or fat pointer type instead of a template type. */
4136 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
4137 tree name, bool debug_info_p)
4141 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
4144 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
4145 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
4146 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
4149 build_unc_object_type (template_type, object_type, name, debug_info_p);
4152 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4153 In the normal case this is just two adjustments, but we have more to
4154 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4157 update_pointer_to (tree old_type, tree new_type)
4159 tree ptr = TYPE_POINTER_TO (old_type);
4160 tree ref = TYPE_REFERENCE_TO (old_type);
4163 /* If this is the main variant, process all the other variants first. */
4164 if (TYPE_MAIN_VARIANT (old_type) == old_type)
4165 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4166 update_pointer_to (t, new_type);
4168 /* If no pointers and no references, we are done. */
4172 /* Merge the old type qualifiers in the new type.
4174 Each old variant has qualifiers for specific reasons, and the new
4175 designated type as well. Each set of qualifiers represents useful
4176 information grabbed at some point, and merging the two simply unifies
4177 these inputs into the final type description.
4179 Consider for instance a volatile type frozen after an access to constant
4180 type designating it; after the designated type's freeze, we get here with
4181 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4182 when the access type was processed. We will make a volatile and readonly
4183 designated type, because that's what it really is.
4185 We might also get here for a non-dummy OLD_TYPE variant with different
4186 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4187 to private record type elaboration (see the comments around the call to
4188 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4189 the qualifiers in those cases too, to avoid accidentally discarding the
4190 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4192 = build_qualified_type (new_type,
4193 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4195 /* If old type and new type are identical, there is nothing to do. */
4196 if (old_type == new_type)
4199 /* Otherwise, first handle the simple case. */
4200 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4202 tree new_ptr, new_ref;
4204 /* If pointer or reference already points to new type, nothing to do.
4205 This can happen as update_pointer_to can be invoked multiple times
4206 on the same couple of types because of the type variants. */
4207 if ((ptr && TREE_TYPE (ptr) == new_type)
4208 || (ref && TREE_TYPE (ref) == new_type))
4211 /* Chain PTR and its variants at the end. */
4212 new_ptr = TYPE_POINTER_TO (new_type);
4215 while (TYPE_NEXT_PTR_TO (new_ptr))
4216 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4217 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4220 TYPE_POINTER_TO (new_type) = ptr;
4222 /* Now adjust them. */
4223 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4224 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4226 TREE_TYPE (t) = new_type;
4227 if (TYPE_NULL_BOUNDS (t))
4228 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4231 /* Chain REF and its variants at the end. */
4232 new_ref = TYPE_REFERENCE_TO (new_type);
4235 while (TYPE_NEXT_REF_TO (new_ref))
4236 new_ref = TYPE_NEXT_REF_TO (new_ref);
4237 TYPE_NEXT_REF_TO (new_ref) = ref;
4240 TYPE_REFERENCE_TO (new_type) = ref;
4242 /* Now adjust them. */
4243 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4244 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4245 TREE_TYPE (t) = new_type;
4247 TYPE_POINTER_TO (old_type) = NULL_TREE;
4248 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4251 /* Now deal with the unconstrained array case. In this case the pointer
4252 is actually a record where both fields are pointers to dummy nodes.
4253 Turn them into pointers to the correct types using update_pointer_to.
4254 Likewise for the pointer to the object record (thin pointer). */
4257 tree new_ptr = TYPE_POINTER_TO (new_type);
4259 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4261 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4262 since update_pointer_to can be invoked multiple times on the same
4263 couple of types because of the type variants. */
4264 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4268 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4269 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4272 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4273 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4275 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4276 TYPE_OBJECT_RECORD_TYPE (new_type));
4278 TYPE_POINTER_TO (old_type) = NULL_TREE;
4279 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4283 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4284 unconstrained one. This involves making or finding a template. */
4287 convert_to_fat_pointer (tree type, tree expr)
4289 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4290 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4291 tree etype = TREE_TYPE (expr);
4293 vec<constructor_elt, va_gc> *v;
4296 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4297 array (compare_fat_pointers ensures that this is the full discriminant)
4298 and a valid pointer to the bounds. This latter property is necessary
4299 since the compiler can hoist the load of the bounds done through it. */
4300 if (integer_zerop (expr))
4302 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4303 tree null_bounds, t;
4305 if (TYPE_NULL_BOUNDS (ptr_template_type))
4306 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4309 /* The template type can still be dummy at this point so we build an
4310 empty constructor. The middle-end will fill it in with zeros. */
4311 t = build_constructor (template_type, NULL);
4312 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4313 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4314 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4317 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4318 fold_convert (p_array_type, null_pointer_node));
4319 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4320 t = build_constructor (type, v);
4321 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4322 TREE_CONSTANT (t) = 0;
4323 TREE_STATIC (t) = 1;
4328 /* If EXPR is a thin pointer, make template and data from the record. */
4329 if (TYPE_IS_THIN_POINTER_P (etype))
4331 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4333 expr = gnat_protect_expr (expr);
4335 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4336 the thin pointer value has been shifted so we shift it back to get
4337 the template address. */
4338 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4341 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4342 fold_build1 (NEGATE_EXPR, sizetype,
4344 (DECL_CHAIN (field))));
4346 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4350 /* Otherwise we explicitly take the address of the fields. */
4353 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4355 = build_unary_op (ADDR_EXPR, NULL_TREE,
4356 build_component_ref (expr, field, false));
4357 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4358 build_component_ref (expr, DECL_CHAIN (field),
4363 /* Otherwise, build the constructor for the template. */
4366 = build_unary_op (ADDR_EXPR, NULL_TREE,
4367 build_template (template_type, TREE_TYPE (etype),
4370 /* The final result is a constructor for the fat pointer.
4372 If EXPR is an argument of a foreign convention subprogram, the type it
4373 points to is directly the component type. In this case, the expression
4374 type may not match the corresponding FIELD_DECL type at this point, so we
4375 call "convert" here to fix that up if necessary. This type consistency is
4376 required, for instance because it ensures that possible later folding of
4377 COMPONENT_REFs against this constructor always yields something of the
4378 same type as the initial reference.
4380 Note that the call to "build_template" above is still fine because it
4381 will only refer to the provided TEMPLATE_TYPE in this case. */
4382 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4383 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4384 return gnat_build_constructor (type, v);
4387 /* Create an expression whose value is that of EXPR,
4388 converted to type TYPE. The TREE_TYPE of the value
4389 is always TYPE. This function implements all reasonable
4390 conversions; callers should filter out those that are
4391 not permitted by the language being compiled. */
4394 convert (tree type, tree expr)
4396 tree etype = TREE_TYPE (expr);
4397 enum tree_code ecode = TREE_CODE (etype);
4398 enum tree_code code = TREE_CODE (type);
4400 /* If the expression is already of the right type, we are done. */
4404 /* If both input and output have padding and are of variable size, do this
4405 as an unchecked conversion. Likewise if one is a mere variant of the
4406 other, so we avoid a pointless unpad/repad sequence. */
4407 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4408 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4409 && (!TREE_CONSTANT (TYPE_SIZE (type))
4410 || !TREE_CONSTANT (TYPE_SIZE (etype))
4411 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4412 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4413 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4416 /* If the output type has padding, convert to the inner type and make a
4417 constructor to build the record, unless a variable size is involved. */
4418 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4420 /* If we previously converted from another type and our type is
4421 of variable size, remove the conversion to avoid the need for
4422 variable-sized temporaries. Likewise for a conversion between
4423 original and packable version. */
4424 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4425 && (!TREE_CONSTANT (TYPE_SIZE (type))
4426 || (ecode == RECORD_TYPE
4427 && TYPE_NAME (etype)
4428 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4429 expr = TREE_OPERAND (expr, 0);
4431 /* If we are just removing the padding from expr, convert the original
4432 object if we have variable size in order to avoid the need for some
4433 variable-sized temporaries. Likewise if the padding is a variant
4434 of the other, so we avoid a pointless unpad/repad sequence. */
4435 if (TREE_CODE (expr) == COMPONENT_REF
4436 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4437 && (!TREE_CONSTANT (TYPE_SIZE (type))
4438 || TYPE_MAIN_VARIANT (type)
4439 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4440 || (ecode == RECORD_TYPE
4441 && TYPE_NAME (etype)
4442 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4443 return convert (type, TREE_OPERAND (expr, 0));
4445 /* If the inner type is of self-referential size and the expression type
4446 is a record, do this as an unchecked conversion unless both types are
4447 essentially the same. */
4448 if (ecode == RECORD_TYPE
4449 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
4450 && TYPE_MAIN_VARIANT (etype)
4451 != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
4452 return unchecked_convert (type, expr, false);
4454 /* If we are converting between array types with variable size, do the
4455 final conversion as an unchecked conversion, again to avoid the need
4456 for some variable-sized temporaries. If valid, this conversion is
4457 very likely purely technical and without real effects. */
4458 if (ecode == ARRAY_TYPE
4459 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4460 && !TREE_CONSTANT (TYPE_SIZE (etype))
4461 && !TREE_CONSTANT (TYPE_SIZE (type)))
4462 return unchecked_convert (type,
4463 convert (TREE_TYPE (TYPE_FIELDS (type)),
4467 tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);
4469 /* If converting to the inner type has already created a CONSTRUCTOR with
4470 the right size, then reuse it instead of creating another one. This
4471 can happen for the padding type built to overalign local variables. */
4472 if (TREE_CODE (t) == VIEW_CONVERT_EXPR
4473 && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
4474 && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
4475 && tree_int_cst_equal (TYPE_SIZE (type),
4476 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
4477 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));
4479 vec<constructor_elt, va_gc> *v;
4481 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
4482 return gnat_build_constructor (type, v);
4485 /* If the input type has padding, remove it and convert to the output type.
4486 The conditions ordering is arranged to ensure that the output type is not
4487 a padding type here, as it is not clear whether the conversion would
4488 always be correct if this was to happen. */
4489 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4493 /* If we have just converted to this padded type, just get the
4494 inner expression. */
4495 if (TREE_CODE (expr) == CONSTRUCTOR)
4496 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4498 /* Otherwise, build an explicit component reference. */
4500 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4502 return convert (type, unpadded);
4505 /* If the input is a biased type, convert first to the base type and add
4506 the bias. Note that the bias must go through a full conversion to the
4507 base type, lest it is itself a biased value; this happens for subtypes
4509 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4510 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4511 fold_convert (TREE_TYPE (etype), expr),
4512 convert (TREE_TYPE (etype),
4513 TYPE_MIN_VALUE (etype))));
4515 /* If the input is a justified modular type, we need to extract the actual
4516 object before converting it to an other type with the exceptions of an
4517 [unconstrained] array or a mere type variant. It is useful to avoid
4518 the extraction and conversion in these cases because it could end up
4519 replacing a VAR_DECL by a constructor and we might be about the take
4520 the address of the result. */
4521 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4522 && code != ARRAY_TYPE
4523 && code != UNCONSTRAINED_ARRAY_TYPE
4524 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4526 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4528 /* If converting to a type that contains a template, convert to the data
4529 type and then build the template. */
4530 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4532 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4533 vec<constructor_elt, va_gc> *v;
4536 /* If the source already has a template, get a reference to the
4537 associated array only, as we are going to rebuild a template
4538 for the target type anyway. */
4539 expr = maybe_unconstrained_array (expr);
4541 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4542 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4543 obj_type, NULL_TREE));
4545 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4546 convert (obj_type, expr));
4547 return gnat_build_constructor (type, v);
4550 /* There are some cases of expressions that we process specially. */
4551 switch (TREE_CODE (expr))
4557 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4558 conversion in gnat_expand_expr. NULL_EXPR does not represent
4559 and actual value, so no conversion is needed. */
4560 expr = copy_node (expr);
4561 TREE_TYPE (expr) = type;
4565 /* If we are converting a STRING_CST to another constrained array type,
4566 just make a new one in the proper type. */
4568 && !(TREE_CONSTANT (TYPE_SIZE (etype))
4569 && !TREE_CONSTANT (TYPE_SIZE (type))))
4571 expr = copy_node (expr);
4572 TREE_TYPE (expr) = type;
4578 /* If we are converting a VECTOR_CST to a mere type variant, just make
4579 a new one in the proper type. */
4580 if (code == ecode && gnat_types_compatible_p (type, etype))
4582 expr = copy_node (expr);
4583 TREE_TYPE (expr) = type;
4589 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4590 another padding type around the same type, just make a new one in
4593 && (gnat_types_compatible_p (type, etype)
4594 || (code == RECORD_TYPE
4595 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4596 && TREE_TYPE (TYPE_FIELDS (type))
4597 == TREE_TYPE (TYPE_FIELDS (etype)))))
4599 expr = copy_node (expr);
4600 TREE_TYPE (expr) = type;
4601 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4605 /* Likewise for a conversion between original and packable version, or
4606 conversion between types of the same size and with the same list of
4607 fields, but we have to work harder to preserve type consistency. */
4609 && code == RECORD_TYPE
4610 && (TYPE_NAME (type) == TYPE_NAME (etype)
4611 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4614 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4615 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4616 vec<constructor_elt, va_gc> *v;
4618 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4619 unsigned HOST_WIDE_INT idx;
4622 /* Whether we need to clear TREE_CONSTANT et al. on the output
4623 constructor when we convert in place. */
4624 bool clear_constant = false;
4626 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4628 /* Skip the missing fields in the CONSTRUCTOR. */
4629 while (efield && field && !SAME_FIELD_P (efield, index))
4631 efield = DECL_CHAIN (efield);
4632 field = DECL_CHAIN (field);
4634 /* The field must be the same. */
4635 if (!(efield && field && SAME_FIELD_P (efield, field)))
4638 = {field, convert (TREE_TYPE (field), value)};
4639 v->quick_push (elt);
4641 /* If packing has made this field a bitfield and the input
4642 value couldn't be emitted statically any more, we need to
4643 clear TREE_CONSTANT on our output. */
4645 && TREE_CONSTANT (expr)
4646 && !CONSTRUCTOR_BITFIELD_P (efield)
4647 && CONSTRUCTOR_BITFIELD_P (field)
4648 && !initializer_constant_valid_for_bitfield_p (value))
4649 clear_constant = true;
4651 efield = DECL_CHAIN (efield);
4652 field = DECL_CHAIN (field);
4655 /* If we have been able to match and convert all the input fields
4656 to their output type, convert in place now. We'll fallback to a
4657 view conversion downstream otherwise. */
4660 expr = copy_node (expr);
4661 TREE_TYPE (expr) = type;
4662 CONSTRUCTOR_ELTS (expr) = v;
4664 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4669 /* Likewise for a conversion between array type and vector type with a
4670 compatible representative array. */
4671 else if (code == VECTOR_TYPE
4672 && ecode == ARRAY_TYPE
4673 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4676 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4677 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4678 vec<constructor_elt, va_gc> *v;
4679 unsigned HOST_WIDE_INT ix;
4682 /* Build a VECTOR_CST from a *constant* array constructor. */
4683 if (TREE_CONSTANT (expr))
4685 bool constant_p = true;
4687 /* Iterate through elements and check if all constructor
4688 elements are *_CSTs. */
4689 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4690 if (!CONSTANT_CLASS_P (value))
4697 return build_vector_from_ctor (type,
4698 CONSTRUCTOR_ELTS (expr));
4701 /* Otherwise, build a regular vector constructor. */
4703 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4705 constructor_elt elt = {NULL_TREE, value};
4706 v->quick_push (elt);
4708 expr = copy_node (expr);
4709 TREE_TYPE (expr) = type;
4710 CONSTRUCTOR_ELTS (expr) = v;
4715 case UNCONSTRAINED_ARRAY_REF:
4716 /* First retrieve the underlying array. */
4717 expr = maybe_unconstrained_array (expr);
4718 etype = TREE_TYPE (expr);
4719 ecode = TREE_CODE (etype);
4722 case VIEW_CONVERT_EXPR:
4724 /* GCC 4.x is very sensitive to type consistency overall, and view
4725 conversions thus are very frequent. Even though just "convert"ing
4726 the inner operand to the output type is fine in most cases, it
4727 might expose unexpected input/output type mismatches in special
4728 circumstances so we avoid such recursive calls when we can. */
4729 tree op0 = TREE_OPERAND (expr, 0);
4731 /* If we are converting back to the original type, we can just
4732 lift the input conversion. This is a common occurrence with
4733 switches back-and-forth amongst type variants. */
4734 if (type == TREE_TYPE (op0))
4737 /* Otherwise, if we're converting between two aggregate or vector
4738 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4739 target type in place or to just convert the inner expression. */
4740 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4741 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4743 /* If we are converting between mere variants, we can just
4744 substitute the VIEW_CONVERT_EXPR in place. */
4745 if (gnat_types_compatible_p (type, etype))
4746 return build1 (VIEW_CONVERT_EXPR, type, op0);
4748 /* Otherwise, we may just bypass the input view conversion unless
4749 one of the types is a fat pointer, which is handled by
4750 specialized code below which relies on exact type matching. */
4751 else if (!TYPE_IS_FAT_POINTER_P (type)
4752 && !TYPE_IS_FAT_POINTER_P (etype))
4753 return convert (type, op0);
4763 /* Check for converting to a pointer to an unconstrained array. */
4764 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4765 return convert_to_fat_pointer (type, expr);
4767 /* If we are converting between two aggregate or vector types that are mere
4768 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4769 to a vector type from its representative array type. */
4770 else if ((code == ecode
4771 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4772 && gnat_types_compatible_p (type, etype))
4773 || (code == VECTOR_TYPE
4774 && ecode == ARRAY_TYPE
4775 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4777 return build1 (VIEW_CONVERT_EXPR, type, expr);
4779 /* If we are converting between tagged types, try to upcast properly.
4780 But don't do it if we are just annotating types since tagged types
4781 aren't fully laid out in this mode. */
4782 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4783 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type)
4784 && !type_annotate_only)
4786 tree child_etype = etype;
4788 tree field = TYPE_FIELDS (child_etype);
4789 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4790 return build_component_ref (expr, field, false);
4791 child_etype = TREE_TYPE (field);
4792 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4795 /* If we are converting from a smaller form of record type back to it, just
4796 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4797 size on both sides. */
4798 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4799 && smaller_form_type_p (etype, type))
4801 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4802 false, false, false, true),
4804 return build1 (VIEW_CONVERT_EXPR, type, expr);
4807 /* In all other cases of related types, make a NOP_EXPR. */
4808 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4809 return fold_convert (type, expr);
4814 return fold_build1 (CONVERT_EXPR, type, expr);
4817 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4818 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4819 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4820 return unchecked_convert (type, expr, false);
4822 /* If the output is a biased type, convert first to the base type and
4823 subtract the bias. Note that the bias itself must go through a full
4824 conversion to the base type, lest it is a biased value; this happens
4825 for subtypes of biased types. */
4826 if (TYPE_BIASED_REPRESENTATION_P (type))
4827 return fold_convert (type,
4828 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4829 convert (TREE_TYPE (type), expr),
4830 convert (TREE_TYPE (type),
4831 TYPE_MIN_VALUE (type))));
4833 /* ... fall through ... */
4837 /* If we are converting an additive expression to an integer type
4838 with lower precision, be wary of the optimization that can be
4839 applied by convert_to_integer. There are 2 problematic cases:
4840 - if the first operand was originally of a biased type,
4841 because we could be recursively called to convert it
4842 to an intermediate type and thus rematerialize the
4843 additive operator endlessly,
4844 - if the expression contains a placeholder, because an
4845 intermediate conversion that changes the sign could
4846 be inserted and thus introduce an artificial overflow
4847 at compile time when the placeholder is substituted. */
4848 if (code == INTEGER_TYPE
4849 && ecode == INTEGER_TYPE
4850 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4851 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4853 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4855 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4856 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4857 || CONTAINS_PLACEHOLDER_P (expr))
4858 return build1 (NOP_EXPR, type, expr);
4861 return fold (convert_to_integer (type, expr));
4864 case REFERENCE_TYPE:
4865 /* If converting between two thin pointers, adjust if needed to account
4866 for differing offsets from the base pointer, depending on whether
4867 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4868 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4871 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4872 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4875 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4876 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4878 tree byte_diff = size_diffop (type_pos, etype_pos);
4880 expr = build1 (NOP_EXPR, type, expr);
4881 if (integer_zerop (byte_diff))
4884 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4885 fold_convert (sizetype, byte_diff));
4888 /* If converting fat pointer to normal or thin pointer, get the pointer
4889 to the array and then convert it. */
4890 if (TYPE_IS_FAT_POINTER_P (etype))
4891 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4893 return fold (convert_to_pointer (type, expr));
4896 return fold (convert_to_real (type, expr));
4899 /* Do a normal conversion between scalar and justified modular type. */
4900 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4902 vec<constructor_elt, va_gc> *v;
4905 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4906 convert (TREE_TYPE (TYPE_FIELDS (type)),
4908 return gnat_build_constructor (type, v);
4911 /* In these cases, assume the front-end has validated the conversion.
4912 If the conversion is valid, it will be a bit-wise conversion, so
4913 it can be viewed as an unchecked conversion. */
4914 return unchecked_convert (type, expr, false);
4917 /* Do a normal conversion between unconstrained and constrained array
4918 type, assuming the latter is a constrained version of the former. */
4919 if (TREE_CODE (expr) == INDIRECT_REF
4920 && ecode == ARRAY_TYPE
4921 && TREE_TYPE (etype) == TREE_TYPE (type))
4923 tree ptr_type = build_pointer_type (type);
4924 tree t = build_unary_op (INDIRECT_REF, NULL_TREE,
4925 fold_convert (ptr_type,
4926 TREE_OPERAND (expr, 0)));
4927 TREE_READONLY (t) = TREE_READONLY (expr);
4928 TREE_THIS_NOTRAP (t) = TREE_THIS_NOTRAP (expr);
4932 /* In these cases, assume the front-end has validated the conversion.
4933 If the conversion is valid, it will be a bit-wise conversion, so
4934 it can be viewed as an unchecked conversion. */
4935 return unchecked_convert (type, expr, false);
4938       /* This is either a conversion between a tagged type and some
4939 subtype, which we have to mark as a UNION_TYPE because of
4940 overlapping fields or a conversion of an Unchecked_Union. */
4941 return unchecked_convert (type, expr, false);
4943 case UNCONSTRAINED_ARRAY_TYPE:
4944 /* If the input is a VECTOR_TYPE, convert to the representative
4945 array type first. */
4946 if (ecode == VECTOR_TYPE)
4948 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4949 etype = TREE_TYPE (expr);
4950 ecode = TREE_CODE (etype);
4953 /* If EXPR is a constrained array, take its address, convert it to a
4954 fat pointer, and then dereference it. Likewise if EXPR is a
4955 record containing both a template and a constrained array.
4956 Note that a record representing a justified modular type
4957 always represents a packed constrained array. */
4958 if (ecode == ARRAY_TYPE
4959 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4960 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4961 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4964 (INDIRECT_REF, NULL_TREE,
4965 convert_to_fat_pointer (TREE_TYPE (type),
4966 build_unary_op (ADDR_EXPR,
4969 /* Do something very similar for converting one unconstrained
4970 array to another. */
4971 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4973 build_unary_op (INDIRECT_REF, NULL_TREE,
4974 convert (TREE_TYPE (type),
4975 build_unary_op (ADDR_EXPR,
4981 return fold (convert_to_complex (type, expr));
4988 /* Create an expression whose value is that of EXPR converted to the common
4989 index type, which is sizetype. EXPR is supposed to be in the base type
4990 of the GNAT index type. Calling it is equivalent to doing
4992 convert (sizetype, expr)
4994 but we try to distribute the type conversion with the knowledge that EXPR
4995 cannot overflow in its type. This is a best-effort approach and we fall
4996 back to the above expression as soon as difficulties are encountered.
4998 This is necessary to overcome issues that arise when the GNAT base index
4999 type and the GCC common index type (sizetype) don't have the same size,
5000 which is quite frequent on 64-bit architectures. In this case, and if
5001 the GNAT base index type is signed but the iteration type of the loop has
5002 been forced to unsigned, the loop scalar evolution engine cannot compute
5003 a simple evolution for the general induction variables associated with the
5004 array indices, because it will preserve the wrap-around semantics in the
5005 unsigned type of their "inner" part. As a result, many loop optimizations
5008 The solution is to use a special (basic) induction variable that is at
5009 least as large as sizetype, and to express the aforementioned general
5010 induction variables in terms of this induction variable, eliminating
5011 the problematic intermediate truncation to the GNAT base index type.
5012 This is possible as long as the original expression doesn't overflow
5013 and if the middle-end hasn't introduced artificial overflows in the
5014 course of the various simplification it can make to the expression. */
/* Body of convert_to_index_type; see the commentary immediately above for the
   rationale: distribute the conversion to sizetype over EXPR's operands when
   EXPR provably cannot overflow in its own (signed) type.  */
5017 convert_to_index_type (tree expr)
5019   enum tree_code code = TREE_CODE (expr);
5020   tree type = TREE_TYPE (expr);
5022   /* If the type is unsigned, overflow is allowed so we cannot be sure that
5023      EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
5024   if (TYPE_UNSIGNED (type) || !optimize || optimize_debug)
5025     return convert (sizetype, expr);
/* NOTE(review): the switch on CODE and several of its case labels fall on
   lines absent from this excerpt; each visible arm below distributes the
   conversion over the operands of the matched tree code.  */
5030       /* The main effect of the function: replace a loop parameter with its
5031 	 associated special induction variable.  */
5032       if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
5033 	expr = DECL_INDUCTION_VAR (expr);
/* Conversion arm: only look through the conversion when the operand's type
   matches in both precision and signedness; a mismatch hints at type
   frobbing that must not be looked through.  */
5038 	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
5039 	/* Bail out as soon as we suspect some sort of type frobbing.  */
5040 	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
5041 	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
5045       /* ... fall through ... */
5047     case NON_LVALUE_EXPR:
/* Unary arm: rebuild the node in sizetype with a converted operand.  */
5048       return fold_build1 (code, sizetype,
5049 			  convert_to_index_type (TREE_OPERAND (expr, 0)));
/* Binary arithmetic arm: convert both operands recursively.  */
5054       return fold_build2 (code, sizetype,
5055 			  convert_to_index_type (TREE_OPERAND (expr, 0)),
5056 			  convert_to_index_type (TREE_OPERAND (expr, 1)));
/* Arm whose first operand is passed through unconverted (the case label is
   not visible here; presumably COMPOUND_EXPR, whose operand 0 is only
   evaluated for side effects — TODO confirm against upstream source).  */
5059       return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
5060 			  convert_to_index_type (TREE_OPERAND (expr, 1)));
/* Ternary arm: operand 0 is kept as is (presumably a COND_EXPR condition —
   label not visible in this excerpt), the two value operands are converted.  */
5063       return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
5064 			  convert_to_index_type (TREE_OPERAND (expr, 1)),
5065 			  convert_to_index_type (TREE_OPERAND (expr, 2)));
/* Default: plain conversion to sizetype.  */
5071   return convert (sizetype, expr);
5074 /* Remove all conversions that are done in EXP. This includes converting
5075 from a padded type or to a justified modular type. If TRUE_ADDRESS
5076 is true, always return the address of the containing object even if
5077 the address is not bit-aligned. */
/* Strip the conversions performed in EXP, as documented in the commentary
   just above; each visible arm recurses into the relevant operand.  */
5080 remove_conversions (tree exp, bool true_address)
5082   switch (TREE_CODE (exp))
/* Arm for a CONSTRUCTOR of a justified modular record (the case label and
   opening condition are on lines absent from this excerpt): look through
   the single-field wrapper, passing TRUE for TRUE_ADDRESS since we step
   inside a container.  */
5086 	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5087 	  && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5089 	  remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
/* Arm for a field access whose base is a padded record: strip the padding
   wrapper, preserving the caller's TRUE_ADDRESS setting.  */
5093       if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
5094 	return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5098     case VIEW_CONVERT_EXPR:
5099     case NON_LVALUE_EXPR:
/* Plain view conversions do not change the underlying object: recurse.  */
5100       return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5109 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5110 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5111 likewise return an expression pointing to the underlying array. */
/* Body of maybe_unconstrained_array; see the commentary above: rewrite EXP
   so that it refers to the underlying array when its type is an
   UNCONSTRAINED_ARRAY_TYPE or contains a template.  */
5114 maybe_unconstrained_array (tree exp)
5116   enum tree_code code = TREE_CODE (exp);
5117   tree type = TREE_TYPE (exp);
5119   switch (TREE_CODE (type))
5121     case UNCONSTRAINED_ARRAY_TYPE:
5122       if (code == UNCONSTRAINED_ARRAY_REF)
/* Save the flags of the reference so they can be restored on the node
   built below.  */
5124 	  const bool read_only = TREE_READONLY (exp);
5125 	  const bool no_trap = TREE_THIS_NOTRAP (exp);
5127 	  exp = TREE_OPERAND (exp, 0);
5128 	  type = TREE_TYPE (exp);
5130 	  if (TREE_CODE (exp) == COND_EXPR)
/* Distribute the dereference over both arms of the COND_EXPR, fetching
   the array pointer field from each arm's fat/thin pointer.  */
5133 		= build_unary_op (INDIRECT_REF, NULL_TREE,
5134 				  build_component_ref (TREE_OPERAND (exp, 1),
5138 		= build_unary_op (INDIRECT_REF, NULL_TREE,
5139 				  build_component_ref (TREE_OPERAND (exp, 2),
5143 	      exp = build3 (COND_EXPR,
5144 			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
5145 			    TREE_OPERAND (exp, 0), op1, op2);
/* Otherwise dereference the array pointer field directly.  */
5149 	    exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5150 				  build_component_ref (exp,
/* Restore the flags saved from the original reference.  */
5153 	      TREE_READONLY (exp) = read_only;
5154 	      TREE_THIS_NOTRAP (exp) = no_trap;
/* A NULL_EXPR of unconstrained array type becomes a NULL_EXPR of the
   underlying array type.  */
5158       else if (code == NULL_EXPR)
5159 	exp = build1 (NULL_EXPR,
5160 		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5161 		      TREE_OPERAND (exp, 0));
5165       /* If this is a padded type and it contains a template, convert to the
5166 	 unpadded type first.  */
5167       if (TYPE_PADDING_P (type)
5168 	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5169 	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
5171 	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5172 	  code = TREE_CODE (exp);
5173 	  type = TREE_TYPE (exp);
5176       if (TYPE_CONTAINS_TEMPLATE_P (type))
5178 	  /* If the array initializer is a box, return NULL_TREE.  */
5179 	  if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
/* Fetch the array part, which is the second field after the template.  */
5182 	  exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
5184 	  type = TREE_TYPE (exp);
5186 	  /* If the array type is padded, convert to the unpadded type.  */
5187 	  if (TYPE_IS_PADDING_P (type))
5188 	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5199 /* Return true if EXPR is an expression that can be folded as an operand
5200 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
/* Predicate: true if EXPR may safely be folded as the operand of a
   VIEW_CONVERT_EXPR; see the commentary above and ada-tree.h.  */
5203 can_fold_for_view_convert_p (tree expr)
5207   /* The folder will fold NOP_EXPRs between integral types with the same
5208      precision (in the middle-end's sense).  We cannot allow it if the
5209      types don't have the same precision in the Ada sense as well.  */
5210   if (TREE_CODE (expr) != NOP_EXPR)
5213   t1 = TREE_TYPE (expr);
5214   t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5216   /* Defer to the folder for non-integral conversions.  */
5217   if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5220   /* Only fold conversions that preserve both precisions.  */
5221   if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5222       && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5228 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5229 If NOTRUNC_P is true, truncation operations should be suppressed.
5231 Special care is required with (source or target) integral types whose
5232 precision is not equal to their size, to make sure we fetch or assign
5233 the value bits whose location might depend on the endianness, e.g.
5235 Rmsize : constant := 8;
5236 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5238 type Bit_Array is array (1 .. Rmsize) of Boolean;
5239 pragma Pack (Bit_Array);
5241 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5243 Value : Int := 2#1000_0001#;
5244 Vbits : Bit_Array := To_Bit_Array (Value);
5246 we expect the 8 bits at Vbits'Address to always contain Value, while
5247 their original location depends on the endianness, at Value'Address
5248 on a little-endian architecture but not on a big-endian one.
5250 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
5251 the bits between the precision and the size are filled, because of the
5252 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
5253 So we use the special predicate type_unsigned_for_rm above. */
/* Body of unchecked_convert; the contract (bit-wise conversion of EXPR to
   TYPE, truncation suppressed when NOTRUNC_P) is documented just above.  */
5256 unchecked_convert (tree type, tree expr, bool notrunc_p)
5258   tree etype = TREE_TYPE (expr);
5259   enum tree_code ecode = TREE_CODE (etype);
5260   enum tree_code code = TREE_CODE (type);
/* NOTE(review): the names of the four const bool locals initialized below
   fall on lines absent from this excerpt; judging by later uses they are
   the source/target biased-representation and reverse-storage-order flags
   (e.g. `ereverse', `reverse') — confirm against upstream source.  */
5262     = (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype));
5264     = (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type));
5266     = (AGGREGATE_TYPE_P (etype) && TYPE_REVERSE_STORAGE_ORDER (etype));
5268     = (AGGREGATE_TYPE_P (type) && TYPE_REVERSE_STORAGE_ORDER (type));
5272   /* If the expression is already of the right type, we are done.  */
5276   /* If both types are integral or regular pointer, then just do a normal
5277      conversion.  Likewise for a conversion to an unconstrained array.  */
5278   if (((INTEGRAL_TYPE_P (type)
5279 	|| (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5280 	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5281        && (INTEGRAL_TYPE_P (etype)
5282 	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5283 	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5284       || code == UNCONSTRAINED_ARRAY_TYPE)
/* For a biased source, first view the value in an unbiased copy of the
   source type so the normal conversion machinery does not rebias it.  */
5288 	  tree ntype = copy_type (etype);
5289 	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5290 	  TYPE_MAIN_VARIANT (ntype) = ntype;
5291 	  expr = build1 (NOP_EXPR, ntype, expr);
/* Symmetrically, for a biased target, convert to an unbiased copy of the
   target type and then reinterpret the bits as the biased type.  */
5296 	  tree rtype = copy_type (type);
5297 	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5298 	  TYPE_MAIN_VARIANT (rtype) = rtype;
5299 	  expr = convert (rtype, expr);
5300 	  expr = build1 (NOP_EXPR, type, expr);
5303 	expr = convert (type, expr);
5306   /* If we are converting to an integral type whose precision is not equal
5307      to its size, first unchecked convert to a record type that contains a
5308      field of the given precision.  Then extract the result from the field.
5310      There is a subtlety if the source type is an aggregate type with reverse
5311      storage order because its representation is not contiguous in the native
5312      storage order, i.e. a direct unchecked conversion to an integral type
5313      with N bits of precision cannot read the first N bits of the aggregate
5314      type.  To overcome it, we do an unchecked conversion to an integral type
5315      with reverse storage order and return the resulting value.  This also
5316      ensures that the result of the unchecked conversion doesn't depend on
5317      the endianness of the target machine, but only on the storage order of
5320      Finally, for the sake of consistency, we do the unchecked conversion
5321      to an integral type with reverse storage order as soon as the source
5322      type is an aggregate type with reverse storage order, even if there
5323      are no considerations of precision or size involved.  Ultimately, we
5324      further extend this processing to any scalar type.  */
5325   else if ((INTEGRAL_TYPE_P (type)
5326 	    && TYPE_RM_SIZE (type)
5327 	    && ((c = tree_int_cst_compare (TYPE_RM_SIZE (type),
5328 					   TYPE_SIZE (type))) < 0
5330 	   || (SCALAR_FLOAT_TYPE_P (type) && ereverse))
5332       tree rec_type = make_node (RECORD_TYPE);
5333       tree field_type, field;
5335       TYPE_REVERSE_STORAGE_ORDER (rec_type) = ereverse;
/* Build a field type of exactly the RM precision, with the signedness
   computed by type_unsigned_for_rm (see the pitfall noted above).  */
5339 	  const unsigned HOST_WIDE_INT prec
5340 	    = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5341 	  if (type_unsigned_for_rm (type))
5342 	    field_type = make_unsigned_type (prec);
5344 	    field_type = make_signed_type (prec);
5345 	  SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5350       field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5351 				 NULL_TREE, bitsize_zero_node, c < 0, 0);
5353       finish_record_type (rec_type, field, 1, false);
/* Convert to the record, pull the value out of the field, then view the
   field value as the requested type.  */
5355       expr = unchecked_convert (rec_type, expr, notrunc_p);
5356       expr = build_component_ref (expr, field, false);
5357       expr = fold_build1 (NOP_EXPR, type, expr);
5360   /* Similarly if we are converting from an integral type whose precision is
5361      not equal to its size, first copy into a field of the given precision
5362      and unchecked convert the record type.
5364      The same considerations as above apply if the target type is an aggregate
5365      type with reverse storage order and we also proceed similarly.  */
5366   else if ((INTEGRAL_TYPE_P (etype)
5367 	    && TYPE_RM_SIZE (etype)
5368 	    && ((c = tree_int_cst_compare (TYPE_RM_SIZE (etype),
5369 					   TYPE_SIZE (etype))) < 0
5371 	   || (SCALAR_FLOAT_TYPE_P (etype) && reverse))
5373       tree rec_type = make_node (RECORD_TYPE);
5374       vec<constructor_elt, va_gc> *v;
5376       tree field_type, field;
5378       TYPE_REVERSE_STORAGE_ORDER (rec_type) = reverse;
/* Mirror image of the previous case: field type of the source's RM
   precision and signedness.  */
5382 	  const unsigned HOST_WIDE_INT prec
5383 	    = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5384 	  if (type_unsigned_for_rm (etype))
5385 	    field_type = make_unsigned_type (prec);
5387 	    field_type = make_signed_type (prec);
5388 	  SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5393       field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5394 				 NULL_TREE, bitsize_zero_node, c < 0, 0);
5396       finish_record_type (rec_type, field, 1, false);
/* Wrap the value in a one-field record, then unchecked convert the record
   to the target type.  */
5398       expr = fold_build1 (NOP_EXPR, field_type, expr);
5399       CONSTRUCTOR_APPEND_ELT (v, field, expr);
5400       expr = gnat_build_constructor (rec_type, v);
5401       expr = unchecked_convert (type, expr, notrunc_p);
5404   /* If we are converting from a scalar type to a type with a different size,
5405      we need to pad to have the same size on both sides.
5407      ??? We cannot do it unconditionally because unchecked conversions are
5408      used liberally by the front-end to implement interface thunks:
5410 	 type ada__tags__addr_ptr is access system.address;
5411 	 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5412 	 return p___size__4 (p__object!(S191s.all));
5414      so we need to skip dereferences.  */
5415   else if (!INDIRECT_REF_P (expr)
5416 	   && !AGGREGATE_TYPE_P (etype)
5417 	   && ecode != UNCONSTRAINED_ARRAY_TYPE
5418 	   && TREE_CONSTANT (TYPE_SIZE (type))
5419 	   && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
/* Source smaller than target: pad the source up to the target size.  */
5423 	  expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5424 					  false, false, false, true),
5426 	  expr = unchecked_convert (type, expr, notrunc_p);
/* Source larger than target: convert to a padded version of the target
   sized like the source, then extract the data field.  */
5430 	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5431 					  false, false, false, true);
5432 	  expr = unchecked_convert (rec_type, expr, notrunc_p);
5433 	  expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5437   /* Likewise if we are converting from a scalar type to a type with self-
5438      referential size.  We use the max size to do the padding in this case.  */
5439   else if (!INDIRECT_REF_P (expr)
5440 	   && !AGGREGATE_TYPE_P (etype)
5441 	   && ecode != UNCONSTRAINED_ARRAY_TYPE
5442 	   && CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type)))
5444       tree new_size = max_size (TYPE_SIZE (type), true);
5445       c = tree_int_cst_compare (TYPE_SIZE (etype), new_size);
5448 	  expr = convert (maybe_pad_type (etype, new_size, 0, Empty,
5449 					  false, false, false, true),
5451 	  expr = unchecked_convert (type, expr, notrunc_p);
5455 	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5456 					  false, false, false, true);
5457 	  expr = unchecked_convert (rec_type, expr, notrunc_p);
5458 	  expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5462   /* We have a special case when we are converting between two unconstrained
5463      array types.  In that case, take the address, convert the fat pointer
5464      types, and dereference.  */
5465   else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5466     expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5467 			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5468 				   build_unary_op (ADDR_EXPR, NULL_TREE,
5471   /* Another special case is when we are converting to a vector type from its
5472      representative array type; this a regular conversion.  */
5473   else if (code == VECTOR_TYPE
5474 	   && ecode == ARRAY_TYPE
5475 	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5477     expr = convert (type, expr);
5479   /* And, if the array type is not the representative, we try to build an
5480      intermediate vector type of which the array type is the representative
5481      and to do the unchecked conversion between the vector types, in order
5482      to enable further simplifications in the middle-end.  */
5483   else if (code == VECTOR_TYPE
5484 	   && ecode == ARRAY_TYPE
5485 	   && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5487       expr = convert (tem, expr);
5488       return unchecked_convert (type, expr, notrunc_p);
5491   /* If we are converting a CONSTRUCTOR to a more aligned aggregate type, bump
5492      the alignment of the CONSTRUCTOR to speed up the copy operation.  But do
5493      not do it for a conversion between original and packable version to avoid
5494      an infinite recursion.  */
5495   else if (TREE_CODE (expr) == CONSTRUCTOR
5496 	   && AGGREGATE_TYPE_P (type)
5497 	   && TYPE_NAME (type) != TYPE_NAME (etype)
5498 	   && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5500       expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5501 				      Empty, false, false, false, true),
5503       return unchecked_convert (type, expr, notrunc_p);
5506   /* If we are converting a CONSTRUCTOR to a larger aggregate type, bump the
5507      size of the CONSTRUCTOR to make sure there are enough allocated bytes.
5508      But do not do it for a conversion between original and packable version
5509      to avoid an infinite recursion.  */
5510   else if (TREE_CODE (expr) == CONSTRUCTOR
5511 	   && AGGREGATE_TYPE_P (type)
5512 	   && TYPE_NAME (type) != TYPE_NAME (etype)
5513 	   && TREE_CONSTANT (TYPE_SIZE (type))
5514 	   && (!TREE_CONSTANT (TYPE_SIZE (etype))
5515 	       || tree_int_cst_lt (TYPE_SIZE (etype), TYPE_SIZE (type))))
5517       expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0,
5518 				      Empty, false, false, false, true),
5520       return unchecked_convert (type, expr, notrunc_p);
5523   /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression.  */
5526       expr = maybe_unconstrained_array (expr);
5527       etype = TREE_TYPE (expr);
5528       ecode = TREE_CODE (etype);
5529       if (can_fold_for_view_convert_p (expr))
5530 	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5532 	expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5535   /* If the result is a non-biased integral type whose precision is not equal
5536      to its size, sign- or zero-extend the result.  But we need not do this
5537      if the input is also an integral type and both are unsigned or both are
5538      signed and have the same precision.  */
5542       && INTEGRAL_TYPE_P (type)
5543       && (type_rm_size = TYPE_RM_SIZE (type))
5544       && tree_int_cst_compare (type_rm_size, TYPE_SIZE (type)) < 0
5545       && !(INTEGRAL_TYPE_P (etype)
5546 	   && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
5547 	   && (type_unsigned_for_rm (type)
5548 	       || tree_int_cst_compare (type_rm_size,
5549 					TYPE_RM_SIZE (etype)
5550 					? TYPE_RM_SIZE (etype)
5551 					: TYPE_SIZE (etype)) == 0)))
/* A zero-bit RM size means the only value is zero.  */
5553       if (integer_zerop (type_rm_size))
5554 	expr = build_int_cst (type, 0);
/* Otherwise extend via a left shift followed by an arithmetic or logical
   right shift by SIZE - RM_SIZE bits in the full-size base type.  */
5558 	    = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5559 				  type_unsigned_for_rm (type));
5561 	    = convert (base_type,
5562 		       size_binop (MINUS_EXPR,
5563 				   TYPE_SIZE (type), type_rm_size));
5566 	      build_binary_op (RSHIFT_EXPR, base_type,
5567 			       build_binary_op (LSHIFT_EXPR, base_type,
5575   /* An unchecked conversion should never raise Constraint_Error.  The code
5576      below assumes that GCC's conversion routines overflow the same way that
5577      the underlying hardware does.  This is probably true.  In the rare case
5578      when it is false, we can rely on the fact that such conversions are
5579      erroneous anyway.  */
5580   if (TREE_CODE (expr) == INTEGER_CST)
5581     TREE_OVERFLOW (expr) = 0;
5583   /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5584      show no longer constant.  */
5585   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5586       && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5588     TREE_CONSTANT (expr) = 0;
5593 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5594 the latter being a record type as predicated by Is_Record_Type. */
/* Map GNAT_TYPE (an Is_Record_Type entity) to the GCC tree code used to
   represent it; see the commentary inside for the decision rule.  */
5597 tree_code_for_record_type (Entity_Id gnat_type)
5599   Node_Id component_list, component;
5601   /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5602      fields are all in the variant part.  Otherwise, return RECORD_TYPE.  */
5603   if (!Is_Unchecked_Union (gnat_type))
/* Walk the component items of the implementation base type; finding any
   E_Component outside the variant part forces RECORD_TYPE.  */
5606   gnat_type = Implementation_Base_Type (gnat_type);
5608     = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5610   for (component = First_Non_Pragma (Component_Items (component_list));
5611        Present (component);
5612        component = Next_Non_Pragma (component))
5613     if (Ekind (Defining_Entity (component)) == E_Component)
5619 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5620 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5621 according to the presence of an alignment clause on the type or, if it
5622 is an array, on the component type. */
/* Predicate documented above: GNAT_TYPE is a 64-bit floating-point type or
   an array thereof; *ALIGN_CLAUSE reports any alignment clause seen.  */
5625 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5627   gnat_type = Underlying_Type (gnat_type);
5629   *align_clause = Present (Alignment_Clause (gnat_type));
5631   if (Is_Array_Type (gnat_type))
/* For an array, look at the component type, and let a component alignment
   clause override the absence of one on the array itself.  */
5633       gnat_type = Underlying_Type (Component_Type (gnat_type));
5634       if (Present (Alignment_Clause (gnat_type)))
5635 	*align_clause = true;
5638   if (!Is_Floating_Point_Type (gnat_type))
5641   if (UI_To_Int (Esize (gnat_type)) != 64)
5647 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5648 size is greater or equal to 64 bits, or an array of such a type. Set
5649 ALIGN_CLAUSE according to the presence of an alignment clause on the
5650 type or, if it is an array, on the component type. */
/* Predicate documented above: GNAT_TYPE is a scalar type of size >= 64 bits
   or an array thereof; *ALIGN_CLAUSE reports any alignment clause seen.
   Mirror image of is_double_float_or_array with a scalar/size test.  */
5653 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5655   gnat_type = Underlying_Type (gnat_type);
5657   *align_clause = Present (Alignment_Clause (gnat_type));
5659   if (Is_Array_Type (gnat_type))
/* For an array, test the component type instead, honoring a component
   alignment clause.  */
5661       gnat_type = Underlying_Type (Component_Type (gnat_type));
5662       if (Present (Alignment_Clause (gnat_type)))
5663 	*align_clause = true;
5666   if (!Is_Scalar_Type (gnat_type))
5669   if (UI_To_Int (Esize (gnat_type)) < 64)
5675 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5676 component of an aggregate type. */
/* Return true if GNU_TYPE may be used as the type of a non-aliased component
   of an aggregate type (i.e. nothing may legitimately point into it).  */
5679 type_for_nonaliased_component_p (tree gnu_type)
5681   /* If the type is passed by reference, we may have pointers to the
5682      component so it cannot be made non-aliased.  */
5683   if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5686   /* We used to say that any component of aggregate type is aliased
5687      because the front-end may take 'Reference of it.  The front-end
5688      has been enhanced in the meantime so as to use a renaming instead
5689      in most cases, but the back-end can probably take the address of
5690      such a component too so we go for the conservative stance.
5692      For instance, we might need the address of any array type, even
5693      if normally passed by copy, to construct a fat pointer if the
5694      component is used as an actual for an unconstrained formal.
5696      Likewise for record types: even if a specific record subtype is
5697      passed by copy, the parent type might be passed by ref (e.g. if
5698      it's of variable size) and we might take the address of a child
5699      component to pass to a parent formal.  We have no way to check
5700      for such conditions here.  */
5701   if (AGGREGATE_TYPE_P (gnu_type))
5707 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
/* Return true if TYPE is a smaller form (e.g. a packable version) of
   ORIG_TYPE: same TYPE_NAME, not a mere variant, strictly smaller constant
   size.  */
5710 smaller_form_type_p (tree type, tree orig_type)
5714   /* We're not interested in variants here.  */
5715   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5718   /* Like a variant, a packable version keeps the original TYPE_NAME.  */
5719   if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5722   size = TYPE_SIZE (type);
5723   osize = TYPE_SIZE (orig_type);
/* Both sizes must be compile-time constants to be comparable.  */
5725   if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5728   return tree_int_cst_lt (size, osize) != 0;
5731 /* Return whether EXPR, which is the renamed object in an object renaming
5732 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5733 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
/* Walk the renamed object EXPR through its prefixes and decide whether it
   can be materialized as a reference; must stay in sync with
   Exp_Dbug.Debug_Renaming_Declaration (see comment above).  The walk loops
   on prefix-bearing nodes, rejecting anything not byte-addressable.  */
5736 can_materialize_object_renaming_p (Node_Id expr)
5740       expr = Original_Node (expr);
5745 	case N_Expanded_Name:
/* Follow a renaming chain through the entity, if any.  */
5746 	  if (!Present (Renamed_Object (Entity (expr))))
5748 	  expr = Renamed_Object (Entity (expr));
5751 	case N_Selected_Component:
/* A component of a packed record, or one whose normalized first bit is
   neither unknown nor zero, is not byte-aligned: reject.  */
5753 	    if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5757 	      = Normalized_First_Bit (Entity (Selector_Name (expr)));
5758 	    if (!UI_Is_In_Int_Range (bitpos)
5759 		|| (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5762 	    expr = Prefix (expr);
5766 	case N_Indexed_Component:
/* Indexing into a packed array implementation type: reject.  */
5769 	    const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5771 	    if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5774 	    expr = Prefix (expr);
5778 	case N_Explicit_Dereference:
5779 	  expr = Prefix (expr);
5788 /* Perform final processing on global declarations. */
/* Dummy variable used to register globally-used types; GTY-marked so the
   garbage collector keeps it alive across passes.  */
5790 static GTY (()) tree dummy_global;
/* Perform final processing on global declarations: register used types,
   then emit debug info and code for globals in dependency-safe order.  */
5793 gnat_write_global_declarations (void)
5798   /* If we have declared types as used at the global level, insert them in
5799      the global hash table.  We use a dummy variable for this purpose, but
5800      we need to build it unconditionally to avoid -fcompare-debug issues.  */
5801   if (first_global_object_name)
5803       struct varpool_node *node;
/* Build a private-named, hard-register static VAR_DECL and force it into
   the varpool so the types attached to it are emitted.  */
5806       ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5808 	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5810       DECL_HARD_REGISTER (dummy_global) = 1;
5811       TREE_STATIC (dummy_global) = 1;
5812       node = varpool_node::get_create (dummy_global);
5813       node->definition = 1;
5814       node->force_output = 1;
/* Drain the queue of used types, attaching each to the dummy variable.  */
5816       if (types_used_by_cur_var_decl)
5817 	while (!types_used_by_cur_var_decl->is_empty ())
5819 	    tree t = types_used_by_cur_var_decl->pop ();
5820 	    types_used_by_var_decl_insert (t, dummy_global);
5824   /* Output debug information for all global type declarations first.  This
5825      ensures that global types whose compilation hasn't been finalized yet,
5826      for example pointers to Taft amendment types, have their compilation
5827      finalized in the right context.  */
5828   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5829     if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5830       debug_hooks->type_decl (iter, false);
5832   /* Output imported functions.  */
5833   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5834     if (TREE_CODE (iter) == FUNCTION_DECL
5835 	&& DECL_EXTERNAL (iter)
5836 	&& DECL_INITIAL (iter) == NULL
5837 	&& !DECL_IGNORED_P (iter)
5838 	&& DECL_FUNCTION_IS_DEF (iter))
5839       debug_hooks->early_global_decl (iter);
5841   /* Output global constants.  */
5842   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5843     if (TREE_CODE (iter) == CONST_DECL && !DECL_IGNORED_P (iter))
5844       debug_hooks->early_global_decl (iter);
5846   /* Then output the global variables.  We need to do that after the debug
5847      information for global types is emitted so that they are finalized.  Skip
5848      external global variables, unless we need to emit debug info for them:
5849      this is useful for imported variables, for instance.  */
5850   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5851     if (TREE_CODE (iter) == VAR_DECL
5852 	&& (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5853       rest_of_decl_compilation (iter, true, 0);
5855   /* Output the imported modules/declarations.  In GNAT, these are only
5856      materializing subprograms.  */
5857   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5858     if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5859       debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5860 					    DECL_CONTEXT (iter), false, false);
5863 /* ************************************************************************
5864 * * GCC builtins support *
5865 * ************************************************************************ */
5867 /* The general scheme is fairly simple:
5869 For each builtin function/type to be declared, gnat_install_builtins calls
5870 internal facilities which eventually get to gnat_pushdecl, which in turn
5871 tracks the so declared builtin function decls in the 'builtin_decls' global
5872 datastructure. When an Intrinsic subprogram declaration is processed, we
5873 search this global datastructure to retrieve the associated BUILT_IN DECL
5876 /* Search the chain of currently available builtin declarations for a node
5877 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5878 found, if any, or NULL_TREE otherwise. */
/* Search builtin_decls for a function declaration whose DECL_NAME is NAME
   (an IDENTIFIER_NODE); see the commentary above for the contract.  */
5880 builtin_decl_for (tree name)
/* Linear scan; IDENTIFIER_NODEs are unique, so pointer equality suffices.  */
5885   FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5886     if (DECL_NAME (decl) == name)
5892 /* The code below eventually exposes gnat_install_builtins, which declares
5893 the builtin types and functions we might need, either internally or as
5894 user accessible facilities.
5896 ??? This is a first implementation shot, still in rough shape. It is
5897 heavily inspired from the "C" family implementation, with chunks copied
5898 verbatim from there.
5900 Two obvious improvement candidates are:
5901 o Use a more efficient name/decl mapping scheme
5902 o Devise a middle-end infrastructure to avoid having to copy
5903 pieces between front-ends. */
5905 /* ----------------------------------------------------------------------- *
5906 * BUILTIN ELEMENTARY TYPES *
5907 * ----------------------------------------------------------------------- */
5909 /* Standard data types to be used in builtin argument declarations. */
/* Indices into c_global_trees; this mirrors (a subset of) the C family
   front-ends' c_tree_index so builtin-types.def can be shared verbatim.  */
5913 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5915 CTI_CONST_STRING_TYPE,
5920 static tree c_global_trees[CTI_MAX];
/* Convenience accessors for the c_global_trees entries above, named after
   the corresponding C front-end globals that builtin-types.def expects.  */
5922 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5923 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5924 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5926 /* ??? In addition to some attribute handlers, we currently don't support a
5927 (small) number of builtin-types, which in turn inhibits support for a
5928 number of builtin functions. */
/* Map the unsupported C types to void_type_node so that the builtins
   referencing them are declared with a degenerate (but harmless) type.  */
5929 #define wint_type_node void_type_node
5930 #define intmax_type_node void_type_node
5931 #define uintmax_type_node void_type_node
5933 /* Used to help initialize the builtin-types.def table. When a type of
5934 the correct size doesn't exist, use error_mark_node instead of NULL.
5935 The latter results in segfaults even when a decl using the type doesn't
/* Return the GNAT integer type of SIZE bits, signed per UNSIGNEDP, or
   error_mark_node when no such type exists (see comment above).  */
5939 builtin_type_for_size (int size, bool unsignedp)
5941 tree type = gnat_type_for_size (size, unsignedp);
5942 return type ? type : error_mark_node;
5945 /* Build/push the elementary type decls that builtin functions/types
/* Populate the c_global_trees slots used by builtin-types.def: a signed
   counterpart of size_t, pid_t (mapped to plain int here), and the
   (const) char* "string" types.  */
5949 install_builtin_elementary_types (void)
5951 signed_size_type_node = gnat_signed_type_for (size_type_node);
5952 pid_type_node = integer_type_node;
5954 string_type_node = build_pointer_type (char_type_node);
5955 const_string_type_node
5956 = build_pointer_type (build_qualified_type
5957 (char_type_node, TYPE_QUAL_CONST));
5960 /* ----------------------------------------------------------------------- *
5961 * BUILTIN FUNCTION TYPES *
5962 * ----------------------------------------------------------------------- */
5964 /* Now, builtin function types per se. */
/* X-macro trick: each DEF_* macro expands to just its enumerator NAME, so
   including builtin-types.def (plus the Ada additions) here enumerates one
   c_builtin_type value per builtin function type.  The same .def files are
   re-included below in install_builtin_function_types with the macros
   redefined to actually build the types.  */
5968 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5969 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5970 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5971 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5972 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5973 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5974 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5975 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5977 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5979 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5980 ARG6, ARG7, ARG8) NAME,
5981 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5982 ARG6, ARG7, ARG8, ARG9) NAME,
5983 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5984 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5985 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5986 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5987 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5988 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5989 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5990 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5991 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5992 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5994 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5996 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5998 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5999 #include "builtin-types.def"
6000 #include "ada-builtin-types.def"
6001 #undef DEF_PRIMITIVE_TYPE
6002 #undef DEF_FUNCTION_TYPE_0
6003 #undef DEF_FUNCTION_TYPE_1
6004 #undef DEF_FUNCTION_TYPE_2
6005 #undef DEF_FUNCTION_TYPE_3
6006 #undef DEF_FUNCTION_TYPE_4
6007 #undef DEF_FUNCTION_TYPE_5
6008 #undef DEF_FUNCTION_TYPE_6
6009 #undef DEF_FUNCTION_TYPE_7
6010 #undef DEF_FUNCTION_TYPE_8
6011 #undef DEF_FUNCTION_TYPE_9
6012 #undef DEF_FUNCTION_TYPE_10
6013 #undef DEF_FUNCTION_TYPE_11
6014 #undef DEF_FUNCTION_TYPE_VAR_0
6015 #undef DEF_FUNCTION_TYPE_VAR_1
6016 #undef DEF_FUNCTION_TYPE_VAR_2
6017 #undef DEF_FUNCTION_TYPE_VAR_3
6018 #undef DEF_FUNCTION_TYPE_VAR_4
6019 #undef DEF_FUNCTION_TYPE_VAR_5
6020 #undef DEF_FUNCTION_TYPE_VAR_6
6021 #undef DEF_FUNCTION_TYPE_VAR_7
6022 #undef DEF_POINTER_TYPE
6026 typedef enum c_builtin_type builtin_type;
6028 /* A temporary array used in communication with def_fn_type. */
6029 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
6031 /* A helper function for install_builtin_types. Build function type
6032 for DEF with return type RET and N arguments. If VAR is true, then the
6033 function should be variadic after those N arguments.
6035 Takes special care not to ICE if any of the types involved are
6036 error_mark_node, which indicates that said type is not in fact available
6037 (see builtin_type_for_size). In which case the function type as a whole
6038 should be error_mark_node. */
6041 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
/* Collect the N argument types from the va_list into a stack buffer,
   bailing out to error_mark_node as soon as any of them is unavailable.  */
6044 tree *args = XALLOCAVEC (tree, n)
6049 for (i = 0; i < n; ++i)
6051 builtin_type a = (builtin_type) va_arg (list, int);
6052 t = builtin_types[a];
6053 if (t == error_mark_node)
/* Same check for the return type before building the function type.  */
6058 t = builtin_types[ret];
6059 if (t == error_mark_node)
6062 t = build_varargs_function_type_array (t, n, args);
6064 t = build_function_type_array (t, n, args);
/* Record the result (possibly error_mark_node) under DEF's slot.  */
6067 builtin_types[def] = t;
6071 /* Build the builtin function types and install them in the builtin_types
6072 array for later use in builtin function decls. */
6075 install_builtin_function_types (void)
6077 tree va_list_ref_type_node;
6078 tree va_list_arg_type_node;
/* When va_list is an array type, it decays to a pointer in parameter
   position, so both the argument and reference forms are pointers to the
   element type; otherwise pass by value and reference respectively.  */
6080 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
6082 va_list_arg_type_node = va_list_ref_type_node =
6083 build_pointer_type (TREE_TYPE (va_list_type_node));
6087 va_list_arg_type_node = va_list_type_node;
6088 va_list_ref_type_node = build_reference_type (va_list_type_node);
/* Second expansion of the .def files (see the enum above): this time each
   DEF_* macro actually builds the type via def_fn_type, with the VAR flag
   set for the _VAR_ variants.  */
6091 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
6092 builtin_types[ENUM] = VALUE;
6093 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
6094 def_fn_type (ENUM, RETURN, 0, 0);
6095 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
6096 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
6097 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
6098 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
6099 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6100 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
6101 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6102 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
6103 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6104 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6105 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6107 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6108 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6110 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6111 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6113 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6115 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6116 ARG6, ARG7, ARG8, ARG9) \
6117 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6119 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6120 ARG6, ARG7, ARG8, ARG9, ARG10) \
6121 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6122 ARG7, ARG8, ARG9, ARG10);
6123 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6124 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
6125 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6126 ARG7, ARG8, ARG9, ARG10, ARG11);
6127 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
6128 def_fn_type (ENUM, RETURN, 1, 0);
6129 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
6130 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
6131 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
6132 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
6133 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6134 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
6135 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6136 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
6137 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6138 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6139 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6141 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6142 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6144 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6145 #define DEF_POINTER_TYPE(ENUM, TYPE) \
6146 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
6148 #include "builtin-types.def"
6149 #include "ada-builtin-types.def"
6151 #undef DEF_PRIMITIVE_TYPE
6152 #undef DEF_FUNCTION_TYPE_0
6153 #undef DEF_FUNCTION_TYPE_1
6154 #undef DEF_FUNCTION_TYPE_2
6155 #undef DEF_FUNCTION_TYPE_3
6156 #undef DEF_FUNCTION_TYPE_4
6157 #undef DEF_FUNCTION_TYPE_5
6158 #undef DEF_FUNCTION_TYPE_6
6159 #undef DEF_FUNCTION_TYPE_7
6160 #undef DEF_FUNCTION_TYPE_8
6161 #undef DEF_FUNCTION_TYPE_9
6162 #undef DEF_FUNCTION_TYPE_10
6163 #undef DEF_FUNCTION_TYPE_11
6164 #undef DEF_FUNCTION_TYPE_VAR_0
6165 #undef DEF_FUNCTION_TYPE_VAR_1
6166 #undef DEF_FUNCTION_TYPE_VAR_2
6167 #undef DEF_FUNCTION_TYPE_VAR_3
6168 #undef DEF_FUNCTION_TYPE_VAR_4
6169 #undef DEF_FUNCTION_TYPE_VAR_5
6170 #undef DEF_FUNCTION_TYPE_VAR_6
6171 #undef DEF_FUNCTION_TYPE_VAR_7
6172 #undef DEF_POINTER_TYPE
/* BT_LAST is used as a "no type" marker by DEF_ADA_BUILTIN below.  */
6173 builtin_types[(int) BT_LAST] = NULL_TREE;
6176 /* ----------------------------------------------------------------------- *
6177 * BUILTIN ATTRIBUTES *
6178 * ----------------------------------------------------------------------- */
/* Same X-macro scheme as for the builtin types: first enumerate one
   built_in_attribute value per entry of builtin-attrs.def...  */
6180 enum built_in_attribute
6182 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
6183 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
6184 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
6185 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
6186 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
6187 #include "builtin-attrs.def"
6188 #undef DEF_ATTR_NULL_TREE
6190 #undef DEF_ATTR_STRING
6191 #undef DEF_ATTR_IDENT
6192 #undef DEF_ATTR_TREE_LIST
/* ... then hold the corresponding attribute trees, built below.  */
6196 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
/* Second expansion of builtin-attrs.def: build the attribute tree for each
   enumerator.  TREE_LIST entries may reference earlier entries, relying on
   the .def file listing dependencies first.  */
6199 install_builtin_attributes (void)
6201 /* Fill in the built_in_attributes array. */
6202 #define DEF_ATTR_NULL_TREE(ENUM) \
6203 built_in_attributes[(int) ENUM] = NULL_TREE;
6204 #define DEF_ATTR_INT(ENUM, VALUE) \
6205 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
6206 #define DEF_ATTR_STRING(ENUM, VALUE) \
6207 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
6208 #define DEF_ATTR_IDENT(ENUM, STRING) \
6209 built_in_attributes[(int) ENUM] = get_identifier (STRING);
6210 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
6211 built_in_attributes[(int) ENUM] \
6212 = tree_cons (built_in_attributes[(int) PURPOSE], \
6213 built_in_attributes[(int) VALUE], \
6214 built_in_attributes[(int) CHAIN]);
6215 #include "builtin-attrs.def"
6216 #undef DEF_ATTR_NULL_TREE
6218 #undef DEF_ATTR_STRING
6219 #undef DEF_ATTR_IDENT
6220 #undef DEF_ATTR_TREE_LIST
6223 /* Handle a "const" attribute; arguments as in
6224 struct attribute_spec.handler. */
6227 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6228 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* On a FUNCTION_DECL, TREE_READONLY is how the middle end records the
   "const" (no side effects) property.  */
6231 if (TREE_CODE (*node) == FUNCTION_DECL)
6232 TREE_READONLY (*node) = 1;
6234 *no_add_attrs = true;
6239 /* Handle a "nothrow" attribute; arguments as in
6240 struct attribute_spec.handler. */
6243 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6244 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* TREE_NOTHROW tells the exception machinery this function cannot
   propagate an exception.  */
6247 if (TREE_CODE (*node) == FUNCTION_DECL)
6248 TREE_NOTHROW (*node) = 1;
6250 *no_add_attrs = true;
6255 /* Handle a "pure" attribute; arguments as in
6256 struct attribute_spec.handler. */
6259 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6260 int ARG_UNUSED (flags), bool *no_add_attrs)
6262 if (TREE_CODE (*node) == FUNCTION_DECL)
6263 DECL_PURE_P (*node) = 1;
6264 /* TODO: support types. */
/* Anything other than a FUNCTION_DECL: warn and drop the attribute.  */
6267 warning (OPT_Wattributes, "%qs attribute ignored",
6268 IDENTIFIER_POINTER (name));
6269 *no_add_attrs = true;
6275 /* Handle a "no vops" attribute; arguments as in
6276 struct attribute_spec.handler. */
/* Internal-only attribute (applied to builtins, hence the assert rather
   than a user diagnostic): the function touches no caller-visible memory.  */
6279 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6280 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6281 bool *ARG_UNUSED (no_add_attrs))
6283 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6284 DECL_IS_NOVOPS (*node) = 1;
6288 /* Helper for nonnull attribute handling; fetch the operand number
6289 from the attribute argument list. */
/* Stores the value through VALP on success; the visible path returns early
   (failure) when ARG_NUM_EXPR does not fit an unsigned HOST_WIDE_INT.  */
6292 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6294 /* Verify the arg number is a constant. */
6295 if (!tree_fits_uhwi_p (arg_num_expr))
6298 *valp = TREE_INT_CST_LOW (arg_num_expr);
6302 /* Handle the "nonnull" attribute. */
6304 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6305 tree args, int ARG_UNUSED (flags),
6309 unsigned HOST_WIDE_INT attr_arg_num;
6311 /* If no arguments are specified, all pointer arguments should be
6312 non-null. Verify a full prototype is given so that the arguments
6313 will have the correct types when we actually check them later.
6314 Avoid diagnosing type-generic built-ins since those have no
6318 if (!prototype_p (type)
6319 && (!TYPE_ATTRIBUTES (type)
6320 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
6322 error ("%qs attribute without arguments on a non-prototype",
6324 *no_add_attrs = true;
6329 /* Argument list specified. Verify that each argument number references
6330 a pointer argument. */
6331 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6333 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
/* First, the operand itself must be a constant integer.  */
6335 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6337 error ("%qs argument has invalid operand number (argument %lu)",
6338 "nonnull", (unsigned long) attr_arg_num);
6339 *no_add_attrs = true;
/* With a prototype available, walk the parameter types to find the
   ARG_NUM-th one and check that it is a pointer.  */
6343 if (prototype_p (type))
6345 function_args_iterator iter;
6348 function_args_iter_init (&iter, type);
6349 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6351 argument = function_args_iter_cond (&iter);
6352 if (!argument || ck_num == arg_num)
/* Ran off the end of the parameter list (or hit the terminating
   void): ARG_NUM is out of range.  */
6357 || TREE_CODE (argument) == VOID_TYPE)
6359 error ("%qs argument with out-of-range operand number "
6360 "(argument %lu, operand %lu)", "nonnull",
6361 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6362 *no_add_attrs = true;
6366 if (TREE_CODE (argument) != POINTER_TYPE)
6368 error ("%qs argument references non-pointer operand "
6369 "(argument %lu, operand %lu)", "nonnull",
6370 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6371 *no_add_attrs = true;
6380 /* Handle a "sentinel" attribute. */
6383 handle_sentinel_attribute (tree *node, tree name, tree args,
6384 int ARG_UNUSED (flags), bool *no_add_attrs)
6386 if (!prototype_p (*node))
6388 warning (OPT_Wattributes,
6389 "%qs attribute requires prototypes with named arguments",
6390 IDENTIFIER_POINTER (name));
6391 *no_add_attrs = true;
6395 if (!stdarg_p (*node))
6397 warning (OPT_Wattributes,
6398 "%qs attribute only applies to variadic functions",
6399 IDENTIFIER_POINTER (name));
6400 *no_add_attrs = true;
6406 tree position = TREE_VALUE (args);
6408 if (TREE_CODE (position) != INTEGER_CST)
6410 warning (0, "requested position is not an integer constant");
6411 *no_add_attrs = true;
6415 if (tree_int_cst_lt (position, integer_zero_node))
6417 warning (0, "requested position is less than zero");
6418 *no_add_attrs = true;
6426 /* Handle a "noreturn" attribute; arguments as in
6427 struct attribute_spec.handler. */
6430 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6431 int ARG_UNUSED (flags), bool *no_add_attrs)
6433 tree type = TREE_TYPE (*node);
6435 /* See FIXME comment in c_common_attribute_table. */
/* TREE_THIS_VOLATILE on a FUNCTION_DECL means "does not return".  For a
   pointer-to-function, the property is encoded as a volatile qualifier
   on the pointed-to function type instead.  */
6436 if (TREE_CODE (*node) == FUNCTION_DECL)
6437 TREE_THIS_VOLATILE (*node) = 1;
6438 else if (TREE_CODE (type) == POINTER_TYPE
6439 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6441 = build_pointer_type
6442 (change_qualified_type (TREE_TYPE (type), TYPE_QUAL_VOLATILE));
6445 warning (OPT_Wattributes, "%qs attribute ignored",
6446 IDENTIFIER_POINTER (name));
6447 *no_add_attrs = true;
6453 /* Handle a "stack_protect" attribute; arguments as in
6454 struct attribute_spec.handler. */
/* Only valid on function declarations; the attribute itself is consulted
   later via lookup_attribute, so only a sanity check is needed here.  */
6457 handle_stack_protect_attribute (tree *node, tree name, tree, int,
6460 if (TREE_CODE (*node) != FUNCTION_DECL)
6462 warning (OPT_Wattributes, "%qE attribute ignored", name);
6463 *no_add_attrs = true;
6469 /* Handle a "noinline" attribute; arguments as in
6470 struct attribute_spec.handler. */
6473 handle_noinline_attribute (tree *node, tree name,
6474 tree ARG_UNUSED (args),
6475 int ARG_UNUSED (flags), bool *no_add_attrs)
6477 if (TREE_CODE (*node) == FUNCTION_DECL)
/* "noinline" and "always_inline" are mutually exclusive; keep the one
   already present and drop this one.  */
6479 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6481 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6482 "with attribute %qs", name, "always_inline");
6483 *no_add_attrs = true;
6486 DECL_UNINLINABLE (*node) = 1;
6490 warning (OPT_Wattributes, "%qE attribute ignored", name);
6491 *no_add_attrs = true;
6497 /* Handle a "noclone" attribute; arguments as in
6498 struct attribute_spec.handler. */
/* Only a validity check; IPA cloning consults the attribute later.  */
6501 handle_noclone_attribute (tree *node, tree name,
6502 tree ARG_UNUSED (args),
6503 int ARG_UNUSED (flags), bool *no_add_attrs)
6505 if (TREE_CODE (*node) != FUNCTION_DECL)
6507 warning (OPT_Wattributes, "%qE attribute ignored", name);
6508 *no_add_attrs = true;
6514 /* Handle a "no_icf" attribute; arguments as in
6515 struct attribute_spec.handler. */
/* Excludes the function from identical-code-folding; validity check only.  */
6518 handle_noicf_attribute (tree *node, tree name,
6519 tree ARG_UNUSED (args),
6520 int ARG_UNUSED (flags), bool *no_add_attrs)
6522 if (TREE_CODE (*node) != FUNCTION_DECL)
6524 warning (OPT_Wattributes, "%qE attribute ignored", name);
6525 *no_add_attrs = true;
6531 /* Handle a "noipa" attribute; arguments as in
6532 struct attribute_spec.handler. */
/* Disables interprocedural optimizations across this function's calls;
   validity check only.  */
6535 handle_noipa_attribute (tree *node, tree name, tree, int, bool *no_add_attrs)
6537 if (TREE_CODE (*node) != FUNCTION_DECL)
6539 warning (OPT_Wattributes, "%qE attribute ignored", name);
6540 *no_add_attrs = true;
6546 /* Handle a "leaf" attribute; arguments as in
6547 struct attribute_spec.handler. */
6550 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6551 int ARG_UNUSED (flags), bool *no_add_attrs)
6553 if (TREE_CODE (*node) != FUNCTION_DECL)
6555 warning (OPT_Wattributes, "%qE attribute ignored", name);
6556 *no_add_attrs = true;
/* "leaf" only matters for externally visible functions: it promises the
   callee will not call back into the current translation unit.  */
6558 if (!TREE_PUBLIC (*node))
6560 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6561 *no_add_attrs = true;
6567 /* Handle a "always_inline" attribute; arguments as in
6568 struct attribute_spec.handler. */
6571 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6572 int ARG_UNUSED (flags), bool *no_add_attrs)
6574 if (TREE_CODE (*node) == FUNCTION_DECL)
6576 /* Set the attribute and mark it for disregarding inline limits. */
6577 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6581 warning (OPT_Wattributes, "%qE attribute ignored", name);
6582 *no_add_attrs = true;
6588 /* Handle a "malloc" attribute; arguments as in
6589 struct attribute_spec.handler. */
/* Accepted only on functions returning a pointer: DECL_IS_MALLOC tells
   the aliasing machinery the returned pointer does not alias anything
   live at the call.  */
6592 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6593 int ARG_UNUSED (flags), bool *no_add_attrs)
6595 if (TREE_CODE (*node) == FUNCTION_DECL
6596 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6597 DECL_IS_MALLOC (*node) = 1;
6600 warning (OPT_Wattributes, "%qs attribute ignored",
6601 IDENTIFIER_POINTER (name));
6602 *no_add_attrs = true;
6608 /* Fake handler for attributes we don't properly support. */
/* Intentionally does nothing: the attribute is accepted (and recorded)
   without any checking or decl flag updates.  */
6611 fake_attribute_handler (tree * ARG_UNUSED (node),
6612 tree ARG_UNUSED (name),
6613 tree ARG_UNUSED (args),
6614 int ARG_UNUSED (flags),
6615 bool * ARG_UNUSED (no_add_attrs))
6620 /* Handle a "type_generic" attribute. */
/* Internal attribute, only put on builtins, hence asserts rather than
   user diagnostics.  */
6623 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6624 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6625 bool * ARG_UNUSED (no_add_attrs))
6627 /* Ensure we have a function type. */
6628 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6630 /* Ensure we have a variadic function. */
6631 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6636 /* Handle a "flatten" attribute; arguments as in
6637 struct attribute_spec.handler. */
6640 handle_flatten_attribute (tree *node, tree name,
6641 tree args ATTRIBUTE_UNUSED,
6642 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
6644 if (TREE_CODE (*node) == FUNCTION_DECL)
6645 /* Do nothing else, just set the attribute. We'll get at
6646 it later with lookup_attribute. */
6650 warning (OPT_Wattributes, "%qE attribute ignored", name);
6651 *no_add_attrs = true;
6657 /* Handle a "used" attribute; arguments as in
6658 struct attribute_spec.handler. */
6661 handle_used_attribute (tree *pnode, tree name, tree ARG_UNUSED (args),
6662 int ARG_UNUSED (flags), bool *no_add_attrs)
/* Accepted on functions, static/global variables and type decls.  */
6666 if (TREE_CODE (node) == FUNCTION_DECL
6667 || (VAR_P (node) && TREE_STATIC (node))
6668 || (TREE_CODE (node) == TYPE_DECL))
/* DECL_PRESERVE_P keeps the entity from being removed even if it
   appears unreferenced.  */
6670 TREE_USED (node) = 1;
6671 DECL_PRESERVE_P (node) = 1;
6673 DECL_READ_P (node) = 1;
6677 warning (OPT_Wattributes, "%qE attribute ignored", name);
6678 *no_add_attrs = true;
6684 /* Handle a "cold" attribute; arguments as in
6685 struct attribute_spec.handler. */
6688 handle_cold_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6689 int ARG_UNUSED (flags), bool *no_add_attrs)
6691 if (TREE_CODE (*node) == FUNCTION_DECL
6692 || TREE_CODE (*node) == LABEL_DECL)
6694 /* Attribute cold processing is done later with lookup_attribute. */
6698 warning (OPT_Wattributes, "%qE attribute ignored", name);
6699 *no_add_attrs = true;
6705 /* Handle a "hot" attribute; arguments as in
6706 struct attribute_spec.handler. */
6709 handle_hot_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6710 int ARG_UNUSED (flags), bool *no_add_attrs)
6712 if (TREE_CODE (*node) == FUNCTION_DECL
6713 || TREE_CODE (*node) == LABEL_DECL)
6715 /* Attribute hot processing is done later with lookup_attribute. */
6719 warning (OPT_Wattributes, "%qE attribute ignored", name);
6720 *no_add_attrs = true;
6726 /* Handle a "target" attribute. */
6729 handle_target_attribute (tree *node, tree name, tree args, int flags,
6732 /* Ensure we have a function type. */
6733 if (TREE_CODE (*node) != FUNCTION_DECL)
6735 warning (OPT_Wattributes, "%qE attribute ignored", name);
6736 *no_add_attrs = true;
/* "target" and "target_clones" are mutually exclusive.  */
6738 else if (lookup_attribute ("target_clones", DECL_ATTRIBUTES (*node)))
6740 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6741 "with %qs attribute", name, "target_clones");
6742 *no_add_attrs = true;
/* Let the back-end validate the option strings.  */
6744 else if (!targetm.target_option.valid_attribute_p (*node, name, args, flags))
6745 *no_add_attrs = true;
6747 /* Check that there's no empty string in values of the attribute. */
6748 for (tree t = args; t != NULL_TREE; t = TREE_CHAIN (t))
6750 tree value = TREE_VALUE (t);
6751 if (TREE_CODE (value) == STRING_CST
6752 && TREE_STRING_LENGTH (value) == 1
6753 && TREE_STRING_POINTER (value)[0] == '\0')
6755 warning (OPT_Wattributes, "empty string in attribute %<target%>");
6756 *no_add_attrs = true;
6763 /* Handle a "target_clones" attribute. */
6766 handle_target_clones_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6767 int ARG_UNUSED (flags), bool *no_add_attrs)
6769 /* Ensure we have a function type. */
6770 if (TREE_CODE (*node) == FUNCTION_DECL)
/* Conflicts: mutually exclusive with "always_inline" and "target".  */
6772 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6774 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6775 "with %qs attribute", name, "always_inline");
6776 *no_add_attrs = true;
6778 else if (lookup_attribute ("target", DECL_ATTRIBUTES (*node)))
6780 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6781 "with %qs attribute", name, "target");
6782 *no_add_attrs = true;
6785 /* Do not inline functions with multiple clone targets. */
6786 DECL_UNINLINABLE (*node) = 1;
6790 warning (OPT_Wattributes, "%qE attribute ignored", name);
6791 *no_add_attrs = true;
6796 /* Handle a "vector_size" attribute; arguments as in
6797 struct attribute_spec.handler. */
6800 handle_vector_size_attribute (tree *node, tree name, tree args,
6801 int ARG_UNUSED (flags), bool *no_add_attrs)
/* The attribute is always consumed here: either a vector type is built
   and substituted, or the attribute is silently dropped.  */
6806 *no_add_attrs = true;
6808 /* We need to provide for vector pointers, vector arrays, and
6809 functions returning vectors. For example:
6811 __attribute__((vector_size(16))) short *foo;
6813 In this case, the mode is SI, but the type being modified is
6814 HI, so we need to look further. */
6815 while (POINTER_TYPE_P (type)
6816 || TREE_CODE (type) == FUNCTION_TYPE
6817 || TREE_CODE (type) == ARRAY_TYPE)
6818 type = TREE_TYPE (type);
6820 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6824 /* Build back pointers if needed. */
6825 *node = reconstruct_complex_type (*node, vector_type);
6830 /* Handle a "vector_type" attribute; arguments as in
6831 struct attribute_spec.handler. */
6834 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6835 int ARG_UNUSED (flags), bool *no_add_attrs)
6840 *no_add_attrs = true;
/* Unlike vector_size, this attribute only applies to array types, which
   it turns into the corresponding vector type.  */
6842 if (TREE_CODE (type) != ARRAY_TYPE)
6844 error ("attribute %qs applies to array types only",
6845 IDENTIFIER_POINTER (name));
6849 vector_type = build_vector_type_for_array (type, name);
/* Remember the original array type for debug-info purposes.  */
6853 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6854 *node = vector_type;
6859 /* ----------------------------------------------------------------------- *
6860 * BUILTIN FUNCTIONS *
6861 * ----------------------------------------------------------------------- */
6863 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6864 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6865 if nonansi_p and flag_no_nonansi_builtin. */
6868 def_builtin_1 (enum built_in_function fncode,
6870 enum built_in_class fnclass,
6871 tree fntype, tree libtype,
6872 bool both_p, bool fallback_p,
6873 bool nonansi_p ATTRIBUTE_UNUSED,
6874 tree fnattrs, bool implicit_p)
6877 const char *libname;
6879 /* Preserve an already installed decl. It most likely was setup in advance
6880 (e.g. as part of the internal builtins) for specific reasons. */
6881 if (builtin_decl_explicit (fncode))
/* An unavailable function type (see builtin_type_for_size) means the
   builtin cannot be declared at all.  */
6884 if (fntype == error_mark_node)
6887 gcc_assert ((!both_p && !fallback_p)
6888 || !strncmp (name, "__builtin_",
6889 strlen ("__builtin_")));
/* Strip the "__builtin_" prefix to get the library name.  */
6891 libname = name + strlen ("__builtin_");
6892 decl = add_builtin_function (name, fntype, fncode, fnclass,
6893 (fallback_p ? libname : NULL),
6896 /* ??? This is normally further controlled by command-line options
6897 like -fno-builtin, but we don't have them for Ada. */
6898 add_builtin_function (libname, libtype, fncode, fnclass,
6901 set_builtin_decl (fncode, decl, implicit_p);
/* builtins.def conditionalizes some entries on the C standard level; Ada
   has no such notion, so provide the flags it references, all zero.  */
6904 static int flag_isoc94 = 0;
6905 static int flag_isoc99 = 0;
6906 static int flag_isoc11 = 0;
6907 static int flag_isoc2x = 0;
6909 /* Install what the common builtins.def offers plus our local additions.
6911 Note that ada-builtins.def is included first so that locally redefined
6912 built-in functions take precedence over the commonly defined ones. */
6915 install_builtin_functions (void)
6917 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6918 NONANSI_P, ATTRS, IMPLICIT, COND) \
6920 def_builtin_1 (ENUM, NAME, CLASS, \
6921 builtin_types[(int) TYPE], \
6922 builtin_types[(int) LIBTYPE], \
6923 BOTH_P, FALLBACK_P, NONANSI_P, \
6924 built_in_attributes[(int) ATTRS], IMPLICIT);
/* Ada-specific builtins have no library counterpart, hence LIBTYPE is
   BT_LAST (mapped to NULL_TREE above) and both_p/fallback_p are false.  */
6925 #define DEF_ADA_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
6926 DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_FRONTEND, TYPE, BT_LAST, \
6927 false, false, false, ATTRS, true, true)
6928 #include "ada-builtins.def"
6929 #include "builtins.def"
6932 /* ----------------------------------------------------------------------- *
6933 * BUILTIN FUNCTIONS *
6934 * ----------------------------------------------------------------------- */
6936 /* Install the builtin functions we might need. */
/* Driver for the whole builtins machinery above; ordering matters: types
   and attributes must exist before any function decl references them.  */
6939 gnat_install_builtins (void)
6941 install_builtin_elementary_types ();
6942 install_builtin_function_types ();
6943 install_builtin_attributes ();
6945 /* Install builtins used by generic middle-end pieces first. Some of these
6946 know about internal specificities and control attributes accordingly, for
6947 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6948 the generic definition from builtins.def. */
6949 build_common_builtin_nodes ();
6951 /* Now, install the target specific builtins, such as the AltiVec family on
6952 ppc, and the common set as exposed by builtins.def. */
6953 targetm.init_builtins ();
6954 install_builtin_functions ();
6957 #include "gt-ada-utils.h"
6958 #include "gtype-ada.h"