1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2016, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
32 #include "stringpool.h"
34 #include "diagnostic.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
44 #include "common/common-target.h"
45 #include "langhooks.h"
46 #include "tree-dump.h"
47 #include "tree-inline.h"
60 /* If nonzero, pretend we are allocating at global level. */
63 /* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66 int double_float_alignment;
68 /* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71 int double_scalar_alignment;
73 /* True if floating-point arithmetics may use wider intermediate results. */
74 bool fp_arith_may_widen = true;
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82 /* Likewise, but with extra info for each of the possible raise reasons. */
83 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
/* NOTE(review): the parameters presumably follow the GCC attribute handler
   convention (node, name, args, flags, no_add_attrs) — confirm against the
   attribute_spec handler typedef in tree-core.h. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
102 /* Fake handler for attributes we don't properly support, typically because
103 they'd require dragging a lot of the common-c front-end circuitry. */
104 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
106 /* Table of machine-independent internal attributes for Ada. We support
107 this minimal set of attributes to accommodate the needs of builtins. */
108 const struct attribute_spec gnat_internal_attribute_table[] =
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "const", 0, 0, true, false, false, handle_const_attribute,
114 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
116 { "pure", 0, 0, true, false, false, handle_pure_attribute,
118 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
120 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
122 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
124 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
126 { "noinline", 0, 0, true, false, false, handle_noinline_attribute,
128 { "noclone", 0, 0, true, false, false, handle_noclone_attribute,
130 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
132 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
134 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
136 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
139 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
141 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
143 { "may_alias", 0, 0, false, true, false, NULL, false },
145 /* ??? format and format_arg are heavy and not supported, which actually
146 prevents support for stdio builtins, which we however declare as part
147 of the common builtins.def contents. */
148 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
149 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
/* The table is terminated by an all-NULL/false sentinel entry, as required
   by the generic attribute machinery. */
151 { NULL, 0, 0, false, false, false, NULL, false }
154 /* Associates a GNAT tree node to a GCC tree node. It is used in
155 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
156 of `save_gnu_tree' for more info. */
157 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
/* The three macros below index the table by the GNAT entity's Node_Id,
   rebased so that First_Node_Id maps to slot 0. */
159 #define GET_GNU_TREE(GNAT_ENTITY) \
160 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
162 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
163 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
165 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
166 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
168 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
169 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
/* Same rebased-by-First_Node_Id indexing scheme as the GNU tree macros. */
171 #define GET_DUMMY_NODE(GNAT_ENTITY) \
172 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
174 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
175 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
177 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
178 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
180 /* This variable keeps a table for types for each precision so that we only
181 allocate each of them once. Signed and unsigned types are kept separate.
183 Note that these types are only used when fold-const requests something
184 special. Perhaps we should NOT share these types; we'll see how it
186 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
188 /* Likewise for float types, but record these by mode. */
189 static GTY(()) tree float_types[NUM_MACHINE_MODES];
191 /* For each binding contour we allocate a binding_level structure to indicate
192 the binding depth. */
194 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
195 /* The binding level containing this one (the enclosing binding level). */
196 struct gnat_binding_level *chain;
197 /* The BLOCK node for this level. */
199 /* If nonzero, the setjmp buffer that needs to be updated for any
200 variable-sized definition within this context. */
204 /* The binding level currently in effect. */
205 static GTY(()) struct gnat_binding_level *current_binding_level;
207 /* A chain of gnat_binding_level structures awaiting reuse. */
208 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
210 /* The context to be used for global declarations. */
211 static GTY(()) tree global_context;
213 /* An array of global declarations. */
214 static GTY(()) vec<tree, va_gc> *global_decls;
216 /* An array of builtin function declarations. */
217 static GTY(()) vec<tree, va_gc> *builtin_decls;
219 /* A chain of unused BLOCK nodes. */
220 static GTY((deletable)) tree free_block_chain;
222 /* A hash table of padded types. It is modelled on the generic type
223 hash table in tree.c, which must thus be used as a reference. */
225 struct GTY((for_user)) pad_type_hash {
230 struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
232 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
233 static bool equal (pad_type_hash *a, pad_type_hash *b);
234 static int keep_cache_entry (pad_type_hash *&);
238 hash_table<pad_type_hasher> *pad_type_hash_table;
/* Forward declarations for local helpers defined later in this file. */
240 static tree merge_sizes (tree, tree, tree, bool, bool);
241 static tree compute_related_constant (tree, tree);
242 static tree split_plus (tree, tree *);
243 static tree float_type_for_precision (int, machine_mode);
244 static tree convert_to_fat_pointer (tree, tree);
245 static unsigned int scale_by_factor_of (tree, unsigned int);
246 static bool potential_alignment_gap (tree, tree, tree);
248 /* A linked list used as a queue to defer the initialization of the
249 DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
250 of ..._TYPE nodes. */
251 struct deferred_decl_context_node
253 tree decl; /* The ..._DECL node to work on. */
254 Entity_Id gnat_scope; /* The corresponding entity's Scope attribute. */
255 int force_global; /* force_global value when pushing DECL. */
256 vec<tree, va_heap, vl_ptr> types; /* A list of ..._TYPE nodes to propagate the
258 struct deferred_decl_context_node *next; /* The next queue item. */
261 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
263 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
264 feed it with the elaboration of GNAT_SCOPE. */
265 static struct deferred_decl_context_node *
266 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
268 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
269 feed it with the DECL_CONTEXT computed as part of N as soon as it is
271 static void add_deferred_type_context (struct deferred_decl_context_node *n,
274 /* Initialize data structures of the utils.c module. */
/* All three tables are garbage-collector managed (ggc); their lifetime ends
   in destroy_gnat_utils below. The two node tables are sized by
   max_gnat_nodes and zero-initialized. */
277 init_gnat_utils (void)
279 /* Initialize the association of GNAT nodes to GCC trees. */
280 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
282 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
283 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
285 /* Initialize the hash table of padded types. */
286 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
289 /* Destroy data structures of the utils.c module. */
/* Counterpart of init_gnat_utils: releases the two node tables and drops the
   padded-type hash table so the GC can reclaim it. */
292 destroy_gnat_utils (void)
294 /* Destroy the association of GNAT nodes to GCC trees. */
295 ggc_free (associate_gnat_to_gnu);
296 associate_gnat_to_gnu = NULL;
298 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
299 ggc_free (dummy_node_table);
300 dummy_node_table = NULL;
302 /* Destroy the hash table of padded types. */
303 pad_type_hash_table->empty ();
304 pad_type_hash_table = NULL;
307 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
308 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
309 If NO_CHECK is true, the latter check is suppressed.
311 If GNU_DECL is zero, reset a previous association. */
314 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
316 /* Check that GNAT_ENTITY is not already defined and that it is being set
317 to something which is a decl. If that is not the case, this usually
318 means GNAT_ENTITY is defined twice, but occasionally is due to some
320 gcc_assert (!(gnu_decl
321 && (PRESENT_GNU_TREE (gnat_entity)
322 || (!no_check && !DECL_P (gnu_decl)))));
/* Note: a null GNU_DECL passes the assertion above, so resetting an
   association is always allowed. */
324 SET_GNU_TREE (gnat_entity, gnu_decl);
327 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
328 that was associated with it. If there is no such tree node, abort.
330 In some cases, such as delayed elaboration or expressions that need to
331 be elaborated only once, GNAT_ENTITY is really not an entity. */
334 get_gnu_tree (Entity_Id gnat_entity)
/* The association must have been made earlier by save_gnu_tree. */
336 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
337 return GET_GNU_TREE (gnat_entity);
340 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
/* Non-destructive query; pairs with save_gnu_tree/get_gnu_tree above. */
343 present_gnu_tree (Entity_Id gnat_entity)
345 return PRESENT_GNU_TREE (gnat_entity);
348 /* Make a dummy type corresponding to GNAT_TYPE. */
351 make_dummy_type (Entity_Id gnat_type)
353 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
356 /* If there was no equivalent type (can only happen when just annotating
357 types) or underlying type, go back to the original type. */
359 gnat_equiv = gnat_type;
361 /* If there is already a dummy type, use that one. Else make one. */
362 if (PRESENT_DUMMY_NODE (gnat_equiv))
363 return GET_DUMMY_NODE (gnat_equiv);
365 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
367 gnu_type = make_node (Is_Record_Type (gnat_equiv)
368 ? tree_code_for_record_type (gnat_equiv)
370 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
371 TYPE_DUMMY_P (gnu_type) = 1;
372 TYPE_STUB_DECL (gnu_type)
373 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
374 if (Is_By_Reference_Type (gnat_equiv))
375 TYPE_BY_REFERENCE_P (gnu_type) = 1;
/* Cache the dummy under the equivalent type so later calls reuse it. */
377 SET_DUMMY_NODE (gnat_equiv, gnu_type);
382 /* Return the dummy type that was made for GNAT_TYPE, if any. */
/* May return NULL_TREE when no dummy was ever made for GNAT_TYPE, since
   dummy_node_table entries start out cleared. */
385 get_dummy_type (Entity_Id gnat_type)
387 return GET_DUMMY_NODE (gnat_type);
390 /* Build dummy fat and thin pointer types whose designated type is specified
391 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
394 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
396 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
397 tree gnu_fat_type, fields, gnu_object_type;
/* Dummy bounds template ("XUB" suffix in GNAT encodings). */
399 gnu_template_type = make_node (RECORD_TYPE);
400 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
401 TYPE_DUMMY_P (gnu_template_type) = 1;
402 gnu_ptr_template = build_pointer_type (gnu_template_type);
/* Dummy array type ("XUA"). NOTE(review): it is deliberately crafted as an
   ENUMERAL_TYPE placeholder here, flagged TYPE_DUMMY_P — confirm this is the
   intended stand-in code for the not-yet-elaborated array. */
404 gnu_array_type = make_node (ENUMERAL_TYPE);
405 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
406 TYPE_DUMMY_P (gnu_array_type) = 1;
407 gnu_ptr_array = build_pointer_type (gnu_array_type);
/* The fat pointer ("XUP") carries both the array pointer and the bounds
   pointer as fields. */
409 gnu_fat_type = make_node (RECORD_TYPE);
410 /* Build a stub DECL to trigger the special processing for fat pointer types
412 TYPE_NAME (gnu_fat_type)
413 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
415 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
416 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
418 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
419 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
420 finish_fat_pointer_type (gnu_fat_type, fields);
421 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
422 /* Suppress debug info until after the type is completed. */
423 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
/* Dummy object record type ("XUT") underlying the thin pointer. */
425 gnu_object_type = make_node (RECORD_TYPE);
426 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
427 TYPE_DUMMY_P (gnu_object_type) = 1;
429 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
430 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
433 /* Return true if we are in the global binding level. */
/* True either when forced global (force_global) or when no function is
   currently being compiled. */
436 global_bindings_p (void)
438 return force_global || !current_function_decl;
441 /* Enter a new binding level. */
/* Both the binding-level structure and the BLOCK node are recycled from the
   free_binding_level / free_block_chain free lists when possible. */
444 gnat_pushlevel (void)
446 struct gnat_binding_level *newlevel = NULL;
448 /* Reuse a struct for this binding level, if there is one. */
449 if (free_binding_level)
451 newlevel = free_binding_level;
452 free_binding_level = free_binding_level->chain;
455 newlevel = ggc_alloc<gnat_binding_level> ();
457 /* Use a free BLOCK, if any; otherwise, allocate one. */
458 if (free_block_chain)
460 newlevel->block = free_block_chain;
461 free_block_chain = BLOCK_CHAIN (free_block_chain);
462 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
465 newlevel->block = make_node (BLOCK);
467 /* Point the BLOCK we just made to its parent. */
468 if (current_binding_level)
469 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
/* Recycled BLOCKs may carry stale contents: reset vars and subblocks. */
471 BLOCK_VARS (newlevel->block) = NULL_TREE;
472 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
473 TREE_USED (newlevel->block) = 1;
475 /* Add this level to the front of the chain (stack) of active levels. */
476 newlevel->chain = current_binding_level;
477 newlevel->jmpbuf_decl = NULL_TREE;
478 current_binding_level = newlevel;
481 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
482 and point FNDECL to this BLOCK. */
/* Makes the current level's BLOCK the outermost block of FNDECL
   (its DECL_INITIAL) and registers it with the statement-group machinery. */
485 set_current_block_context (tree fndecl)
487 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
488 DECL_INITIAL (fndecl) = current_binding_level->block;
489 set_block_for_group (current_binding_level->block);
492 /* Set the jmpbuf_decl for the current binding level to DECL. */
/* See the gnat_binding_level comment: the buffer must be updated for any
   variable-sized definition within this context. */
495 set_block_jmpbuf_decl (tree decl)
497 current_binding_level->jmpbuf_decl = decl;
500 /* Get the jmpbuf_decl, if any, for the current binding level. */
/* Returns NULL_TREE when no setjmp buffer was registered for this level. */
503 get_block_jmpbuf_decl (void)
505 return current_binding_level->jmpbuf_decl;
508 /* Exit a binding level. Set any BLOCK into the current code group. */
/* NOTE(review): the function header line is not visible in this chunk;
   presumably this is gnat_poplevel — confirm against the full file. */
513 struct gnat_binding_level *level = current_binding_level;
514 tree block = level->block;
/* Declarations were pushed in reverse order; restore source order now. */
516 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
517 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
519 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
520 are no variables free the block and merge its subblocks into those of its
521 parent block. Otherwise, add it to the list of its parent. */
522 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
524 else if (!BLOCK_VARS (block))
526 BLOCK_SUBBLOCKS (level->chain->block)
527 = block_chainon (BLOCK_SUBBLOCKS (block),
528 BLOCK_SUBBLOCKS (level->chain->block));
529 BLOCK_CHAIN (block) = free_block_chain;
530 free_block_chain = block;
534 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
535 BLOCK_SUBBLOCKS (level->chain->block) = block;
536 TREE_USED (block) = 1;
537 set_block_for_group (block);
540 /* Free this binding structure. */
541 current_binding_level = level->chain;
542 level->chain = free_binding_level;
543 free_binding_level = level;
546 /* Exit a binding level and discard the associated BLOCK. */
/* NOTE(review): the function header line is not visible in this chunk;
   presumably this is gnat_zaplevel — confirm against the full file.
   Unlike the pop path above, the BLOCK is unconditionally returned to
   free_block_chain for reuse. */
551 struct gnat_binding_level *level = current_binding_level;
552 tree block = level->block;
554 BLOCK_CHAIN (block) = free_block_chain;
555 free_block_chain = block;
557 /* Free this binding structure. */
558 current_binding_level = level->chain;
559 level->chain = free_binding_level;
560 free_binding_level = level;
563 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
566 gnat_set_type_context (tree type, tree context)
568 tree decl = TYPE_STUB_DECL (type);
570 TYPE_CONTEXT (type) = context;
/* Walk the chain of parallel types hanging off the stub decl. */
572 while (decl && DECL_PARALLEL_TYPE (decl))
574 tree parallel_type = DECL_PARALLEL_TYPE (decl);
576 /* Give a context to the parallel types and their stub decl, if any.
577 Some parallel types seem to be present in multiple parallel type
578 chains, so don't mess with their context if they already have one. */
579 if (!TYPE_CONTEXT (parallel_type))
581 if (TYPE_STUB_DECL (parallel_type))
582 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
583 TYPE_CONTEXT (parallel_type) = context;
586 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
590 /* Return the innermost scope, starting at GNAT_NODE, we are interested in
591 the debug info, or Empty if there is no such scope. If not NULL, set
592 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
595 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
597 Entity_Id gnat_entity;
600 *is_subprogram = false;
602 if (Nkind (gnat_node) == N_Defining_Identifier
603 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
604 gnat_entity = Scope (gnat_node);
/* Climb the Scope chain until an interesting entity (or Empty) is found. */
608 while (Present (gnat_entity))
610 switch (Ekind (gnat_entity))
614 if (Present (Protected_Body_Subprogram (gnat_entity)))
615 gnat_entity = Protected_Body_Subprogram (gnat_entity);
617 /* If the scope is a subprogram, then just rely on
618 current_function_decl, so that we don't have to defer
619 anything. This is needed because other places rely on the
620 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
622 *is_subprogram = true;
626 case E_Record_Subtype:
630 /* By default, we are not interested in this particular scope: go to
635 gnat_entity = Scope (gnat_entity);
641 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
645 defer_or_set_type_context (tree type, tree context,
646 struct deferred_decl_context_node *n)
/* With a pending deferral node, queue TYPE; otherwise set the context now. */
649 add_deferred_type_context (n, type);
651 gnat_set_type_context (type, context);
654 /* Return global_context, but create it first if need be. */
/* Lazily builds the TRANSLATION_UNIT_DECL and registers it as the main
   translation unit with the debug hooks, exactly once. */
657 get_global_context (void)
661 global_context = build_translation_unit_decl (NULL_TREE);
662 debug_hooks->register_main_translation_unit (global_context);
665 return global_context;
668 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
669 for location information and flag propagation. */
672 gnat_pushdecl (tree decl, Node_Id gnat_node)
674 tree context = NULL_TREE;
675 struct deferred_decl_context_node *deferred_decl_context = NULL;
677 /* If explicitly asked to make DECL global or if it's an imported nested
678 object, short-circuit the regular Scope-based context computation. */
679 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
681 /* Rely on the GNAT scope, or fallback to the current_function_decl if
682 the GNAT scope reached the global scope, if it reached a subprogram
683 or the declaration is a subprogram or a variable (for them we skip
684 intermediate context types because the subprogram body elaboration
685 machinery and the inliner both expect a subprogram context).
687 Falling back to current_function_decl is necessary for implicit
688 subprograms created by gigi, such as the elaboration subprograms. */
689 bool context_is_subprogram = false;
690 const Entity_Id gnat_scope
691 = get_debug_scope (gnat_node, &context_is_subprogram);
693 if (Present (gnat_scope)
694 && !context_is_subprogram
695 && TREE_CODE (decl) != FUNCTION_DECL
696 && TREE_CODE (decl) != VAR_DECL)
697 /* Always assume the scope has not been elaborated, thus defer the
698 context propagation to the time its elaboration will be
700 deferred_decl_context
701 = add_deferred_decl_context (decl, gnat_scope, force_global);
703 /* External declarations (when force_global > 0) may not be in a
705 else if (current_function_decl && force_global == 0)
706 context = current_function_decl;
709 /* If either we are forced to be in global mode or if both the GNAT scope and
710 the current_function_decl did not help in determining the context, use the
712 if (!deferred_decl_context && !context)
713 context = get_global_context ();
715 /* Functions imported in another function are not really nested.
716 For really nested functions mark them initially as needing
717 a static chain for uses of that flag before unnesting;
718 lower_nested_functions will then recompute it. */
719 if (TREE_CODE (decl) == FUNCTION_DECL
720 && !TREE_PUBLIC (decl)
722 && (TREE_CODE (context) == FUNCTION_DECL
723 || decl_function_context (context)))
724 DECL_STATIC_CHAIN (decl) = 1;
726 if (!deferred_decl_context)
727 DECL_CONTEXT (decl) = context;
/* Suppress warnings for declarations without a GNAT node or with warnings
   explicitly turned off on the GNAT side. */
729 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
731 /* Set the location of DECL and emit a declaration for it. */
732 if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
733 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
735 add_decl_expr (decl, gnat_node);
737 /* Put the declaration on the list. The list of declarations is in reverse
738 order. The list will be reversed later. Put global declarations in the
739 globals list and local ones in the current block. But skip TYPE_DECLs
740 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
741 with the debugger and aren't needed anyway. */
742 if (!(TREE_CODE (decl) == TYPE_DECL
743 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
745 if (DECL_EXTERNAL (decl))
747 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
748 vec_safe_push (builtin_decls, decl);
750 else if (global_bindings_p ())
751 vec_safe_push (global_decls, decl);
754 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
755 BLOCK_VARS (current_binding_level->block) = decl;
759 /* For the declaration of a type, set its name either if it isn't already
760 set or if the previous type name was not derived from a source name.
761 We'd rather have the type named with a real name and all the pointer
762 types to the same object have the same node, except when the names are
763 both derived from source names. */
764 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
766 tree t = TREE_TYPE (decl);
768 /* Array and pointer types aren't tagged types in the C sense so we need
769 to generate a typedef in DWARF for them and make sure it is preserved,
770 unless the type is artificial. */
771 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
772 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
773 || DECL_ARTIFICIAL (decl)))
775 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
776 generate the typedef in DWARF. Also do that for fat pointer types
777 because, even though they are tagged types in the C sense, they are
778 still XUP types attached to the base array type at this point. */
779 else if (!DECL_ARTIFICIAL (decl)
780 && (TREE_CODE (t) == ARRAY_TYPE
781 || TREE_CODE (t) == POINTER_TYPE
782 || TYPE_IS_FAT_POINTER_P (t)))
785 /* ??? Copy and original type are not supposed to be variant but we
786 really need a variant for the placeholder machinery to work. */
787 if (TYPE_IS_FAT_POINTER_P (t))
788 tt = build_variant_type_copy (t);
791 /* TYPE_NEXT_PTR_TO is a chain of main variants. */
792 tt = build_distinct_type_copy (TYPE_MAIN_VARIANT (t));
793 if (TREE_CODE (t) == POINTER_TYPE)
794 TYPE_NEXT_PTR_TO (TYPE_MAIN_VARIANT (t)) = tt;
795 tt = build_qualified_type (tt, TYPE_QUALS (t));
797 TYPE_NAME (tt) = decl;
798 defer_or_set_type_context (tt,
800 deferred_decl_context);
801 TREE_USED (tt) = TREE_USED (t);
802 TREE_TYPE (decl) = tt;
804 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
805 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
806 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
808 DECL_ORIGINAL_TYPE (decl) = t;
809 /* Array types need to have a name so that they can be related to
810 their GNAT encodings. */
811 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
812 TYPE_NAME (t) = DECL_NAME (decl);
815 else if (TYPE_NAME (t)
816 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
817 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
822 /* Propagate the name to all the variants, this is needed for the type
823 qualifiers machinery to work properly (see check_qualified_type).
824 Also propagate the context to them. Note that it will be propagated
825 to all parallel types too thanks to gnat_set_type_context. */
827 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
828 /* ??? Because of the previous kludge, we can have variants of fat
829 pointer types with different names. */
830 if (!(TYPE_IS_FAT_POINTER_P (t)
832 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
834 TYPE_NAME (t) = decl;
835 defer_or_set_type_context (t,
837 deferred_decl_context);
842 /* Create a record type that contains a SIZE bytes long field of TYPE with a
843 starting bit position so that it is aligned to ALIGN bits, and leaving at
844 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
845 record is guaranteed to get. GNAT_NODE is used for the position of the
846 associated TYPE_DECL. */
849 make_aligning_type (tree type, unsigned int align, tree size,
850 unsigned int base_align, int room, Node_Id gnat_node)
852 /* We will be crafting a record type with one field at a position set to be
853 the next multiple of ALIGN past record'address + room bytes. We use a
854 record placeholder to express record'address. */
855 tree record_type = make_node (RECORD_TYPE);
856 tree record = build0 (PLACEHOLDER_EXPR, record_type);
859 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
861 /* The diagram below summarizes the shape of what we manipulate:
863 <--------- pos ---------->
864 { +------------+-------------+-----------------+
865 record =>{ |############| ... | field (type) |
866 { +------------+-------------+-----------------+
867 |<-- room -->|<- voffset ->|<---- size ----->|
870 record_addr vblock_addr
872 Every length is in sizetype bytes there, except "pos" which has to be
873 set as a bit position in the GCC tree for the record. */
874 tree room_st = size_int (room);
875 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
876 tree voffset_st, pos, field;
/* Name the record after the base type with an "ALIGN" suffix. */
878 tree name = TYPE_IDENTIFIER (type);
880 name = concat_name (name, "ALIGN");
881 TYPE_NAME (record_type) = name;
883 /* Compute VOFFSET and then POS. The next byte position multiple of some
884 alignment after some address is obtained by "and"ing the alignment minus
885 1 with the two's complement of the address. */
886 voffset_st = size_binop (BIT_AND_EXPR,
887 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
888 size_int ((align / BITS_PER_UNIT) - 1));
890 /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype. */
891 pos = size_binop (MULT_EXPR,
892 convert (bitsizetype,
893 size_binop (PLUS_EXPR, room_st, voffset_st)),
896 /* Craft the GCC record representation. We exceptionally do everything
897 manually here because 1) our generic circuitry is not quite ready to
898 handle the complex position/size expressions we are setting up, 2) we
899 have a strong simplifying factor at hand: we know the maximum possible
900 value of voffset, and 3) we have to set/reset at least the sizes in
901 accordance with this maximum value anyway, as we need them to convey
902 what should be "alloc"ated for this type.
904 Use -1 as the 'addressable' indication for the field to prevent the
905 creation of a bitfield. We don't need one, it would have damaging
906 consequences on the alignment computation, and create_field_decl would
907 make one without this special argument, for instance because of the
908 complex position expression. */
909 field = create_field_decl (get_identifier ("F"), type, record_type, size,
911 TYPE_FIELDS (record_type) = field;
913 SET_TYPE_ALIGN (record_type, base_align);
914 TYPE_USER_ALIGN (record_type) = 1;
/* Sizes account for the worst-case voffset: align bits plus room bytes. */
916 TYPE_SIZE (record_type)
917 = size_binop (PLUS_EXPR,
918 size_binop (MULT_EXPR, convert (bitsizetype, size),
920 bitsize_int (align + room * BITS_PER_UNIT));
921 TYPE_SIZE_UNIT (record_type)
922 = size_binop (PLUS_EXPR, size,
923 size_int (room + align / BITS_PER_UNIT));
925 SET_TYPE_MODE (record_type, BLKmode);
926 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
928 /* Declare it now since it will never be declared otherwise. This is
929 necessary to ensure that its subtrees are properly marked. */
930 create_type_decl (name, record_type, true, false, gnat_node);
935 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
936 as the field type of a packed record if IN_RECORD is true, or as the
937 component type of a packed array if IN_RECORD is false. See if we can
938 rewrite it either as a type that has a non-BLKmode, which we can pack
939 tighter in the packed record case, or as a smaller type. If so, return
940 the new type. If not, return the original type. */
943 make_packable_type (tree type, bool in_record)
945   unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
946   unsigned HOST_WIDE_INT new_size;
947   tree new_type, old_field, field_list = NULL_TREE;
950   /* No point in doing anything if the size is zero. */
954   new_type = make_node (TREE_CODE (type));
956   /* Copy the name and flags from the old type to that of the new.
957      Note that we rely on the pointer equality created here for
958      TYPE_NAME to look through conversions in various places. */
959   TYPE_NAME (new_type) = TYPE_NAME (type);
960   TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
961   TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
962   TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
963   if (TREE_CODE (type) == RECORD_TYPE)
964     TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
966   /* If we are in a record and have a small size, set the alignment to
967      try for an integral mode.  Otherwise set it to try for a smaller
968      type with BLKmode. */
969   if (in_record && size <= MAX_FIXED_MODE_SIZE)
971       align = ceil_pow2 (size);
972       SET_TYPE_ALIGN (new_type, align);
          /* Round the size up to the power-of-2 alignment just chosen.  */
973       new_size = (size + align - 1) & -align;
977       unsigned HOST_WIDE_INT align;
979       /* Do not try to shrink the size if the RM size is not constant. */
980       if (TYPE_CONTAINS_TEMPLATE_P (type)
981 	  || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
984       /* Round the RM size up to a unit boundary to get the minimal size
985 	 for a BLKmode record.  Give up if it's already the size. */
986       new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
987       new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
988       if (new_size == size)
          /* new_size & -new_size isolates the lowest set bit, i.e. the
             largest power of 2 that divides the new size.  */
991       align = new_size & -new_size;
992       SET_TYPE_ALIGN (new_type, MIN (TYPE_ALIGN (type), align));
995   TYPE_USER_ALIGN (new_type) = 1;
997   /* Now copy the fields, keeping the position and size as we don't want
998      to change the layout by propagating the packedness downwards. */
999   for (old_field = TYPE_FIELDS (type); old_field;
1000        old_field = DECL_CHAIN (old_field))
1002       tree new_field_type = TREE_TYPE (old_field);
1003       tree new_field, new_size;
          /* Recursively try to shrink aggregate field types as well.  */
1005       if (RECORD_OR_UNION_TYPE_P (new_field_type)
1006 	  && !TYPE_FAT_POINTER_P (new_field_type)
1007 	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
1008 	new_field_type = make_packable_type (new_field_type, true);
1010       /* However, for the last field in a not already packed record type
1011 	 that is of an aggregate type, we need to use the RM size in the
1012 	 packable version of the record type, see finish_record_type. */
1013       if (!DECL_CHAIN (old_field)
1014 	  && !TYPE_PACKED (type)
1015 	  && RECORD_OR_UNION_TYPE_P (new_field_type)
1016 	  && !TYPE_FAT_POINTER_P (new_field_type)
1017 	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1018 	  && TYPE_ADA_SIZE (new_field_type))
1019 	new_size = TYPE_ADA_SIZE (new_field_type);
1021 	new_size = DECL_SIZE (old_field);
1024 	= create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
1025 			     new_size, bit_position (old_field),
1027 			     !DECL_NONADDRESSABLE_P (old_field));
1029       DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
1030       SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
1031       if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1032 	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
          /* Fields are prepended here and put back in order below.  */
1034       DECL_CHAIN (new_field) = field_list;
1035       field_list = new_field;
      /* rep_level 2: the layout of the copied fields is kept as-is.  */
1038   finish_record_type (new_type, nreverse (field_list), 2, false);
1039   relate_alias_sets (new_type, type, ALIAS_SET_COPY);
1040   if (TYPE_STUB_DECL (type))
1041     SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1042 			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
1044   /* If this is a padding record, we never want to make the size smaller
1045      than what was specified.  For QUAL_UNION_TYPE, also copy the size. */
1046   if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1048       TYPE_SIZE (new_type) = TYPE_SIZE (type);
1049       TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1054       TYPE_SIZE (new_type) = bitsize_int (new_size);
1055       TYPE_SIZE_UNIT (new_type)
1056 	= size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
1059   if (!TYPE_CONTAINS_TEMPLATE_P (type))
1060     SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1062   compute_record_mode (new_type);
1064   /* Try harder to get a packable type if necessary, for example
1065      in case the record itself contains a BLKmode field. */
1066   if (in_record && TYPE_MODE (new_type) == BLKmode)
1067     SET_TYPE_MODE (new_type,
1068 		   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
1070   /* If neither the mode nor the size has shrunk, return the old type. */
1071   if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
1077 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1078 If TYPE is the best type, return it. Otherwise, make a new type. We
1079 only support new integral and pointer types. FOR_BIASED is true if
1080 we are making a biased type. */
1083 make_type_from_size (tree type, tree size_tree, bool for_biased)
1085   unsigned HOST_WIDE_INT size;
1089   /* If size indicates an error, just return TYPE to avoid propagating
1090      the error.  Likewise if it's too large to represent. */
1091   if (!size_tree || !tree_fits_uhwi_p (size_tree))
1094   size = tree_to_uhwi (size_tree);
1096   switch (TREE_CODE (type))
1101       biased_p = (TREE_CODE (type) == INTEGER_TYPE
1102 		  && TYPE_BIASED_REPRESENTATION_P (type));
1104       /* Integer types with precision 0 are forbidden. */
1108       /* Only do something if the type isn't a packed array type and doesn't
1109 	 already have the proper size and the size isn't too large. */
1110       if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1111 	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1112 	  || size > LONG_LONG_TYPE_SIZE)
          /* The result is biased if the input was or the caller asked for
             a biased type.  */
1115       biased_p |= for_biased;
          /* A biased representation is always stored unsigned.  */
1116       if (TYPE_UNSIGNED (type) || biased_p)
1117 	new_type = make_unsigned_type (size);
1119 	new_type = make_signed_type (size);
1120       TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1121       SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1122       SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
1123       /* Copy the name to show that it's essentially the same type and
1124 	 not a subrange type. */
1125       TYPE_NAME (new_type) = TYPE_NAME (type);
1126       TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1127       SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1131       /* Do something if this is a fat pointer, in which case we
1132 	 may need to return the thin pointer. */
1133       if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
            /* Ask the target whether a pointer mode of that size exists.  */
1135 	  machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
1136 	  if (!targetm.valid_pointer_mode (p_mode))
1139 	    build_pointer_type_for_mode
1140 	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1146       /* Only do something if this is a thin pointer, in which case we
1147 	 may need to return the fat pointer. */
1148       if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1150 	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1160 /* See if the data pointed to by the hash table slot is marked. */
1163 pad_type_hasher::keep_cache_entry (pad_type_hash *&t)
  /* Keep the hash-table entry only while the padded type itself is
     still marked by the garbage collector.  */
1165   return ggc_marked_p (t->type);
1168 /* Return true iff the padded types are equivalent. */
1171 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
  /* Fast path: entries with different cached hash values can never
     represent equivalent padded types.  */
1175   if (t1->hash != t2->hash)
1181   /* We consider that the padded types are equivalent if they pad the same type
1182      and have the same size, alignment, RM size and storage order.  Taking the
1183      mode into account is redundant since it is determined by the others. */
1185     TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1186     && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1187     && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1188     && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1189     && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1192 /* Look up the padded TYPE in the hash table and return its canonical version
1193 if it exists; otherwise, insert it into the hash table. */
1196 lookup_and_insert_pad_type (tree type)
1199   struct pad_type_hash in, *h;
  /* Combine the inner type's hash with the size, alignment and RM size,
     mirroring the criteria used by pad_type_hasher::equal.  */
1202     = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1203   hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1204   hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1205   hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1209   h = pad_type_hash_table->find_with_hash (&in, hashcode);
  /* Not found: allocate a fresh entry and insert it into the table.  */
1213   h = ggc_alloc<pad_type_hash> ();
1216   *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
1220 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1221 if needed. We have already verified that SIZE and ALIGN are large enough.
1222 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1223 IS_COMPONENT_TYPE is true if this is being done for the component type of
1224 an array. IS_USER_TYPE is true if the original type needs to be completed.
1225 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1226 the RM size of the resulting type is to be set to SIZE too. */
1229 maybe_pad_type (tree type, tree size, unsigned int align,
1230 		Entity_Id gnat_entity, bool is_component_type,
1231 		bool is_user_type, bool definition, bool set_rm_size)
1233   tree orig_size = TYPE_SIZE (type);
1234   unsigned int orig_align = TYPE_ALIGN (type);
1237   /* If TYPE is a padded type, see if it agrees with any size and alignment
1238      we were given.  If so, return the original type.  Otherwise, strip
1239      off the padding, since we will either be returning the inner type
1240      or repadding it.  If no size or alignment is specified, use that of
1241      the original padded type. */
1242   if (TYPE_IS_PADDING_P (type))
1245 	      || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1246 	  && (align == 0 || align == orig_align))
1254       type = TREE_TYPE (TYPE_FIELDS (type));
1255       orig_size = TYPE_SIZE (type);
1256       orig_align = TYPE_ALIGN (type);
1259   /* If the size is either not being changed or is being made smaller (which
1260      is not done here and is only valid for bitfields anyway), show the size
1261      isn't changing.  Likewise, clear the alignment if it isn't being
1262      changed.  Then return if we aren't doing anything. */
1264       && (operand_equal_p (size, orig_size, 0)
1265 	  || (TREE_CODE (orig_size) == INTEGER_CST
1266 	      && tree_int_cst_lt (size, orig_size))))
1269   if (align == orig_align)
1272   if (align == 0 && !size)
1275   /* If requested, complete the original type and give it a name. */
1277     create_type_decl (get_entity_name (gnat_entity), type,
1278 		      !Comes_From_Source (gnat_entity),
1280 			 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1281 			 && DECL_IGNORED_P (TYPE_NAME (type))),
1284   /* We used to modify the record in place in some cases, but that could
1285      generate incorrect debugging information.  So make a new record
1287   record = make_node (RECORD_TYPE);
1288   TYPE_PADDING_P (record) = 1;
1289   if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1290     SET_TYPE_DEBUG_TYPE (record, type);
1292   /* ??? Padding types around packed array implementation types will be
1293      considered as root types in the array descriptor language hook (see
1294      gnat_get_array_descr_info).  Give them the original packed array type
1295      name so that the one coming from sources appears in the debugging
1297   if (TYPE_IMPL_PACKED_ARRAY_P (type)
1298       && TYPE_ORIGINAL_PACKED_ARRAY (type)
1299       && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1300     TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
1301   else if (Present (gnat_entity))
1302     TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
  /* Fall back to the original size/alignment when none was requested.  */
1304   SET_TYPE_ALIGN (record, align ? align : orig_align);
1305   TYPE_SIZE (record) = size ? size : orig_size;
1306   TYPE_SIZE_UNIT (record)
1307     = convert (sizetype,
1308 	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1309 			   bitsize_unit_node));
1311   /* If we are changing the alignment and the input type is a record with
1312      BLKmode and a small constant size, try to make a form that has an
1313      integral mode.  This might allow the padding record to also have an
1314      integral mode, which will be much more efficient.  There is no point
1315      in doing so if a size is specified unless it is also a small constant
1316      size and it is incorrect to do so if we cannot guarantee that the mode
1317      will be naturally aligned since the field must always be addressable.
1319      ??? This might not always be a win when done for a stand-alone object:
1320      since the nominal and the effective type of the object will now have
1321      different modes, a VIEW_CONVERT_EXPR will be required for converting
1322      between them and it might be hard to overcome afterwards, including
1323      at the RTL level when the stand-alone object is accessed as a whole. */
1325       && RECORD_OR_UNION_TYPE_P (type)
1326       && TYPE_MODE (type) == BLKmode
1327       && !TYPE_BY_REFERENCE_P (type)
1328       && TREE_CODE (orig_size) == INTEGER_CST
1329       && !TREE_OVERFLOW (orig_size)
1330       && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1332 	  || (TREE_CODE (size) == INTEGER_CST
1333 	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1335       tree packable_type = make_packable_type (type, true);
1336       if (TYPE_MODE (packable_type) != BLKmode
1337 	  && align >= TYPE_ALIGN (packable_type))
1338         type = packable_type;
1341   /* Now create the field with the original size. */
1342   field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1343 			     bitsize_zero_node, 0, 1);
1344   DECL_INTERNAL_P (field) = 1;
1346   /* Do not emit debug info until after the auxiliary record is built. */
1347   finish_record_type (record, field, 1, false);
1349   /* Set the RM size if requested. */
1352       tree canonical_pad_type;
1354       SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1356       /* If the padded type is complete and has constant size, we canonicalize
1357 	 it by means of the hash table.  This is consistent with the language
1358 	 semantics and ensures that gigi and the middle-end have a common view
1359 	 of these padded types. */
1360       if (TREE_CONSTANT (TYPE_SIZE (record))
1361 	  && (canonical_pad_type = lookup_and_insert_pad_type (record)))
1363 	  record = canonical_pad_type;
1368   /* Unless debugging information isn't being written for the input type,
1369      write a record that shows what we are a subtype of and also make a
1370      variable that indicates our size, if still variable. */
1371   if (TREE_CODE (orig_size) != INTEGER_CST
1372       && TYPE_NAME (record)
1374       && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1375 	   && DECL_IGNORED_P (TYPE_NAME (type))))
1377       tree name = TYPE_IDENTIFIER (record);
1378       tree size_unit = TYPE_SIZE_UNIT (record);
1380       /* A variable that holds the size is required even with no encoding since
1381 	 it will be referenced by debugging information attributes.  At global
1382 	 level, we need a single variable across all translation units. */
1384 	  && TREE_CODE (size) != INTEGER_CST
1385 	  && (definition || global_bindings_p ()))
1387 	  /* Whether or not gnat_entity comes from source, this XVZ variable is
1388 	     a compilation artifact. */
1390 	    = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1391 			      size_unit, true, global_bindings_p (),
1392 			      !definition && global_bindings_p (), false,
1393 			      false, true, true, NULL, gnat_entity);
1394 	  TYPE_SIZE_UNIT (record) = size_unit;
1397       /* There is no need to show what we are a subtype of when outputting as
1398 	 few encodings as possible: regular debugging information makes this
1400       if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1402 	  tree marker = make_node (RECORD_TYPE);
1403 	  tree orig_name = TYPE_IDENTIFIER (type);
1405 	  TYPE_NAME (marker) = concat_name (name, "XVS");
1406 	  finish_record_type (marker,
1407 			      create_field_decl (orig_name,
1408 						 build_reference_type (type),
1409 						 marker, NULL_TREE, NULL_TREE,
1412 	  TYPE_SIZE_UNIT (marker) = size_unit;
1414 	  add_parallel_type (record, marker);
1418 	rest_of_record_type_compilation (record);
1421   /* If a simple size was explicitly given, maybe issue a warning. */
1423       || TREE_CODE (size) == COND_EXPR
1424       || TREE_CODE (size) == MAX_EXPR
1425       || No (gnat_entity))
1428   /* But don't do it if we are just annotating types and the type is tagged or
1429      concurrent, since these types aren't fully laid out in this mode. */
1430   if (type_annotate_only)
1434 	  ? Component_Type (gnat_entity) : Etype (gnat_entity);
1436       if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1440   /* Take the original size as the maximum size of the input if there was an
1441      unconstrained record involved and round it up to the specified alignment,
1442      if one was specified, but only for aggregate types. */
1443   if (CONTAINS_PLACEHOLDER_P (orig_size))
1444     orig_size = max_size (orig_size, true);
1446   if (align && AGGREGATE_TYPE_P (type))
1447     orig_size = round_up (orig_size, align);
1449   if (!operand_equal_p (size, orig_size, 0)
1450       && !(TREE_CODE (size) == INTEGER_CST
1451 	   && TREE_CODE (orig_size) == INTEGER_CST
1452 	   && (TREE_OVERFLOW (size)
1453 	       || TREE_OVERFLOW (orig_size)
1454 	       || tree_int_cst_lt (size, orig_size))))
      /* Pick the most precise GNAT node to attach the warning to.  */
1456       Node_Id gnat_error_node = Empty;
1458       /* For a packed array, post the message on the original array type. */
1459       if (Is_Packed_Array_Impl_Type (gnat_entity))
1460 	gnat_entity = Original_Array_Type (gnat_entity);
1462       if ((Ekind (gnat_entity) == E_Component
1463 	   || Ekind (gnat_entity) == E_Discriminant)
1464 	  && Present (Component_Clause (gnat_entity)))
1465 	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1466       else if (Present (Size_Clause (gnat_entity)))
1467 	gnat_error_node = Expression (Size_Clause (gnat_entity));
1469       /* Generate message only for entities that come from source, since
1470 	 if we have an entity created by expansion, the message will be
1471 	 generated for some other corresponding source entity. */
1472       if (Comes_From_Source (gnat_entity))
1474 	  if (Present (gnat_error_node))
1475 	    post_error_ne_tree ("{^ }bits of & unused?",
1476 				gnat_error_node, gnat_entity,
1477 				size_diffop (size, orig_size));
1478 	  else if (is_component_type)
1479 	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
1480 				gnat_entity, gnat_entity,
1481 				size_diffop (size, orig_size));
1488 /* Return a copy of the padded TYPE but with reverse storage order. */
1491 set_reverse_storage_order_on_pad_type (tree type)
1493   tree field, canonical_pad_type;
1497       /* If the inner type is not scalar then the function does nothing. */
1498       tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1499       gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1500 		  && !VECTOR_TYPE_P (inner_type));
1503   /* This is required for the canonicalization. */
1504   gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
  /* Copy both the type and its single field so the original padded
     type is left untouched.  */
1506   field = copy_node (TYPE_FIELDS (type));
1507   type = copy_type (type);
1508   DECL_CONTEXT (field) = type;
1509   TYPE_FIELDS (type) = field;
1510   TYPE_REVERSE_STORAGE_ORDER (type) = 1;
  /* Canonicalize the new padded type through the hash table, falling
     back to the copy if no canonical version exists yet.  */
1511   canonical_pad_type = lookup_and_insert_pad_type (type);
1512   return canonical_pad_type ? canonical_pad_type : type;
1515 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1516 If this is a multi-dimensional array type, do this recursively.
1519 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1520 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1521 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1524 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1526   /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
1527      of a one-dimensional array, since the padding has the same alias set
1528      as the field type, but if it's a multi-dimensional array, we need to
1529      see the inner types. */
1530   while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1531 	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1532 	     || TYPE_PADDING_P (gnu_old_type)))
1533     gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1535   /* Unconstrained array types are deemed incomplete and would thus be given
1536      alias set 0.  Retrieve the underlying array type. */
1537   if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1539       = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1540   if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1542       = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
  /* For multi-dimensional arrays, recurse to relate the inner
     dimensions first.  */
1544   if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1545       && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1546       && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1547     relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1551     case ALIAS_SET_COPY:
1552       /* The alias set shouldn't be copied between array types with different
1553 	 aliasing settings because this can break the aliasing relationship
1554 	 between the array type and its element type. */
1555       if (flag_checking || flag_strict_aliasing)
1556 	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1557 		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1558 		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1559 			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1561       TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1564     case ALIAS_SET_SUBSET:
1565     case ALIAS_SET_SUPERSET:
1567 	alias_set_type old_set = get_alias_set (gnu_old_type);
1568 	alias_set_type new_set = get_alias_set (gnu_new_type);
1570 	/* Do nothing if the alias sets conflict.  This ensures that we
1571 	   never call record_alias_subset several times for the same pair
1572 	   or at all for alias set 0. */
1573 	if (!alias_sets_conflict_p (old_set, new_set))
1575 	    if (op == ALIAS_SET_SUBSET)
1576 	      record_alias_subset (old_set, new_set);
1578 	      record_alias_subset (new_set, old_set);
  /* Also record the aliasing relationships of the components.  */
1587   record_component_aliases (gnu_new_type);
1590 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1591 ARTIFICIAL_P is true if the type was generated by the compiler. */
1594 record_builtin_type (const char *name, tree type, bool artificial_p)
1596   tree type_decl = build_decl (input_location,
1597 			       TYPE_DECL, get_identifier (name), type);
1598   DECL_ARTIFICIAL (type_decl) = artificial_p;
1599   TYPE_ARTIFICIAL (type) = artificial_p;
  /* Make the declaration visible in the current binding level.  */
1600   gnat_pushdecl (type_decl, Empty);
  /* Notify the debug back-end of the new type declaration, if it
     provides a hook for that.  */
1602   if (debug_hooks->type_decl)
1603     debug_hooks->type_decl (type_decl, false);
1606 /* Finish constructing the character type CHAR_TYPE.
1608 In Ada character types are enumeration types and, as a consequence, are
1609 represented in the front-end by integral types holding the positions of
1610 the enumeration values as defined by the language, which means that the
1611 integral types are unsigned.
1613 Unfortunately the signedness of 'char' in C is implementation-defined
1614    and GCC even has the option -fsigned-char to toggle it at compile time.
1615 Since GNAT's philosophy is to be compatible with C by default, to wit
1616 Interfaces.C.char is defined as a mere copy of Character, we may need
1617 to declare character types as signed types in GENERIC and generate the
1618 necessary adjustments to make them behave as unsigned types.
1620 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1621 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1622 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1623 types. The idea is to ensure that the bit pattern contained in the
1624 Esize'd objects is not changed, even though the numerical value will
1625 be interpreted differently depending on the signedness. */
1628 finish_character_type (tree char_type)
  /* Nothing to do if the character type is already unsigned.  */
1630   if (TYPE_UNSIGNED (char_type))
1633   /* Make a copy of a generic unsigned version since we'll modify it. */
1634   tree unsigned_char_type
      /* For 'char' itself reuse the cached unsigned_char_type_node;
	 otherwise copy a generic unsigned variant of CHAR_TYPE.  */
1635     = (char_type == char_type_node
1636        ? unsigned_char_type_node
1637        : copy_type (gnat_unsigned_type_for (char_type)));
1639   /* Create an unsigned version of the type and set it as debug type. */
1640   TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1641   TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1642   TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1643   SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1645   /* If this is a subtype, make the debug type a subtype of the debug type
1646      of the base type and convert literal RM bounds to unsigned. */
1647   if (TREE_TYPE (char_type))
1649       tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
1650       tree min_value = TYPE_RM_MIN_VALUE (char_type);
1651       tree max_value = TYPE_RM_MAX_VALUE (char_type);
      /* Only literal (INTEGER_CST) bounds can be converted here.  */
1653       if (TREE_CODE (min_value) == INTEGER_CST)
1654 	min_value = fold_convert (base_unsigned_char_type, min_value);
1655       if (TREE_CODE (max_value) == INTEGER_CST)
1656 	max_value = fold_convert (base_unsigned_char_type, max_value);
1658       TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1659       SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1660       SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1663   /* Adjust the RM bounds of the original type to unsigned; that's especially
1664      important for types since they are implicit in this case. */
1665   SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1666   SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1669 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1670 finish constructing the record type as a fat pointer type. */
1673 finish_fat_pointer_type (tree record_type, tree field_list)
1675   /* Make sure we can put it into a register. */
1676   if (STRICT_ALIGNMENT)
      /* Cap the alignment at BIGGEST_ALIGNMENT in case twice the pointer
	 size exceeds what the target supports.  */
1677     SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1679   /* Show what it really is. */
1680   TYPE_FAT_POINTER_P (record_type) = 1;
1682   /* Do not emit debug info for it since the types of its fields may still be
1683      incomplete at this point. */
1684   finish_record_type (record_type, field_list, 0, false);
1686   /* Force type_contains_placeholder_p to return true on it.  Although the
1687      PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1688      type but the representation of the unconstrained array. */
1689   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1692 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1693 finish constructing the record or union type. If REP_LEVEL is zero, this
1694 record has no representation clause and so will be entirely laid out here.
1695 If REP_LEVEL is one, this record has a representation clause and has been
1696 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1697 this record is derived from a parent record and thus inherits its layout;
1698 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1699 we need to write debug information about this type. */
1702 finish_record_type (tree record_type, tree field_list, int rep_level,
1705   enum tree_code code = TREE_CODE (record_type);
1706   tree name = TYPE_IDENTIFIER (record_type);
1707   tree ada_size = bitsize_zero_node;
1708   tree size = bitsize_zero_node;
  /* Remember which layout data was already set on entry, so a rep'ed
     record's pre-computed values are preserved below.  */
1709   bool had_size = TYPE_SIZE (record_type) != 0;
1710   bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1711   bool had_align = TYPE_ALIGN (record_type) != 0;
1714   TYPE_FIELDS (record_type) = field_list;
1716   /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
1717      generate debug info and have a parallel type. */
1718   TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1720   /* Globally initialize the record first.  If this is a rep'ed record,
1721      that just means some initializations; otherwise, layout the record. */
1724       SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1725 					TYPE_ALIGN (record_type)));
1728 	TYPE_SIZE_UNIT (record_type) = size_zero_node;
1731 	TYPE_SIZE (record_type) = bitsize_zero_node;
1733       /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1734 	 out just like a UNION_TYPE, since the size will be fixed. */
1735       else if (code == QUAL_UNION_TYPE)
1740       /* Ensure there isn't a size already set.  There can be in an error
1741 	 case where there is a rep clause but all fields have errors and
1742 	 no longer have a position. */
1743       TYPE_SIZE (record_type) = 0;
1745       /* Ensure we use the traditional GCC layout for bitfields when we need
1746 	 to pack the record type or have a representation clause.  The other
1747 	 possible layout (Microsoft C compiler), if available, would prevent
1748 	 efficient packing in almost all cases. */
1749 #ifdef TARGET_MS_BITFIELD_LAYOUT
1750       if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1751 	decl_attributes (&record_type,
1752 			 tree_cons (get_identifier ("gcc_struct"),
1753 				    NULL_TREE, NULL_TREE),
1754 			 ATTR_FLAG_TYPE_IN_PLACE);
1757       layout_type (record_type);
1760   /* At this point, the position and size of each field is known.  It was
1761      either set before entry by a rep clause, or by laying out the type above.
1763      We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1764      to compute the Ada size; the GCC size and alignment (for rep'ed records
1765      that are not padding types); and the mode (for rep'ed records).  We also
1766      clear the DECL_BIT_FIELD indication for the cases we know have not been
1767      handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1769   if (code == QUAL_UNION_TYPE)
1770     field_list = nreverse (field_list);
  /* Walk every field, accumulating sizes and fixing up bit-field flags.  */
1772   for (field = field_list; field; field = DECL_CHAIN (field))
1774       tree type = TREE_TYPE (field);
1775       tree pos = bit_position (field);
1776       tree this_size = DECL_SIZE (field);
1779       if (RECORD_OR_UNION_TYPE_P (type)
1780 	  && !TYPE_FAT_POINTER_P (type)
1781 	  && !TYPE_CONTAINS_TEMPLATE_P (type)
1782 	  && TYPE_ADA_SIZE (type))
1783 	this_ada_size = TYPE_ADA_SIZE (type);
1785 	this_ada_size = this_size;
1787       /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1788       if (DECL_BIT_FIELD (field)
1789 	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1791 	  unsigned int align = TYPE_ALIGN (type);
1793 	  /* In the general case, type alignment is required. */
1794 	  if (value_factor_p (pos, align))
1796 	      /* The enclosing record type must be sufficiently aligned.
1797 		 Otherwise, if no alignment was specified for it and it
1798 		 has been laid out already, bump its alignment to the
1799 		 desired one if this is compatible with its size and
1800 		 maximum alignment, if any. */
1801 	      if (TYPE_ALIGN (record_type) >= align)
1803 		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1804 		  DECL_BIT_FIELD (field) = 0;
1808 		       && value_factor_p (TYPE_SIZE (record_type), align)
1809 		       && (!TYPE_MAX_ALIGN (record_type)
1810 			   || TYPE_MAX_ALIGN (record_type) >= align))
1812 		  SET_TYPE_ALIGN (record_type, align);
1813 		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1814 		  DECL_BIT_FIELD (field) = 0;
1818 	  /* In the non-strict alignment case, only byte alignment is
	     required. */
1819 	  if (!STRICT_ALIGNMENT
1820 	      && DECL_BIT_FIELD (field)
1821 	      && value_factor_p (pos, BITS_PER_UNIT))
1822 	    DECL_BIT_FIELD (field) = 0;
1825       /* If we still have DECL_BIT_FIELD set at this point, we know that the
1826 	 field is technically not addressable.  Except that it can actually
1827 	 be addressed if it is BLKmode and happens to be properly aligned. */
1828       if (DECL_BIT_FIELD (field)
1829 	  && !(DECL_MODE (field) == BLKmode
1830 	       && value_factor_p (pos, BITS_PER_UNIT)))
1831 	DECL_NONADDRESSABLE_P (field) = 1;
1833       /* A type must be as aligned as its most aligned field that is not
1834 	 a bit-field.  But this is already enforced by layout_type. */
1835       if (rep_level > 0 && !DECL_BIT_FIELD (field))
1836 	SET_TYPE_ALIGN (record_type,
1837 			MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
1842 	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1843 	  size = size_binop (MAX_EXPR, size, this_size);
1846 	case QUAL_UNION_TYPE:
1848 	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1849 			   this_ada_size, ada_size);
1850 	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1855 	  /* Since we know here that all fields are sorted in order of
1856 	     increasing bit position, the size of the record is one
1857 	     higher than the ending bit of the last field processed
1858 	     unless we have a rep clause, since in that case we might
1859 	     have a field outside a QUAL_UNION_TYPE that has a higher ending
1860 	     position.  So use a MAX in that case.  Also, if this field is a
1861 	     QUAL_UNION_TYPE, we need to take into account the previous size in
1862 	     the case of empty variants. */
1864 	    = merge_sizes (ada_size, pos, this_ada_size,
1865 			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1867 	    = merge_sizes (size, pos, this_size,
1868 			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
  /* Restore the original order of the fields reversed above.  */
1876   if (code == QUAL_UNION_TYPE)
1877     nreverse (field_list);
1881       /* If this is a padding record, we never want to make the size smaller
1882 	 than what was specified in it, if any. */
1883       if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1884 	size = TYPE_SIZE (record_type);
1886       /* Now set any of the values we've just computed that apply. */
1887       if (!TYPE_FAT_POINTER_P (record_type)
1888 	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1889 	SET_TYPE_ADA_SIZE (record_type, ada_size);
1893 	  tree size_unit = had_size_unit
1894 			   ? TYPE_SIZE_UNIT (record_type)
1895 			   : convert (sizetype,
1896 				      size_binop (CEIL_DIV_EXPR, size,
1897 						  bitsize_unit_node));
1898 	  unsigned int align = TYPE_ALIGN (record_type);
1900 	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1901 	  TYPE_SIZE_UNIT (record_type)
1902 	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1904 	  compute_record_mode (record_type);
1908   /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
1909   TYPE_MAX_ALIGN (record_type) = 0;
1912     rest_of_record_type_compilation (record_type);
1915 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE.  If
1916 PARALLEL_TYPE has no context and its computation is not deferred yet, also
1917 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1918 moment TYPE will get a context.  */
1921 add_parallel_type (tree type, tree parallel_type)
1923 tree decl = TYPE_STUB_DECL (type);
/* Parallel types are chained through DECL_PARALLEL_TYPE of the successive
   stub decls, so walk down to the last link before appending.  */
1925 while (DECL_PARALLEL_TYPE (decl))
1926 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1928 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1930 /* If PARALLEL_TYPE already has a context, we are done.  */
1931 if (TYPE_CONTEXT (parallel_type))
/* NOTE(review): the early return for this test sits on an elided line of
   this listing -- confirm against the full source.  */
1934 /* Otherwise, try to get one from TYPE's context.  If so, simply propagate
1935 it to PARALLEL_TYPE.  */
1936 if (TYPE_CONTEXT (type))
1937 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1939 /* Otherwise TYPE has no context yet.  We know it will have one thanks to
1940 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
1941 so we have nothing to do in this case.  */
1944 /* Return true if TYPE has a parallel type.  */
1947 has_parallel_type (tree type)
1949 tree decl = TYPE_STUB_DECL (type);
/* Parallel types hang off the type's stub decl (see add_parallel_type);
   a non-null DECL_PARALLEL_TYPE means at least one is attached.  */
1951 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1954 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1955 associated with it.  It need not be invoked directly in most cases since
1956 finish_record_type takes care of doing so, but this can be necessary if
1957 a parallel type is to be attached to the record type.  */
1960 rest_of_record_type_compilation (tree record_type)
1962 bool var_size = false;
/* NOTE(review): the declaration of FIELD used below is on an elided line
   of this listing.  */
1965 /* If this is a padded type, the bulk of the debug info has already been
1966 generated for the field's type.  */
1967 if (TYPE_IS_PADDING_P (record_type))
1970 /* If the type already has a parallel type (XVS type), then we're done.  */
1971 if (has_parallel_type (record_type))
1974 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1976 /* We need to make an XVE/XVU record if any field has variable size,
1977 whether or not the record does.  For example, if we have a union,
1978 it may be that all fields, rounded up to the alignment, have the
1979 same size, in which case we'll use that size.  But the debug
1980 output routines (except Dwarf2) won't be able to output the fields,
1981 so we need to make the special record.  */
1982 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1983 /* If a field has a non-constant qualifier, the record will have
1984 variable size too.  */
1985 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1986 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1993 /* If this record type is of variable size, make a parallel record type that
1994 will tell the debugger how the former is laid out (see exp_dbug.ads).  */
1995 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1997 tree new_record_type
1998 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1999 ? UNION_TYPE : TREE_CODE (record_type));
2000 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
2001 tree last_pos = bitsize_zero_node;
2002 tree old_field, prev_old_field = NULL_TREE;
2005 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
2007 TYPE_NAME (new_record_type) = new_name;
/* The encoding record gets maximal alignment and a size equal to the
   original type's alignment -- presumably so the debug back-end does not
   re-adjust the field positions computed by hand below; confirm.  */
2008 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
2009 TYPE_STUB_DECL (new_record_type)
2010 = create_type_stub_decl (new_name, new_record_type);
2011 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
2012 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
2013 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
2014 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
2015 TYPE_SIZE_UNIT (new_record_type)
2016 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2018 /* Now scan all the fields, replacing each field with a new field
2019 corresponding to the new encoding.  */
2020 for (old_field = TYPE_FIELDS (record_type); old_field;
2021 old_field = DECL_CHAIN (old_field))
2023 tree field_type = TREE_TYPE (old_field);
2024 tree field_name = DECL_NAME (old_field);
2025 tree curpos = bit_position (old_field);
2026 tree pos, new_field;
2028 unsigned int align = 0;
2030 /* We're going to do some pattern matching below so remove as many
2031 conversions as possible.  */
2032 curpos = remove_conversions (curpos, true);
2034 /* See how the position was modified from the last position.
2036 There are two basic cases we support: a value was added
2037 to the last position or the last position was rounded to
2038 a boundary and then something was added.  Check for the
2039 first case first.  If not, see if there is any evidence
2040 of rounding.  If so, round the last position and retry.
2042 If this is a union, the position can be taken as zero.  */
2043 if (TREE_CODE (new_record_type) == UNION_TYPE)
2044 pos = bitsize_zero_node;
2046 pos = compute_related_constant (curpos, last_pos);
/* Rounding case 1: CURPOS is a multiple of a compile-time constant,
   i.e. offset * align.  */
2049 && TREE_CODE (curpos) == MULT_EXPR
2050 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
2052 tree offset = TREE_OPERAND (curpos, 0);
2053 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2054 align = scale_by_factor_of (offset, align);
2055 last_pos = round_up (last_pos, align);
2056 pos = compute_related_constant (curpos, last_pos);
/* Rounding case 2: CURPOS is offset * align + constant addend; the
   usable alignment is capped by the addend's lowest set bit.  */
2059 && TREE_CODE (curpos) == PLUS_EXPR
2060 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
2061 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
2063 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
2065 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2066 unsigned HOST_WIDE_INT addend
2067 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2069 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
2070 align = scale_by_factor_of (offset, align);
2071 align = MIN (align, addend & -addend);
2072 last_pos = round_up (last_pos, align);
2073 pos = compute_related_constant (curpos, last_pos);
2075 else if (potential_alignment_gap (prev_old_field, old_field, pos))
2077 align = TYPE_ALIGN (field_type);
2078 last_pos = round_up (last_pos, align);
2079 pos = compute_related_constant (curpos, last_pos);
2082 /* If we can't compute a position, set it to zero.
2084 ??? We really should abort here, but it's too much work
2085 to get this correct for all cases.  */
2087 pos = bitsize_zero_node;
2089 /* See if this type is variable-sized and make a pointer type
2090 and indicate the indirection if so.  Beware that the debug
2091 back-end may adjust the position computed above according
2092 to the alignment of the field type, i.e. the pointer type
2093 in this case, if we don't preventively counter that.  */
2094 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2096 field_type = build_pointer_type (field_type);
2097 if (align != 0 && TYPE_ALIGN (field_type) > align)
2099 field_type = copy_node (field_type);
2100 SET_TYPE_ALIGN (field_type, align);
2105 /* Make a new field name, if necessary.  */
2106 if (var || align != 0)
/* Suffixes per exp_dbug.ads: XVL = indirect variable-length field,
   XVAnn / XVLnn = field whose position was aligned to nn bytes.  */
2111 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2112 align / BITS_PER_UNIT);
2114 strcpy (suffix, "XVL");
2116 field_name = concat_name (field_name, suffix);
2120 = create_field_decl (field_name, field_type, new_record_type,
2121 DECL_SIZE (old_field), pos, 0, 0);
/* Prepend; the list is put back in order by the nreverse below.  */
2122 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2123 TYPE_FIELDS (new_record_type) = new_field;
2125 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2126 zero.  The only time it's not the last field of the record
2127 is when there are other components at fixed positions after
2128 it (meaning there was a rep clause for every field) and we
2129 want to be able to encode them.  */
2130 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
2131 (TREE_CODE (TREE_TYPE (old_field))
2134 : DECL_SIZE (old_field));
2135 prev_old_field = old_field;
2138 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2140 add_parallel_type (record_type, new_record_type);
2144 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2145 with FIRST_BIT and SIZE that describe a field.  SPECIAL is true if this
2146 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2147 replace a value of zero with the old size.  If HAS_REP is true, we take the
2148 MAX of the end position of this field with LAST_SIZE.  In all other cases,
2149 we use FIRST_BIT plus SIZE.  Return an expression for the size.  */
2152 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2155 tree type = TREE_TYPE (last_size);
/* Simple case: not a variant part, so the new size is the end position of
   the field, MAXed with the previous size when there is a rep clause.  */
2158 if (!special || TREE_CODE (size) != COND_EXPR)
2160 new_size = size_binop (PLUS_EXPR, first_bit, size);
2162 new_size = size_binop (MAX_EXPR, last_size, new_size);
/* COND_EXPR from a QUAL_UNION_TYPE variant: recurse on each arm, keeping
   LAST_SIZE for an arm whose size is zero (empty variant).  */
2166 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2167 integer_zerop (TREE_OPERAND (size, 1))
2168 ? last_size : merge_sizes (last_size, first_bit,
2169 TREE_OPERAND (size, 1),
2171 integer_zerop (TREE_OPERAND (size, 2))
2172 ? last_size : merge_sizes (last_size, first_bit,
2173 TREE_OPERAND (size, 2),
2176 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
2177 when fed through substitute_in_expr) into thinking that a constant
2178 size is not constant.  */
2179 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2180 new_size = TREE_OPERAND (new_size, 0);
2185 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2186 related by the addition of a constant.  Return that constant if so.  */
2189 compute_related_constant (tree op0, tree op1)
2191 tree op0_var, op1_var;
/* Split each operand into a constant and a variable part; the candidate
   constant is the difference of the two constant parts.  */
2192 tree op0_con = split_plus (op0, &op0_var);
2193 tree op1_con = split_plus (op1, &op1_var);
2194 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
/* The difference is valid if the variable parts match exactly, or if
   adding it back to OP1's variable part reconstitutes OP0.  The return
   statements for both tests are on elided lines of this listing.  */
2196 if (operand_equal_p (op0_var, op1_var, 0))
2198 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2204 /* Utility function of above to split a tree OP which may be a sum, into a
2205 constant part, which is returned, and a variable part, which is stored
2206 in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */
2210 split_plus (tree in, tree *pvar)
2212 /* Strip conversions in order to ease the tree traversal and maximize the
2213 potential for constant or plus/minus discovery.  We need to be careful
2214 to always return and set *pvar to bitsizetype trees, but it's worth
   the effort.  */
2216 in = remove_conversions (in, false);
/* Default: the whole expression is the variable part and the constant
   part is zero; the cases below refine this.  */
2218 *pvar = convert (bitsizetype, in);
2220 if (TREE_CODE (in) == INTEGER_CST)
2222 *pvar = bitsize_zero_node;
2223 return convert (bitsizetype, in);
2225 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2227 tree lhs_var, rhs_var;
2228 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2229 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
/* Neither side contributed a constant: keep *PVAR as set above.  */
2231 if (lhs_var == TREE_OPERAND (in, 0)
2232 && rhs_var == TREE_OPERAND (in, 1))
2233 return bitsize_zero_node;
/* Recombine the variable and constant parts with the original
   PLUS_EXPR/MINUS_EXPR code.  */
2235 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2236 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2239 return bitsize_zero_node;
2242 /* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
2243 subprogram.  If it is VOID_TYPE, then we are dealing with a procedure,
2244 otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
2245 PARM_DECL nodes that are the subprogram parameters.  CICO_LIST is the
2246 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2247 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2248 object.  RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2249 reference.  RETURN_BY_INVISI_REF_P is true if the function returns by
2250 invisible reference.  */
2253 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
2254 bool return_unconstrained_p, bool return_by_direct_ref_p,
2255 bool return_by_invisi_ref_p)
2257 /* A list of the data type nodes of the subprogram formal parameters.
2258 This list is generated by traversing the input list of PARM_DECL
2260 vec<tree, va_gc> *param_type_list = NULL;
2263 for (t = param_decl_list; t; t = DECL_CHAIN (t))
2264 vec_safe_push (param_type_list, TREE_TYPE (t));
2266 type = build_function_type_vec (return_type, param_type_list);
2268 /* TYPE may have been shared since GCC hashes types.  If it has a different
2269 CICO_LIST, make a copy.  Likewise for the various flags.  */
2270 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
2271 return_by_direct_ref_p, return_by_invisi_ref_p))
2273 type = copy_type (type);
2274 TYPE_CI_CO_LIST (type) = cico_list;
2275 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
2276 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
/* The invisible-reference property is stowed in TREE_ADDRESSABLE on the
   FUNCTION_TYPE; fntype_same_flags_p above must check the same slot.  */
2277 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
2283 /* Return a copy of TYPE but safe to modify in any way.  */
2286 copy_type (tree type)
2288 tree new_type = copy_node (type);
2290 /* Unshare the language-specific data.  */
2291 if (TYPE_LANG_SPECIFIC (type))
2293 TYPE_LANG_SPECIFIC (new_type) = NULL;
2294 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2297 /* And the contents of the language-specific slot if needed.  */
2298 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2299 && TYPE_RM_VALUES (type))
/* Clear first, then re-set each RM value so the setters install them
   into the freshly unshared slot rather than the shared one.  */
2301 TYPE_RM_VALUES (new_type) = NULL_TREE;
2302 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2303 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2304 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2307 /* copy_node clears this field instead of copying it, because it is
2308 aliased with TREE_CHAIN.  */
2309 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
/* Detach the copy from the original's pointer/variant webs so that it is
   laid out and referenced independently from now on.  */
2311 TYPE_POINTER_TO (new_type) = 0;
2312 TYPE_REFERENCE_TO (new_type) = 0;
2313 TYPE_MAIN_VARIANT (new_type) = new_type;
2314 TYPE_NEXT_VARIANT (new_type) = 0;
2315 TYPE_CANONICAL (new_type) = new_type;
2320 /* Return a subtype of sizetype with range MIN to MAX and whose
2321 TYPE_INDEX_TYPE is INDEX.  GNAT_NODE is used for the position
2322 of the associated TYPE_DECL.  */
2325 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2327 /* First build a type for the desired range.  */
2328 tree type = build_nonshared_range_type (sizetype, min, max);
2330 /* Then set the index type.  */
2331 SET_TYPE_INDEX_TYPE (type, index);
/* Attach an artificial TYPE_DECL, positioned at GNAT_NODE, without
   requesting debug info for it.  */
2332 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2337 /* Return a subtype of TYPE with range MIN to MAX.  If TYPE is NULL,
2338 sizetype is used.  */
2341 create_range_type (tree type, tree min, tree max)
/* NOTE(review): the declaration of RANGE_TYPE and the NULL-TYPE =>
   sizetype fallback are on elided lines of this listing.  */
2348 /* First build a type with the base range.  */
2349 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2350 TYPE_MAX_VALUE (type));
2352 /* Then set the actual range.  */
2353 SET_TYPE_RM_MIN_VALUE (range_type, min);
2354 SET_TYPE_RM_MAX_VALUE (range_type, max);
2359 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2360 NAME gives the name of the type to be used in the declaration.  */
2363 create_type_stub_decl (tree name, tree type)
2365 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
/* Stub decls are always compiler-generated.  */
2366 DECL_ARTIFICIAL (type_decl) = 1;
2367 TYPE_ARTIFICIAL (type) = 1;
2371 /* Return a TYPE_DECL node for TYPE.  NAME gives the name of the type to be
2372 used in the declaration.  ARTIFICIAL_P is true if the declaration was
2373 generated by the compiler.  DEBUG_INFO_P is true if we need to write
2374 debug information about this type.  GNAT_NODE is used for the position
   of the decl.  */
2378 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2381 enum tree_code code = TREE_CODE (type);
/* IS_NAMED: the type already carries a TYPE_DECL name.  */
2383 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2386 /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
2387 gcc_assert (!TYPE_IS_DUMMY_P (type));
2389 /* If the type hasn't been named yet, we're naming it; preserve an existing
2390 TYPE_STUB_DECL that has been attached to it for some purpose.  */
2391 if (!is_named && TYPE_STUB_DECL (type))
2393 type_decl = TYPE_STUB_DECL (type);
2394 DECL_NAME (type_decl) = name;
2397 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2399 DECL_ARTIFICIAL (type_decl) = artificial_p;
2400 TYPE_ARTIFICIAL (type) = artificial_p;
2402 /* Add this decl to the current binding level.  */
2403 gnat_pushdecl (type_decl, gnat_node);
2405 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.  This
2406 causes the name to be also viewed as a "tag" by the debug back-end, with
2407 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
   types in DWARF.
2410 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2411 from multiple contexts, and "type_decl" references a copy of it: in such a
2412 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2413 with the mechanism above.  */
2414 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2415 TYPE_STUB_DECL (type) = type_decl;
2417 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2418 back-end doesn't support, and for others if we don't need to.  */
2419 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2420 DECL_IGNORED_P (type_decl) = 1;
2425 /* Return a VAR_DECL or CONST_DECL node.
2427 NAME gives the name of the variable.  ASM_NAME is its assembler name
2428 (if provided).  TYPE is its data type (a GCC ..._TYPE node).  INIT is
2429 the GCC tree for an optional initial expression; NULL_TREE if none.
2431 CONST_FLAG is true if this variable is constant, in which case we might
2432 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2434 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2435 definition to be made visible outside of the current compilation unit, for
2436 instance variable definitions in a package specification.
2438 EXTERN_FLAG is true when processing an external variable declaration (as
2439 opposed to a definition: no storage is to be allocated for the variable).
2441 STATIC_FLAG is only relevant when not at top level and indicates whether
2442 to always allocate storage to the variable.
2444 VOLATILE_FLAG is true if this variable is declared as volatile.
2446 ARTIFICIAL_P is true if the variable was generated by the compiler.
2448 DEBUG_INFO_P is true if we need to write debug information for it.
2450 ATTR_LIST is the list of attributes to be attached to the variable.
2452 GNAT_NODE is used for the position of the decl.  */
2455 create_var_decl (tree name, tree asm_name, tree type, tree init,
2456 bool const_flag, bool public_flag, bool extern_flag,
2457 bool static_flag, bool volatile_flag, bool artificial_p,
2458 bool debug_info_p, struct attrib *attr_list,
2459 Node_Id gnat_node, bool const_decl_allowed_p)
2461 /* Whether the object has static storage duration, either explicitly or by
2462 virtue of being declared at the global level.  */
2463 const bool static_storage = static_flag || global_bindings_p ();
2465 /* Whether the initializer is constant: for an external object or an object
2466 with static storage duration, we check that the initializer is a valid
2467 constant expression for initializing a static variable; otherwise, we
2468 only check that it is constant.  */
2469 const bool init_const
2471 && gnat_types_compatible_p (type, TREE_TYPE (init))
2472 && (extern_flag || static_storage
2473 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2475 : TREE_CONSTANT (init)));
2477 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2478 case the initializer may be used in lieu of the DECL node (as done in
2479 Identifier_to_gnu).  This is useful to prevent the need of elaboration
2480 code when an identifier for which such a DECL is made is in turn used
2481 as an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this,
2482 but extra constraints apply to this choice (see below) and they are not
2483 relevant to the distinction we wish to make.  */
2484 const bool constant_p = const_flag && init_const;
2486 /* The actual DECL node.  CONST_DECL was initially intended for enumerals
2487 and may be used for scalars in general but not for aggregates.  */
2489 = build_decl (input_location,
2490 (constant_p && const_decl_allowed_p
2491 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2494 /* Detect constants created by the front-end to hold 'reference to function
2495 calls for stabilization purposes.  This is needed for renaming.  */
2496 if (const_flag && init && POINTER_TYPE_P (type))
/* Peel a COMPOUND_EXPR and any conversions off INIT, then look for the
   address of a (non-atomic-load) call or of a return value.  The
   declaration of INNER is on an elided line of this listing.  */
2499 if (TREE_CODE (inner) == COMPOUND_EXPR)
2500 inner = TREE_OPERAND (inner, 1);
2501 inner = remove_conversions (inner, true);
2502 if (TREE_CODE (inner) == ADDR_EXPR
2503 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2504 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2505 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2506 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2507 DECL_RETURN_VALUE_P (var_decl) = 1;
2510 /* If this is external, throw away any initializations (they will be done
2511 elsewhere) unless this is a constant for which we would like to remain
2512 able to get the initializer.  If we are defining a global here, leave a
2513 constant initialization and save any variable elaborations for the
2514 elaboration routine.  If we are just annotating types, throw away the
2515 initialization if it isn't a constant.  */
2516 if ((extern_flag && !constant_p)
2517 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2520 /* At the global level, a non-constant initializer generates elaboration
2521 statements.  Check that such statements are allowed, that is to say,
2522 not violating a No_Elaboration_Code restriction.  */
2523 if (init && !init_const && global_bindings_p ())
2524 Check_Elaboration_Code_Allowed (gnat_node);
2526 /* Attach the initializer, if any.  */
2527 DECL_INITIAL (var_decl) = init;
2529 /* Directly set some flags.  */
2530 DECL_ARTIFICIAL (var_decl) = artificial_p;
2531 DECL_EXTERNAL (var_decl) = extern_flag;
2533 TREE_CONSTANT (var_decl) = constant_p;
2534 TREE_READONLY (var_decl) = const_flag;
2536 /* The object is public if it is external or if it is declared public
2537 and has static storage duration.  */
2538 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2540 /* We need to allocate static storage for an object with static storage
2541 duration if it isn't external.  */
2542 TREE_STATIC (var_decl) = !extern_flag && static_storage;
/* A volatile object has side effects; either the type or the explicit
   flag can make it so.  */
2544 TREE_SIDE_EFFECTS (var_decl)
2545 = TREE_THIS_VOLATILE (var_decl)
2546 = TYPE_VOLATILE (type) | volatile_flag;
2548 if (TREE_SIDE_EFFECTS (var_decl))
2549 TREE_ADDRESSABLE (var_decl) = 1;
2551 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2552 try to fiddle with DECL_COMMON.  However, on platforms that don't
2553 support global BSS sections, uninitialized global variables would
2554 go in DATA instead, thus increasing the size of the executable.  */
2556 && TREE_CODE (var_decl) == VAR_DECL
2557 && TREE_PUBLIC (var_decl)
2558 && !have_global_bss_p ())
2559 DECL_COMMON (var_decl) = 1;
2561 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2562 since we will create an associated variable.  Likewise for an external
2563 constant whose initializer is not absolute, because this would mean a
2564 global relocation in a read-only section which runs afoul of the PE-COFF
2565 run-time relocation mechanism.  */
2567 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2571 && initializer_constant_valid_p (init, TREE_TYPE (init))
2572 != null_pointer_node))
2573 DECL_IGNORED_P (var_decl) = 1;
2575 /* ??? Some attributes cannot be applied to CONST_DECLs.  */
2576 if (TREE_CODE (var_decl) == VAR_DECL)
2577 process_attributes (&var_decl, &attr_list, true, gnat_node);
2579 /* Add this decl to the current binding level.  */
2580 gnat_pushdecl (var_decl, gnat_node);
2582 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2584 /* Let the target mangle the name if this isn't a verbatim asm.  */
2585 if (*IDENTIFIER_POINTER (asm_name) != '*')
2586 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2588 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2594 /* Return true if TYPE, an aggregate type, contains (or is) an array.  */
2597 aggregate_type_contains_array_p (tree type)
2599 switch (TREE_CODE (type))
/* NOTE(review): the RECORD_TYPE and UNION_TYPE case labels preceding
   this one are on elided lines of this listing.  */
2603 case QUAL_UNION_TYPE:
/* Recurse into every aggregate field looking for an array.  */
2606 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2607 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2608 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2621 /* Return a FIELD_DECL node.  NAME is the field's name, TYPE is its type and
2622 RECORD_TYPE is the type of the enclosing record.  If SIZE is nonzero, it
2623 is the specified size of the field.  If POS is nonzero, it is the bit
2624 position.  PACKED is 1 if the enclosing record is packed, -1 if it has
2625 Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
2626 means we are allowed to take the address of the field; if it is negative,
2627 we should not make a bitfield, which is used by make_aligning_type.  */
2630 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2631 int packed, int addressable)
2633 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2635 DECL_CONTEXT (field_decl) = record_type;
2636 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2638 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2639 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2640 Likewise for an aggregate without specified position that contains an
2641 array, because in this case slices of variable length of this array
2642 must be handled by GCC and variable-sized objects need to be aligned
2643 to at least a byte boundary.  */
2644 if (packed && (TYPE_MODE (type) == BLKmode
2646 && AGGREGATE_TYPE_P (type)
2647 && aggregate_type_contains_array_p (type))))
2648 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
2650 /* If a size is specified, use it.  Otherwise, if the record type is packed
2651 compute a size to use, which may differ from the object's natural size.
2652 We always set a size in this case to trigger the checks for bitfield
2653 creation below, which is typically required when no position has been
   specified.  */
2656 size = convert (bitsizetype, size);
2657 else if (packed == 1)
2659 size = rm_size (type);
/* A BLKmode packed field still cannot be smaller than a byte.  */
2660 if (TYPE_MODE (type) == BLKmode)
2661 size = round_up (size, BITS_PER_UNIT);
2664 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2665 specified for two reasons: first if the size differs from the natural
2666 size.  Second, if the alignment is insufficient.  There are a number of
2667 ways the latter can be true.
2669 We never make a bitfield if the type of the field has a nonconstant size,
2670 because no such entity requiring bitfield operations should reach here.
2672 We do *preventively* make a bitfield when there might be the need for it
2673 but we don't have all the necessary information to decide, as is the case
2674 of a field with no specified position in a packed record.
2676 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2677 in layout_decl or finish_record_type to clear the bit_field indication if
2678 it is in fact not needed.  */
2679 if (addressable >= 0
2681 && TREE_CODE (size) == INTEGER_CST
2682 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2683 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2684 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2686 || (TYPE_ALIGN (record_type) != 0
2687 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2689 DECL_BIT_FIELD (field_decl) = 1;
2690 DECL_SIZE (field_decl) = size;
/* Without explicit packing or position, pre-set the alignment that the
   bitfield machinery would otherwise have to figure out.  */
2691 if (!packed && !pos)
2693 if (TYPE_ALIGN (record_type) != 0
2694 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2695 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
2697 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2701 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2703 /* Bump the alignment if need be, either for bitfield/packing purposes or
2704 to satisfy the type requirements if no such consideration applies.  When
2705 we get the alignment from the type, indicate if this is from an explicit
2706 user request, which prevents stor-layout from lowering it later on.  */
2708 unsigned int bit_align
2709 = (DECL_BIT_FIELD (field_decl) ? 1
2710 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
2712 if (bit_align > DECL_ALIGN (field_decl))
2713 SET_DECL_ALIGN (field_decl, bit_align);
2714 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2716 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2717 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2723 /* We need to pass in the alignment the DECL is known to have.
2724 This is the lowest-order bit set in POS, but no more than
2725 the alignment of the record, if one is specified.  Note
2726 that an alignment of 0 is taken as infinite.  */
2727 unsigned int known_align;
2729 if (tree_fits_uhwi_p (pos))
/* Lowest set bit of POS gives the guaranteed alignment.  */
2730 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2732 known_align = BITS_PER_UNIT;
2734 if (TYPE_ALIGN (record_type)
2735 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2736 known_align = TYPE_ALIGN (record_type);
2738 layout_decl (field_decl, known_align);
2739 SET_DECL_OFFSET_ALIGN (field_decl,
2740 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2742 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2743 &DECL_FIELD_BIT_OFFSET (field_decl),
2744 DECL_OFFSET_ALIGN (field_decl), pos);
2747 /* In addition to what our caller says, claim the field is addressable if we
2748 know that its type is not suitable.
2750 The field may also be "technically" nonaddressable, meaning that even if
2751 we attempt to take the field's address we will actually get the address
2752 of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
2753 value we have at this point is not accurate enough, so we don't account
2754 for this here and let finish_record_type decide.  */
2755 if (!addressable && !type_for_nonaliased_component_p (type))
2758 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2763 /* Return a PARM_DECL node.  NAME is the name of the parameter and TYPE is
2764 its type.  READONLY is true if the parameter is readonly (either an In
2765 parameter or an address of a pass-by-ref parameter).  */
2768 create_param_decl (tree name, tree type, bool readonly)
2770 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2772 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2773 can lead to various ABI violations.  */
2774 if (targetm.calls.promote_prototypes (NULL_TREE)
2775 && INTEGRAL_TYPE_P (type)
2776 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2778 /* We have to be careful about biased types here.  Make a subtype
2779 of integer_type_node with the proper biasing.  */
2780 if (TREE_CODE (type) == INTEGER_TYPE
2781 && TYPE_BIASED_REPRESENTATION_P (type))
/* Build an integer-width unsigned subtype that keeps the original
   bias and bounds, so the promoted parameter decodes correctly.  */
2784 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2785 TREE_TYPE (subtype) = integer_type_node;
2786 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2787 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2788 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2792 type = integer_type_node;
/* DECL_ARG_TYPE records the type the parameter is actually passed as,
   which the promotion above may have widened.  */
2795 DECL_ARG_TYPE (param_decl) = type;
2796 TREE_READONLY (param_decl) = readonly;
2800 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2801    a TYPE.  If IN_PLACE is true, the tree pointed to by NODE should not be
2802    changed.  GNAT_NODE is used for the position of error messages. */
2805 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2808   struct attrib *attr;
/* Walk the attribute list and dispatch on the attribute kind; each case
   below belongs to a switch on the attribute's discriminant (the switch
   header is not visible in this excerpt).  */
2810   for (attr = *attr_list; attr; attr = attr->next)
2813       case ATTR_MACHINE_ATTRIBUTE:
/* Point input_location at the Ada node so diagnostics emitted by
   decl_attributes are properly located.  */
2814	Sloc_to_locus (Sloc (gnat_node), &input_location);
2815	decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2816			 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2819       case ATTR_LINK_ALIAS:
2820	if (!DECL_EXTERNAL (*node))
2822	    TREE_STATIC (*node) = 1;
2823	    assemble_alias (*node, attr->name);
2827       case ATTR_WEAK_EXTERNAL:
/* Weak declarations are only emitted on targets that support them;
   otherwise warn (the supporting condition is elided here).  */
2829	  declare_weak (*node);
2831	  post_error ("?weak declarations not supported on this target",
2835       case ATTR_LINK_SECTION:
2836	if (targetm_common.have_named_sections)
2838	    set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
/* A decl placed in an explicit section must not be emitted as common.  */
2839	    DECL_COMMON (*node) = 0;
2842	  post_error ("?section attributes are not supported for this target",
2846       case ATTR_LINK_CONSTRUCTOR:
2847	DECL_STATIC_CONSTRUCTOR (*node) = 1;
2848	TREE_USED (*node) = 1;
2851       case ATTR_LINK_DESTRUCTOR:
2852	DECL_STATIC_DESTRUCTOR (*node) = 1;
2853	TREE_USED (*node) = 1;
2856       case ATTR_THREAD_LOCAL_STORAGE:
2857	set_decl_tls_model (*node, decl_default_tls_model (*node));
2858	DECL_COMMON (*node) = 0;
2865 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
2869 value_factor_p (tree value, HOST_WIDE_INT factor)
/* A compile-time constant can be checked directly.  */
2871   if (tree_fits_uhwi_p (value))
2872     return tree_to_uhwi (value) % factor == 0;
/* A product is a multiple of FACTOR if either operand is; this is a
   conservative (one-sided) test, false only means "not provable".  */
2874   if (TREE_CODE (value) == MULT_EXPR)
2875     return (value_factor_p (TREE_OPERAND (value, 0), factor)
2876	     || value_factor_p (TREE_OPERAND (value, 1), factor));
2881 /* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2882    from the parameter association for the instantiation of a generic.  We do
2883    not want to emit source location for them: the code generated for their
2884    initialization is likely to disturb debugging. */
2887 renaming_from_generic_instantiation_p (Node_Id gnat_node)
/* Quickly reject anything that is not a compiler-generated object
   renaming: it must be a defining identifier of object kind, not come
   from source, and actually rename another object.  */
2889   if (Nkind (gnat_node) != N_Defining_Identifier
2890       || !IN (Ekind (gnat_node), Object_Kind)
2891       || Comes_From_Source (gnat_node)
2892       || !Present (Renamed_Object (gnat_node)))
2895   /* Get the object declaration of the renamed object, if any and if the
2896      renamed object is a mere identifier.  */
2897   gnat_node = Renamed_Object (gnat_node);
2898   if (Nkind (gnat_node) != N_Identifier)
2901   gnat_node = Entity (gnat_node);
2902   if (!Present (Parent (gnat_node)))
2905   gnat_node = Parent (gnat_node);
/* The renaming comes from a generic instantiation iff the renamed
   object's declaration carries a corresponding generic association.  */
2907     (Present (gnat_node)
2908      && Nkind (gnat_node) == N_Object_Declaration
2909      && Present (Corresponding_Generic_Association (gnat_node)));
2912 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2913    feed it with the elaboration of GNAT_SCOPE. */
2915 static struct deferred_decl_context_node *
2916 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2918   struct deferred_decl_context_node *new_node;
/* NOTE(review): the xmalloc result is not checked here because xmalloc
   aborts on allocation failure by design.  */
2921     = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2922   new_node->decl = decl;
2923   new_node->gnat_scope = gnat_scope;
2924   new_node->force_global = force_global;
/* Start with room for one deferred type; more can be pushed later via
   add_deferred_type_context.  */
2925   new_node->types.create (1);
/* Push the new node at the head of the global deferral queue, which is
   drained by process_deferred_decl_context.  */
2926   new_node->next = deferred_decl_context_queue;
2927   deferred_decl_context_queue = new_node;
2931 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2932    feed it with the DECL_CONTEXT computed as part of N as soon as it is
2936 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
/* Record TYPE on N's list; its TYPE_CONTEXT will be set when N's decl
   context is finally resolved.  */
2938   n->types.safe_push (type);
2941 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available.  Return
2942    NULL_TREE if it is not available. */
2945 compute_deferred_decl_context (Entity_Id gnat_scope)
/* Only scopes that already have a GNU translation can provide a context.  */
2949   if (present_gnu_tree (gnat_scope))
2950     context = get_gnu_tree (gnat_scope);
2954   if (TREE_CODE (context) == TYPE_DECL)
2956       const tree context_type = TREE_TYPE (context);
2958       /* Skip dummy types: only the final ones can appear in the context
/* A dummy type means the scope is not elaborated yet, so report
   "not available" (the return is elided in this excerpt).  */
2960       if (TYPE_DUMMY_P (context_type))
2963       /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2966	context = context_type;
2972 /* Try to process all deferred nodes in the queue.  Keep in the queue the ones
2973    that cannot be processed yet, remove the other ones.  If FORCE is true,
2974    force the processing for all nodes, use the global context when nodes don't
2975    have a GNU translation. */
2978 process_deferred_decl_context (bool force)
2980   struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2981   struct deferred_decl_context_node *node;
2985       bool processed = false;
2986       tree context = NULL_TREE;
2987       Entity_Id gnat_scope;
2991       /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
2992	 get the first scope.  */
2993       gnat_scope = node->gnat_scope;
2994       while (Present (gnat_scope))
2996	  context = compute_deferred_decl_context (gnat_scope);
/* Without FORCE we stop at the first scope whether or not it is
   elaborated; with FORCE we keep walking outward until we find an
   elaborated one.  */
2997	  if (!force || context)
2999	  gnat_scope = get_debug_scope (gnat_scope, NULL);
3002       /* Imported declarations must not be in a local context (i.e. not inside
/* Walk up the context chain to verify no FUNCTION_DECL encloses a
   force-global node.  */
3004       if (context && node->force_global > 0)
3010	      gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
3011	      ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
3015       /* If FORCE, we want to get rid of all nodes in the queue: in case there
3016	 was no elaborated scope, use the global context.  */
3017       if (force && !context)
3018	context = get_global_context ();
3025	  DECL_CONTEXT (node->decl) = context;
3027	  /* Propagate it to the TYPE_CONTEXT attributes of the requested
3029	  FOR_EACH_VEC_ELT (node->types, i, t)
3031	      gnat_set_type_context (t, context);
3036       /* If this node has been successfully processed, remove it from the
3037	 queue.  Then move to the next node.  */
3041	  node->types.release ();
3050 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3053 scale_by_factor_of (tree expr, unsigned int value)
3055   unsigned HOST_WIDE_INT addend = 0;
3056   unsigned HOST_WIDE_INT factor = 1;
3058   /* Peel conversions around EXPR and try to extract bodies from function
3059      calls: it is possible to get the scale factor from size functions.  */
3060   expr = remove_conversions (expr, true);
3061   if (TREE_CODE (expr) == CALL_EXPR)
3062     expr = maybe_inline_call_in_expr (expr);
3064   /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3065      multiple of the scale factor we are looking for.  */
3066   if (TREE_CODE (expr) == PLUS_EXPR
3067       && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3068       && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
/* Remember the constant addend and analyze the other operand.  */
3070       addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3071       expr = TREE_OPERAND (expr, 0);
3074   /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3075      corresponding to the number of trailing zeros of the mask.  */
3076   if (TREE_CODE (expr) == BIT_AND_EXPR
3077       && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3079       unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
/* Count the trailing zero bits of the mask; the accumulation of FACTOR
   from this count is elided in this excerpt.  */
3082       while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3090   /* If the addend is not a multiple of the factor we found, give up.  In
3091      theory we could find a smaller common factor but it's useless for our
3092      needs.  This situation arises when dealing with a field F1 with no
3093      alignment requirement but that is following a field F2 with such
3094      requirements.  As long as we have F2's offset, we don't need alignment
3095      information to compute F1's.  */
3096   if (addend % factor != 0)
3099   return factor * value;
3102 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
3103    unless we can prove these 2 fields are laid out in such a way that no gap
3104    exists between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
3105    is the distance in bits between the end of PREV_FIELD and the starting
3106    position of CURR_FIELD.  It is ignored if null. */
3109 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3111   /* If this is the first field of the record, there cannot be any gap */
3115   /* If the previous field is a union type, then return false: The only
3116      time when such a field is not the last field of the record is when
3117      there are other components at fixed positions after it (meaning there
3118      was a rep clause for every field), in which case we don't want the
3119      alignment constraint to override them.  */
3120   if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3123   /* If the distance between the end of prev_field and the beginning of
3124      curr_field is constant, then there is a gap if the value of this
3125      constant is not null.  */
3126   if (offset && tree_fits_uhwi_p (offset))
3127     return !integer_zerop (offset);
3129   /* If the size and position of the previous field are constant,
3130      then check the sum of this size and position.  There will be a gap
3131      iff it is not multiple of the current field alignment.  */
3132   if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3133       && tree_fits_uhwi_p (bit_position (prev_field)))
3134     return ((tree_to_uhwi (bit_position (prev_field))
3135	     + tree_to_uhwi (DECL_SIZE (prev_field)))
3136	    % DECL_ALIGN (curr_field) != 0);
3138   /* If both the position and size of the previous field are multiples
3139      of the current field alignment, there cannot be any gap. */
3140   if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3141       && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3144   /* Fallback, return that there may be a potential gap */
3148 /* Return a LABEL_DECL with NAME.  GNAT_NODE is used for the position of
3152 create_label_decl (tree name, Node_Id gnat_node)
3155     = build_decl (input_location, LABEL_DECL, name, void_type_node);
/* Labels have no machine mode of their own.  */
3157   DECL_MODE (label_decl) = VOIDmode;
3159   /* Add this decl to the current binding level.  */
3160   gnat_pushdecl (label_decl, gnat_node);
3165 /* Return a FUNCTION_DECL node.  NAME is the name of the subprogram, ASM_NAME
3166    its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3167    the list of its parameters (a list of PARM_DECL nodes chained through the
3170    INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3172    CONST_FLAG, PUBLIC_FLAG, EXTERN_FLAG, VOLATILE_FLAG are used to set the
3173    appropriate flags on the FUNCTION_DECL.
3175    ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3177    DEBUG_INFO_P is true if we need to write debug information for it.
3179    ATTR_LIST is the list of attributes to be attached to the subprogram.
3181    GNAT_NODE is used for the position of the decl. */
3184 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3185		     enum inline_status_t inline_status, bool const_flag,
3186		     bool public_flag, bool extern_flag, bool volatile_flag,
3187		     bool artificial_p, bool debug_info_p,
3188		     struct attrib *attr_list, Node_Id gnat_node)
3190   tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
/* The RESULT_DECL holds the function's return value; its type is the
   FUNCTION_TYPE's return type.  */
3192     = build_decl (input_location, RESULT_DECL, NULL_TREE, TREE_TYPE (type));
3193   DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3195   DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3196   DECL_EXTERNAL (subprog_decl) = extern_flag;
/* Dispatch on the requested inline status; the case labels themselves
   are elided in this excerpt.  */
3198   switch (inline_status)
3201       DECL_UNINLINABLE (subprog_decl) = 1;
/* With back-end inlining, mark the subprogram always_inline so the
   inliner honors front-end mandated inlining.  */
3208       if (Back_End_Inlining)
3209	decl_attributes (&subprog_decl,
3210			 tree_cons (get_identifier ("always_inline"),
3211				    NULL_TREE, NULL_TREE),
3212			 ATTR_FLAG_TYPE_IN_PLACE);
3214       /* ... fall through ... */
3217       DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3218       DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3226     DECL_IGNORED_P (subprog_decl) = 1;
/* Merge the flags coming from the FUNCTION_TYPE with the explicit ones.  */
3228   TREE_READONLY (subprog_decl) = TYPE_READONLY (type) | const_flag;
3229   TREE_PUBLIC (subprog_decl) = public_flag;
3230   TREE_SIDE_EFFECTS (subprog_decl)
3231     = TREE_THIS_VOLATILE (subprog_decl)
3232     = TYPE_VOLATILE (type) | volatile_flag;
3234   DECL_ARTIFICIAL (result_decl) = 1;
3235   DECL_IGNORED_P (result_decl) = 1;
3236   DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3237   DECL_RESULT (subprog_decl) = result_decl;
3239   process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3241   /* Add this decl to the current binding level.  */
3242   gnat_pushdecl (subprog_decl, gnat_node);
3246       /* Let the target mangle the name if this isn't a verbatim asm.  */
3247       if (*IDENTIFIER_POINTER (asm_name) != '*')
3248	asm_name = targetm.mangle_decl_assembler_name (subprog_decl, asm_name);
3250       SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
3252       /* The expand_main_function circuitry expects "main_identifier_node" to
3253	 designate the DECL_NAME of the 'main' entry point, in turn expected
3254	 to be declared as the "main" function literally by default.  Ada
3255	 program entry points are typically declared with a different name
3256	 within the binder generated file, exported as 'main' to satisfy the
3257	 system expectations.  Force main_identifier_node in this case.  */
3258       if (asm_name == main_identifier_node)
3259	DECL_NAME (subprog_decl) = main_identifier_node;
3262   /* Output the assembler code and/or RTL for the declaration.  */
3263   rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3265   return subprog_decl;
3268 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3269    body.  This routine needs to be invoked before processing the declarations
3270    appearing in the subprogram. */
3273 begin_subprog_body (tree subprog_decl)
3277   announce_function (subprog_decl);
3279   /* This function is being defined.  */
3280   TREE_STATIC (subprog_decl) = 1;
3282   /* The failure of this assertion will likely come from a wrong context for
3283      the subprogram body, e.g. another procedure for a procedure declared at
3285   gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3287   current_function_decl = subprog_decl;
3289   /* Enter a new binding level and show that all the parameters belong to
/* Reparent every PARM_DECL to the subprogram being defined.  */
3293   for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3294        param_decl = DECL_CHAIN (param_decl))
3295     DECL_CONTEXT (param_decl) = subprog_decl;
3297   make_decl_rtl (subprog_decl);
3300 /* Finish translating the current subprogram and set its BODY. */
3303 end_subprog_body (tree body)
3305   tree fndecl = current_function_decl;
3307   /* Attach the BLOCK for this level to the function and pop the level.  */
3308   BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3309   DECL_INITIAL (fndecl) = current_binding_level->block;
3312   /* Mark the RESULT_DECL as being in this subprogram. */
3313   DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3315   /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
3316   if (TREE_CODE (body) == BIND_EXPR)
/* Prefer the BIND_EXPR's own block as DECL_INITIAL, overriding the one
   set from the binding level above.  */
3318       BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3319       DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3322   DECL_SAVED_TREE (fndecl) = body;
/* Restore the enclosing function (or NULL at top level) as current.  */
3324   current_function_decl = decl_function_context (fndecl);
3327 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3330 rest_of_subprog_body_compilation (tree subprog_decl)
3332   /* We cannot track the location of errors past this point.  */
3333   error_gnat_node = Empty;
3335   /* If we're only annotating types, don't actually compile this function.  */
3336   if (type_annotate_only)
3339   /* Dump functions before gimplification.  */
3340   dump_function (TDI_original, subprog_decl);
/* Only hand top-level functions to the cgraph for compilation; nested
   functions are registered below so they reach their parent's list.  */
3342   if (!decl_function_context (subprog_decl))
3343     cgraph_node::finalize_function (subprog_decl, false);
3345     /* Register this function with cgraph just far enough to get it
3346        added to our parent's nested function list.  */
3347     (void) cgraph_node::get_create (subprog_decl);
/* Register the builtin function DECL at global level; presumably returns
   DECL to the caller — TODO confirm, the return statement is not visible
   in this excerpt.  */
3351 gnat_builtin_function (tree decl)
/* Empty is used as the GNAT node since builtins have no Ada source.  */
3353   gnat_pushdecl (decl, Empty);
3357 /* Return an integer type with the number of bits of precision given by
3358    PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
3359    it is a signed type. */
3362 gnat_type_for_size (unsigned precision, int unsignedp)
/* Small-precision types are cached; return the cached node if present.  */
3367   if (precision <= 2 * MAX_BITS_PER_WORD
3368       && signed_and_unsigned_types[precision][unsignedp])
3369     return signed_and_unsigned_types[precision][unsignedp];
3372     t = make_unsigned_type (precision);
3374     t = make_signed_type (precision);
/* Cache the freshly-made type for subsequent requests.  */
3376   if (precision <= 2 * MAX_BITS_PER_WORD)
3377     signed_and_unsigned_types[precision][unsignedp] = t;
/* Give the type a name such as SIGNED_32 or UNSIGNED_8 for debug info.  */
3381       sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3382       TYPE_NAME (t) = get_identifier (type_name);
3388 /* Likewise for floating-point types. */
3391 float_type_for_precision (int precision, machine_mode mode)
/* One REAL_TYPE per machine mode is cached in float_types[].  */
3396   if (float_types[(int) mode])
3397     return float_types[(int) mode];
3399   float_types[(int) mode] = t = make_node (REAL_TYPE);
3400   TYPE_PRECISION (t) = precision;
/* The precision requested must yield exactly the requested mode.  */
3403   gcc_assert (TYPE_MODE (t) == mode);
/* Name the type FLOAT_<precision> for debug info.  */
3406       sprintf (type_name, "FLOAT_%d", precision);
3407       TYPE_NAME (t) = get_identifier (type_name);
3413 /* Return a data type that has machine mode MODE.  UNSIGNEDP selects
3414    an unsigned type; otherwise a signed type is returned. */
3417 gnat_type_for_mode (machine_mode mode, int unsignedp)
/* No single type corresponds to BLKmode; the return value here is
   elided in this excerpt.  */
3419   if (mode == BLKmode)
3422   if (mode == VOIDmode)
3423     return void_type_node;
/* Complex modes are handled before the scalar cases below; the handling
   itself is elided in this excerpt.  */
3425   if (COMPLEX_MODE_P (mode))
3428   if (SCALAR_FLOAT_MODE_P (mode))
3429     return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
3431   if (SCALAR_INT_MODE_P (mode))
3432     return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
3434   if (VECTOR_MODE_P (mode))
/* Build the vector type from the type of its inner (element) mode.  */
3436       machine_mode inner_mode = GET_MODE_INNER (mode);
3437       tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3439	return build_vector_type_for_mode (inner_type, mode);
3445 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3446    signedness being specified by UNSIGNEDP. */
3449 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
/* Plain char maps onto the explicitly signed/unsigned char types.  */
3451   if (type_node == char_type_node)
3452     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3454   tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
/* For modular types, preserve the link to the original type through
   TREE_TYPE so modular semantics are not lost.  */
3456   if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3458       type = copy_node (type);
3459       TREE_TYPE (type) = type_node;
/* Likewise when TYPE_NODE is a subtype of a modular type.  */
3461   else if (TREE_TYPE (type_node)
3462	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3463	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3465       type = copy_node (type);
3466       TREE_TYPE (type) = TREE_TYPE (type_node);
3472 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3473    transparently converted to each other. */
3476 gnat_types_compatible_p (tree t1, tree t2)
3478   enum tree_code code;
3480   /* This is the default criterion.  */
3481   if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3484   /* We only check structural equivalence here.  */
3485   if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3488   /* Vector types are also compatible if they have the same number of subparts
3489      and the same form of (scalar) element type.  */
3490   if (code == VECTOR_TYPE
3491       && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3492       && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3493       && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3496   /* Array types are also compatible if they are constrained and have the same
3497      domain(s), the same component type and the same scalar storage order.  */
3498   if (code == ARRAY_TYPE
/* Same domain: either literally shared, or with pairwise equal bounds.  */
3499       && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3500	  || (TYPE_DOMAIN (t1)
3502	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3503				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3504	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3505				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
/* Same component type: identical, or compatible nested array types
   (checked recursively).  */
3506       && (TREE_TYPE (t1) == TREE_TYPE (t2)
3507	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3508	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3509       && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3515 /* Return true if EXPR is a useless type conversion. */
3518 gnat_useless_type_conversion (tree expr)
/* A conversion is useless when its source and target types are
   transparently convertible per gnat_types_compatible_p.  */
3520   if (CONVERT_EXPR_P (expr)
3521       || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3522       || TREE_CODE (expr) == NON_LVALUE_EXPR)
3523     return gnat_types_compatible_p (TREE_TYPE (expr),
3524				    TREE_TYPE (TREE_OPERAND (expr, 0)));
3529 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3532 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3533		     bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
/* Compare the copy-in/copy-out list and the three return-convention
   flags; TREE_ADDRESSABLE encodes return-by-invisible-reference here.  */
3535   return TYPE_CI_CO_LIST (t) == cico_list
3536	 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3537	 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3538	 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3541 /* EXP is an expression for the size of an object.  If this size contains
3542    discriminant references, replace them with the maximum (if MAX_P) or
3543    minimum (if !MAX_P) possible value of the discriminant. */
3546 max_size (tree exp, bool max_p)
3548   enum tree_code code = TREE_CODE (exp);
3549   tree type = TREE_TYPE (exp);
/* Dispatch on the tree code class of EXP; several case labels and
   intermediate lines are elided in this excerpt.  */
3551   switch (TREE_CODE_CLASS (code))
3553     case tcc_declaration:
3558       if (code == CALL_EXPR)
/* Try to inline the call; if that succeeds, recurse on the body.  */
3563	  t = maybe_inline_call_in_expr (exp);
3565	    return max_size (t, max_p);
/* Otherwise rebuild the call with each argument maximized/minimized.  */
3567	  n = call_expr_nargs (exp);
3569	  argarray = XALLOCAVEC (tree, n);
3570	  for (i = 0; i < n; i++)
3571	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3572	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3577       /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3578	 modify.  Otherwise, we treat it like a variable.  */
3579       if (CONTAINS_PLACEHOLDER_P (exp))
3581	  tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3582	  tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3583	  return max_size (convert (get_base_type (val_type), val), true);
3588     case tcc_comparison:
/* A comparison's size contribution is at most 1 (true) and at least 0.  */
3589       return max_p ? size_one_node : size_zero_node;
3592       if (code == NON_LVALUE_EXPR)
3593	return max_size (TREE_OPERAND (exp, 0), max_p);
/* Unary case: negation flips the max/min direction of the operand.  */
3595       return fold_build1 (code, type,
3596			  max_size (TREE_OPERAND (exp, 0),
3597				    code == NEGATE_EXPR ? !max_p : max_p));
/* Binary case: for subtraction, the RHS direction is inverted.  */
3601	tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3602	tree rhs = max_size (TREE_OPERAND (exp, 1),
3603			     code == MINUS_EXPR ? !max_p : max_p);
3605	/* Special-case wanting the maximum value of a MIN_EXPR.
3606	   In that case, if one side overflows, return the other.  */
3607	if (max_p && code == MIN_EXPR)
3609	    if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3612	    if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3616	/* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3617	   overflowing and the RHS a variable.  */
3618	if ((code == MINUS_EXPR || code == PLUS_EXPR)
3619	    && TREE_CODE (lhs) == INTEGER_CST
3620	    && TREE_OVERFLOW (lhs)
3621	    && TREE_CODE (rhs) != INTEGER_CST)
3624	/* If we are going to subtract a "negative" value in an unsigned type,
3625	   do the operation as an addition of the negated value, in order to
3626	   avoid creating a spurious overflow below.  */
3627	if (code == MINUS_EXPR
3628	    && TYPE_UNSIGNED (type)
3629	    && TREE_CODE (rhs) == INTEGER_CST
3630	    && !TREE_OVERFLOW (rhs)
3631	    && tree_int_cst_sign_bit (rhs) != 0)
3633	    rhs = fold_build1 (NEGATE_EXPR, type, rhs);
3637	/* We need to detect overflows so we call size_binop here.  */
3638	return size_binop (code, lhs, rhs);
3641     case tcc_expression:
3642       switch (TREE_CODE_LENGTH (code))
3645	  if (code == SAVE_EXPR)
3648	  return fold_build1 (code, type,
3649			      max_size (TREE_OPERAND (exp, 0), max_p));
3652	  if (code == COMPOUND_EXPR)
/* Only the second operand of a COMPOUND_EXPR contributes a value.  */
3653	    return max_size (TREE_OPERAND (exp, 1), max_p);
3655	  return fold_build2 (code, type,
3656			      max_size (TREE_OPERAND (exp, 0), max_p),
3657			      max_size (TREE_OPERAND (exp, 1), max_p));
3660	  if (code == COND_EXPR)
/* Either arm may be taken, so combine them with MAX/MIN accordingly.  */
3661	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3662				max_size (TREE_OPERAND (exp, 1), max_p),
3663				max_size (TREE_OPERAND (exp, 2), max_p));
3669       /* Other tree classes cannot happen.  */
3677 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3678    EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3679    Return a constructor for the template. */
3682 build_template (tree template_type, tree array_type, tree expr)
3684   vec<constructor_elt, va_gc> *template_elts = NULL;
3685   tree bound_list = NULL_TREE;
/* Strip padding and justified-modular wrappers to reach the actual
   array (or bound-carrying integer) type.  */
3688   while (TREE_CODE (array_type) == RECORD_TYPE
3689	 && (TYPE_PADDING_P (array_type)
3690	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3691     array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3693   if (TREE_CODE (array_type) == ARRAY_TYPE
3694       || (TREE_CODE (array_type) == INTEGER_TYPE
3695	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3696     bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3698   /* First make the list for a CONSTRUCTOR for the template.  Go down the
3699      field list of the template instead of the type chain because this
3700      array might be an Ada array of arrays and we can't tell where the
3701      nested arrays stop being the underlying object.  */
/* Template fields come in (min, max) pairs, hence the double DECL_CHAIN
   step; the bound list / array type is advanced in lockstep.  */
3703   for (field = TYPE_FIELDS (template_type); field;
3705        ? (bound_list = TREE_CHAIN (bound_list))
3706        : (array_type = TREE_TYPE (array_type))),
3707        field = DECL_CHAIN (DECL_CHAIN (field)))
3709       tree bounds, min, max;
3711       /* If we have a bound list, get the bounds from there.  Likewise
3712	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
3713	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3714	 This will give us a maximum range.  */
3716	bounds = TREE_VALUE (bound_list);
3717       else if (TREE_CODE (array_type) == ARRAY_TYPE)
3718	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3719       else if (expr && TREE_CODE (expr) == PARM_DECL
3720	       && DECL_BY_COMPONENT_PTR_P (expr))
3721	bounds = TREE_TYPE (field);
3725       min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3726       max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3728       /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3729	 substitute it from OBJECT.  */
3730       min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3731       max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3733       CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3734       CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3737   return gnat_build_constructor (template_type, template_elts);
3740 /* Return true if TYPE is suitable for the element type of a vector. */
3743 type_for_vector_element_p (tree type)
/* Only integral, scalar floating-point and fixed-point types qualify.  */
3747   if (!INTEGRAL_TYPE_P (type)
3748       && !SCALAR_FLOAT_TYPE_P (type)
3749       && !FIXED_POINT_TYPE_P (type))
/* The type's machine mode must itself be a scalar vector-capable mode.  */
3752   mode = TYPE_MODE (type);
3753   if (GET_MODE_CLASS (mode) != MODE_INT
3754       && !SCALAR_FLOAT_MODE_P (mode)
3755       && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3761 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3762    this is not possible.  If ATTRIBUTE is non-zero, we are processing the
3763    attribute declaration and want to issue error messages on failure. */
3766 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3768   unsigned HOST_WIDE_INT size_int, inner_size_int;
3771   /* Silently punt on variable sizes.  We can't make vector types for them,
3772      need to ignore them on front-end generated subtypes of unconstrained
3773      base types, and this attribute is for binding implementors, not end
3774      users, so we should never get there from legitimate explicit uses.  */
3775   if (!tree_fits_uhwi_p (size))
3777   size_int = tree_to_uhwi (size);
3779   if (!type_for_vector_element_p (inner_type))
/* Errors are only emitted when processing an explicit attribute.  */
3782	error ("invalid element type for attribute %qs",
3783	       IDENTIFIER_POINTER (attribute));
3786   inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
/* The total size must be an exact multiple of the element size.  */
3788   if (size_int % inner_size_int)
3791	error ("vector size not an integral multiple of component size");
3798	error ("zero vector size");
/* The number of elements must be a power of two.  */
3802   nunits = size_int / inner_size_int;
3803   if (nunits & (nunits - 1))
3806	error ("number of components of vector not a power of two");
3810   return build_vector_type (inner_type, nunits);
3813 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3814    NULL_TREE if this is not possible.  If ATTRIBUTE is non-zero, we are
3815    processing the attribute and want to issue error messages on failure. */
3818 build_vector_type_for_array (tree array_type, tree attribute)
/* Delegate to build_vector_type_for_size with the array's element type
   and total size in bytes.  */
3820   tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3821						 TYPE_SIZE_UNIT (array_type),
/* Record the original array type as the vector's debug representative.  */
3826     TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3830 /* Build a type to be used to represent an aliased object whose nominal type
3831    is an unconstrained array.  This consists of a RECORD_TYPE containing a
3832    field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3833    If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3834    an arbitrary unconstrained object.  Use NAME as the name of the record.
3835    DEBUG_INFO_P is true if we need to write debug information for the type. */
3838 build_unc_object_type (tree template_type, tree object_type, tree name,
3842   tree type = make_node (RECORD_TYPE);
/* First field: the bounds template.  */
3844     = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3845			 NULL_TREE, NULL_TREE, 0, 1);
/* Second field: the array data itself.  */
3847     = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3848			 NULL_TREE, NULL_TREE, 0, 1);
3850   TYPE_NAME (type) = name;
3851   TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3852   DECL_CHAIN (template_field) = array_field;
3853   finish_record_type (type, template_field, 0, true);
3855   /* Declare it now since it will never be declared otherwise.  This is
3856      necessary to ensure that its subtrees are properly marked.  */
3857   decl = create_type_decl (name, type, true, debug_info_p, Empty);
3859   /* template_type will not be used elsewhere than here, so to keep the debug
3860      info clean and in order to avoid scoping issues, make decl its
3862   gnat_set_type_context (template_type, decl);
3867 /* Same, taking a thin or fat pointer type instead of a template type. */
3870 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3871				tree name, bool debug_info_p)
3875   gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
/* Recover the template type: for a fat pointer it is pointed to by the
   second field; for a thin pointer it is the first field of the record
   the pointer designates.  */
3878     = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3879        ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3880        : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
/* Delegate to the template-type variant above.  */
3883     build_unc_object_type (template_type, object_type, name, debug_info_p);
3886 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3887 In the normal case this is just two adjustments, but we have more to
3888 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
/* NOTE(review): lossy extraction -- gaps in the embedded line numbers
   (e.g. 3895-3896, 3903-3905, 3931-3932) indicate missing statements:
   presumably the declaration of the iterator T, an early return when
   PTR and REF are both null, and the `return;` after the identity
   check.  Confirm against the original gcc/ada/gcc-interface/utils.c.  */
3891 update_pointer_to (tree old_type, tree new_type)
3893 tree ptr = TYPE_POINTER_TO (old_type);
3894 tree ref = TYPE_REFERENCE_TO (old_type);
3897 /* If this is the main variant, process all the other variants first. */
3898 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3899 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3900 update_pointer_to (t, new_type);
3902 /* If no pointers and no references, we are done. */
3906 /* Merge the old type qualifiers in the new type.
3908 Each old variant has qualifiers for specific reasons, and the new
3909 designated type as well. Each set of qualifiers represents useful
3910 information grabbed at some point, and merging the two simply unifies
3911 these inputs into the final type description.
3913 Consider for instance a volatile type frozen after an access to constant
3914 type designating it; after the designated type's freeze, we get here with
3915 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3916 when the access type was processed. We will make a volatile and readonly
3917 designated type, because that's what it really is.
3919 We might also get here for a non-dummy OLD_TYPE variant with different
3920 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3921 to private record type elaboration (see the comments around the call to
3922 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3923 the qualifiers in those cases too, to avoid accidentally discarding the
3924 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
/* NOTE(review): the left-hand side of this assignment (original line
   3925) is missing from the extraction; presumably `new_type =`.  */
3926 = build_qualified_type (new_type,
3927 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3929 /* If old type and new type are identical, there is nothing to do. */
3930 if (old_type == new_type)
3933 /* Otherwise, first handle the simple case. */
3934 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3936 tree new_ptr, new_ref;
3938 /* If pointer or reference already points to new type, nothing to do.
3939 This can happen as update_pointer_to can be invoked multiple times
3940 on the same couple of types because of the type variants. */
3941 if ((ptr && TREE_TYPE (ptr) == new_type)
3942 || (ref && TREE_TYPE (ref) == new_type))
3945 /* Chain PTR and its variants at the end. */
3946 new_ptr = TYPE_POINTER_TO (new_type)
/* Walk to the tail of NEW_TYPE's pointer chain before appending PTR.  */
3949 while (TYPE_NEXT_PTR_TO (new_ptr))
3950 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3951 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3954 TYPE_POINTER_TO (new_type) = ptr;
3956 /* Now adjust them. */
3957 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3958 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
3960 TREE_TYPE (t) = new_type;
/* Keep the cached null-bounds constant consistent with the
   redirected pointer type.  */
3961 if (TYPE_NULL_BOUNDS (t))
3962 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
3965 /* Chain REF and its variants at the end. */
3966 new_ref = TYPE_REFERENCE_TO (new_type);
3969 while (TYPE_NEXT_REF_TO (new_ref))
3970 new_ref = TYPE_NEXT_REF_TO (new_ref);
3971 TYPE_NEXT_REF_TO (new_ref) = ref;
3974 TYPE_REFERENCE_TO (new_type) = ref;
3976 /* Now adjust them. */
3977 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3978 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3979 TREE_TYPE (t) = new_type;
/* Detach the now-stale chains from the obsolete type.  */
3981 TYPE_POINTER_TO (old_type) = NULL_TREE;
3982 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
3985 /* Now deal with the unconstrained array case. In this case the pointer
3986 is actually a record where both fields are pointers to dummy nodes.
3987 Turn them into pointers to the correct types using update_pointer_to.
3988 Likewise for the pointer to the object record (thin pointer). */
3991 tree new_ptr = TYPE_POINTER_TO (new_type);
3993 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3995 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
3996 since update_pointer_to can be invoked multiple times on the same
3997 couple of types because of the type variants. */
3998 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
/* NOTE(review): the update_pointer_to calls wrapping these argument
   lists (original lines 4001 and 4005) are missing from the
   extraction.  First field: pointer to the array; second field
   (DECL_CHAIN): pointer to the template.  */
4002 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4003 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4006 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4007 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4009 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4010 TYPE_OBJECT_RECORD_TYPE (new_type));
4012 TYPE_POINTER_TO (old_type) = NULL_TREE;
4016 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4017 unconstrained one. This involves making or finding a template. */
/* NOTE(review): lossy extraction -- gaps in the embedded line numbers
   show missing statements (the return-type line, the TEMPLATE_ADDR
   declaration, several `return`/brace lines, and the v-allocation
   calls around the CONSTRUCTOR_APPEND_ELT sequences).  Comments only.  */
4020 convert_to_fat_pointer (tree type, tree expr)
/* TYPE is the fat pointer record: first field points to the array,
   second field (DECL_CHAIN) points to the template (bounds).  */
4022 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4023 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4024 tree etype = TREE_TYPE (expr);
4026 vec<constructor_elt, va_gc> *v;
4029 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4030 array (compare_fat_pointers ensures that this is the full discriminant)
4031 and a valid pointer to the bounds. This latter property is necessary
4032 since the compiler can hoist the load of the bounds done through it. */
4033 if (integer_zerop (expr))
4035 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4036 tree null_bounds, t;
/* Reuse the cached null-bounds object if one was already built for
   this pointer-to-template type.  */
4038 if (TYPE_NULL_BOUNDS (ptr_template_type))
4039 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4042 /* The template type can still be dummy at this point so we build an
4043 empty constructor. The middle-end will fill it in with zeros. */
4044 t = build_constructor (template_type, NULL);
4045 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4046 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4047 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4050 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4051 fold_convert (p_array_type, null_pointer_node));
4052 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4053 t = build_constructor (type, v);
4054 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4055 TREE_CONSTANT (t) = 0;
4056 TREE_STATIC (t) = 1;
4061 /* If EXPR is a thin pointer, make template and data from the record. */
4062 if (TYPE_IS_THIN_POINTER_P (etype))
4064 tree field = TYPE_FIELDS (TREE_TYPE (etype));
/* EXPR is evaluated more than once below, so protect it.  */
4066 expr = gnat_protect_expr (expr);
4068 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4069 the thin pointer value has been shifted so we shift it back to get
4070 the template address. */
4071 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4074 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4075 fold_build1 (NEGATE_EXPR, sizetype,
4077 (DECL_CHAIN (field))));
4079 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4083 /* Otherwise we explicitly take the address of the fields. */
4086 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4088 = build_unary_op (ADDR_EXPR, NULL_TREE,
4089 build_component_ref (expr, field, false));
4090 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4091 build_component_ref (expr, DECL_CHAIN (field),
4096 /* Otherwise, build the constructor for the template. */
4099 = build_unary_op (ADDR_EXPR, NULL_TREE,
4100 build_template (template_type, TREE_TYPE (etype),
4103 /* The final result is a constructor for the fat pointer.
4105 If EXPR is an argument of a foreign convention subprogram, the type it
4106 points to is directly the component type. In this case, the expression
4107 type may not match the corresponding FIELD_DECL type at this point, so we
4108 call "convert" here to fix that up if necessary. This type consistency is
4109 required, for instance because it ensures that possible later folding of
4110 COMPONENT_REFs against this constructor always yields something of the
4111 same type as the initial reference.
4113 Note that the call to "build_template" above is still fine because it
4114 will only refer to the provided TEMPLATE_TYPE in this case. */
4115 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4116 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4117 return gnat_build_constructor (type, v);
4120 /* Create an expression whose value is that of EXPR,
4121 converted to type TYPE. The TREE_TYPE of the value
4122 is always TYPE. This function implements all reasonable
4123 conversions; callers should filter out those that are
4124 not permitted by the language being compiled. */
/* NOTE(review): lossy extraction -- the embedded original line numbers
   have many gaps throughout this function, so braces, early returns
   (e.g. the identity check at original line 4134), `else` lines, local
   declarations and the closing `switch (code)` dispatch lines are not
   all visible.  Documentation only; visible code untouched.  */
4127 convert (tree type, tree expr)
4129 tree etype = TREE_TYPE (expr);
4130 enum tree_code ecode = TREE_CODE (etype);
4131 enum tree_code code = TREE_CODE (type);
4133 /* If the expression is already of the right type, we are done. */
/* NOTE(review): the actual identity test and `return expr;` (original
   lines 4134-4135) are missing from the extraction.  */
4137 /* If both input and output have padding and are of variable size, do this
4138 as an unchecked conversion. Likewise if one is a mere variant of the
4139 other, so we avoid a pointless unpad/repad sequence. */
4140 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4141 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4142 && (!TREE_CONSTANT (TYPE_SIZE (type))
4143 || !TREE_CONSTANT (TYPE_SIZE (etype))
4144 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4145 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4146 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4149 /* If the output type has padding, convert to the inner type and make a
4150 constructor to build the record, unless a variable size is involved. */
4151 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4153 vec<constructor_elt, va_gc> *v;
4155 /* If we previously converted from another type and our type is
4156 of variable size, remove the conversion to avoid the need for
4157 variable-sized temporaries. Likewise for a conversion between
4158 original and packable version. */
4159 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4160 && (!TREE_CONSTANT (TYPE_SIZE (type))
4161 || (ecode == RECORD_TYPE
4162 && TYPE_NAME (etype)
4163 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4164 expr = TREE_OPERAND (expr, 0);
4166 /* If we are just removing the padding from expr, convert the original
4167 object if we have variable size in order to avoid the need for some
4168 variable-sized temporaries. Likewise if the padding is a variant
4169 of the other, so we avoid a pointless unpad/repad sequence. */
4170 if (TREE_CODE (expr) == COMPONENT_REF
4171 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4172 && (!TREE_CONSTANT (TYPE_SIZE (type))
4173 || TYPE_MAIN_VARIANT (type)
4174 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4175 || (ecode == RECORD_TYPE
4176 && TYPE_NAME (etype)
4177 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4178 return convert (type, TREE_OPERAND (expr, 0));
4180 /* If the inner type is of self-referential size and the expression type
4181 is a record, do this as an unchecked conversion. But first pad the
4182 expression if possible to have the same size on both sides. */
4183 if (ecode == RECORD_TYPE
4184 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4186 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4187 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4188 false, false, false, true),
4190 return unchecked_convert (type, expr, false);
4193 /* If we are converting between array types with variable size, do the
4194 final conversion as an unchecked conversion, again to avoid the need
4195 for some variable-sized temporaries. If valid, this conversion is
4196 very likely purely technical and without real effects. */
4197 if (ecode == ARRAY_TYPE
4198 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4199 && !TREE_CONSTANT (TYPE_SIZE (etype))
4200 && !TREE_CONSTANT (TYPE_SIZE (type)))
4201 return unchecked_convert (type,
4202 convert (TREE_TYPE (TYPE_FIELDS (type)),
/* Wrap the converted inner expression in a one-field record whose
   single field is the padded type's payload.  */
4207 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4208 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4209 return gnat_build_constructor (type, v);
4212 /* If the input type has padding, remove it and convert to the output type.
4213 The conditions ordering is arranged to ensure that the output type is not
4214 a padding type here, as it is not clear whether the conversion would
4215 always be correct if this was to happen. */
4216 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4220 /* If we have just converted to this padded type, just get the
4221 inner expression. */
4222 if (TREE_CODE (expr) == CONSTRUCTOR)
4223 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4225 /* Otherwise, build an explicit component reference. */
4227 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4229 return convert (type, unpadded);
4232 /* If the input is a biased type, convert first to the base type and add
4233 the bias. Note that the bias must go through a full conversion to the
4234 base type, lest it is itself a biased value; this happens for subtypes
4236 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4237 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4238 fold_convert (TREE_TYPE (etype), expr),
4239 convert (TREE_TYPE (etype),
4240 TYPE_MIN_VALUE (etype))));
4242 /* If the input is a justified modular type, we need to extract the actual
4243 object before converting it to any other type with the exceptions of an
4244 unconstrained array or of a mere type variant. It is useful to avoid the
4245 extraction and conversion in the type variant case because it could end
4246 up replacing a VAR_DECL expr by a constructor and we might be about the
4247 take the address of the result. */
4248 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4249 && code != UNCONSTRAINED_ARRAY_TYPE
4250 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4252 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4254 /* If converting to a type that contains a template, convert to the data
4255 type and then build the template. */
4256 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4258 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4259 vec<constructor_elt, va_gc> *v;
4262 /* If the source already has a template, get a reference to the
4263 associated array only, as we are going to rebuild a template
4264 for the target type anyway. */
4265 expr = maybe_unconstrained_array (expr);
4267 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4268 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4269 obj_type, NULL_TREE));
4271 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4272 convert (obj_type, expr));
4273 return gnat_build_constructor (type, v);
4276 /* There are some cases of expressions that we process specially. */
4277 switch (TREE_CODE (expr))
/* NOTE(review): the case labels preceding this comment (original
   lines 4278-4282, presumably ERROR_MARK/NULL_EXPR and kin) are
   missing from the extraction.  */
4283 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4284 conversion in gnat_expand_expr. NULL_EXPR does not represent
4285 and actual value, so no conversion is needed. */
4286 expr = copy_node (expr);
4287 TREE_TYPE (expr) = type;
4291 /* If we are converting a STRING_CST to another constrained array type,
4292 just make a new one in the proper type. */
4293 if (code == ecode && AGGREGATE_TYPE_P (etype)
4294 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4295 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4297 expr = copy_node (expr);
4298 TREE_TYPE (expr) = type;
4304 /* If we are converting a VECTOR_CST to a mere type variant, just make
4305 a new one in the proper type. */
4306 if (code == ecode && gnat_types_compatible_p (type, etype))
4308 expr = copy_node (expr);
4309 TREE_TYPE (expr) = type;
4314 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4315 another padding type around the same type, just make a new one in
4318 && (gnat_types_compatible_p (type, etype)
4319 || (code == RECORD_TYPE
4320 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4321 && TREE_TYPE (TYPE_FIELDS (type))
4322 == TREE_TYPE (TYPE_FIELDS (etype)))))
4324 expr = copy_node (expr);
4325 TREE_TYPE (expr) = type;
4326 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4330 /* Likewise for a conversion between original and packable version, or
4331 conversion between types of the same size and with the same list of
4332 fields, but we have to work harder to preserve type consistency. */
4334 && code == RECORD_TYPE
4335 && (TYPE_NAME (type) == TYPE_NAME (etype)
4336 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4339 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4340 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4341 vec<constructor_elt, va_gc> *v;
4343 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4344 unsigned HOST_WIDE_INT idx;
4347 /* Whether we need to clear TREE_CONSTANT et al. on the output
4348 constructor when we convert in place. */
4349 bool clear_constant = false;
4351 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4353 /* Skip the missing fields in the CONSTRUCTOR. */
4354 while (efield && field && !SAME_FIELD_P (efield, index))
4356 efield = DECL_CHAIN (efield);
4357 field = DECL_CHAIN (field);
4359 /* The field must be the same. */
4360 if (!(efield && field && SAME_FIELD_P (efield, field)))
/* NOTE(review): the `break;` and the constructor_elt declaration
   preceding this initializer (original lines 4361-4362) are missing
   from the extraction.  */
4363 = {field, convert (TREE_TYPE (field), value)};
4364 v->quick_push (elt);
4366 /* If packing has made this field a bitfield and the input
4367 value couldn't be emitted statically any more, we need to
4368 clear TREE_CONSTANT on our output. */
4370 && TREE_CONSTANT (expr)
4371 && !CONSTRUCTOR_BITFIELD_P (efield)
4372 && CONSTRUCTOR_BITFIELD_P (field)
4373 && !initializer_constant_valid_for_bitfield_p (value))
4374 clear_constant = true;
4376 efield = DECL_CHAIN (efield);
4377 field = DECL_CHAIN (field);
4380 /* If we have been able to match and convert all the input fields
4381 to their output type, convert in place now. We'll fallback to a
4382 view conversion downstream otherwise. */
4385 expr = copy_node (expr);
4386 TREE_TYPE (expr) = type;
4387 CONSTRUCTOR_ELTS (expr) = v;
4389 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4394 /* Likewise for a conversion between array type and vector type with a
4395 compatible representative array. */
4396 else if (code == VECTOR_TYPE
4397 && ecode == ARRAY_TYPE
4398 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4401 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4402 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4403 vec<constructor_elt, va_gc> *v;
4404 unsigned HOST_WIDE_INT ix;
4407 /* Build a VECTOR_CST from a *constant* array constructor. */
4408 if (TREE_CONSTANT (expr))
4410 bool constant_p = true;
4412 /* Iterate through elements and check if all constructor
4413 elements are *_CSTs. */
4414 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4415 if (!CONSTANT_CLASS_P (value))
/* Only lower to a VECTOR_CST when every element is a constant.  */
4422 return build_vector_from_ctor (type,
4423 CONSTRUCTOR_ELTS (expr));
4426 /* Otherwise, build a regular vector constructor. */
4428 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4430 constructor_elt elt = {NULL_TREE, value};
4431 v->quick_push (elt);
4433 expr = copy_node (expr);
4434 TREE_TYPE (expr) = type;
4435 CONSTRUCTOR_ELTS (expr) = v;
4440 case UNCONSTRAINED_ARRAY_REF:
4441 /* First retrieve the underlying array. */
4442 expr = maybe_unconstrained_array (expr);
4443 etype = TREE_TYPE (expr);
4444 ecode = TREE_CODE (etype);
4447 case VIEW_CONVERT_EXPR:
4449 /* GCC 4.x is very sensitive to type consistency overall, and view
4450 conversions thus are very frequent. Even though just "convert"ing
4451 the inner operand to the output type is fine in most cases, it
4452 might expose unexpected input/output type mismatches in special
4453 circumstances so we avoid such recursive calls when we can. */
4454 tree op0 = TREE_OPERAND (expr, 0);
4456 /* If we are converting back to the original type, we can just
4457 lift the input conversion. This is a common occurrence with
4458 switches back-and-forth amongst type variants. */
4459 if (type == TREE_TYPE (op0))
4462 /* Otherwise, if we're converting between two aggregate or vector
4463 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4464 target type in place or to just convert the inner expression. */
4465 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4466 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4468 /* If we are converting between mere variants, we can just
4469 substitute the VIEW_CONVERT_EXPR in place. */
4470 if (gnat_types_compatible_p (type, etype))
4471 return build1 (VIEW_CONVERT_EXPR, type, op0);
4473 /* Otherwise, we may just bypass the input view conversion unless
4474 one of the types is a fat pointer, which is handled by
4475 specialized code below which relies on exact type matching. */
4476 else if (!TYPE_IS_FAT_POINTER_P (type)
4477 && !TYPE_IS_FAT_POINTER_P (etype))
4478 return convert (type, op0);
4488 /* Check for converting to a pointer to an unconstrained array. */
4489 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4490 return convert_to_fat_pointer (type, expr);
4492 /* If we are converting between two aggregate or vector types that are mere
4493 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4494 to a vector type from its representative array type. */
4495 else if ((code == ecode
4496 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4497 && gnat_types_compatible_p (type, etype))
4498 || (code == VECTOR_TYPE
4499 && ecode == ARRAY_TYPE
4500 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4502 return build1 (VIEW_CONVERT_EXPR, type, expr);
4504 /* If we are converting between tagged types, try to upcast properly. */
4505 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4506 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4508 tree child_etype = etype;
/* Walk up the parent chain (first field named parent_name_id) until
   the requested ancestor type is found.  */
4510 tree field = TYPE_FIELDS (child_etype);
4511 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4512 return build_component_ref (expr, field, false);
4513 child_etype = TREE_TYPE (field);
4514 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4517 /* If we are converting from a smaller form of record type back to it, just
4518 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4519 size on both sides. */
4520 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4521 && smaller_form_type_p (etype, type))
4523 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4524 false, false, false, true),
4526 return build1 (VIEW_CONVERT_EXPR, type, expr);
4529 /* In all other cases of related types, make a NOP_EXPR. */
4530 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4531 return fold_convert (type, expr);
/* NOTE(review): the `switch (code)` dispatch and its first case
   labels (original lines 4533-4535) are missing from the
   extraction; the cases below are members of that switch.  */
4536 return fold_build1 (CONVERT_EXPR, type, expr);
4539 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4540 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4541 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4542 return unchecked_convert (type, expr, false);
4544 /* If the output is a biased type, convert first to the base type and
4545 subtract the bias. Note that the bias itself must go through a full
4546 conversion to the base type, lest it is a biased value; this happens
4547 for subtypes of biased types. */
4548 if (TYPE_BIASED_REPRESENTATION_P (type))
4549 return fold_convert (type,
4550 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4551 convert (TREE_TYPE (type), expr),
4552 convert (TREE_TYPE (type),
4553 TYPE_MIN_VALUE (type))));
4555 /* ... fall through ... */
4559 /* If we are converting an additive expression to an integer type
4560 with lower precision, be wary of the optimization that can be
4561 applied by convert_to_integer. There are 2 problematic cases:
4562 - if the first operand was originally of a biased type,
4563 because we could be recursively called to convert it
4564 to an intermediate type and thus rematerialize the
4565 additive operator endlessly,
4566 - if the expression contains a placeholder, because an
4567 intermediate conversion that changes the sign could
4568 be inserted and thus introduce an artificial overflow
4569 at compile time when the placeholder is substituted. */
4570 if (code == INTEGER_TYPE
4571 && ecode == INTEGER_TYPE
4572 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4573 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4575 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4577 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4578 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4579 || CONTAINS_PLACEHOLDER_P (expr))
4580 return build1 (NOP_EXPR, type, expr);
4583 return fold (convert_to_integer (type, expr));
4586 case REFERENCE_TYPE:
4587 /* If converting between two thin pointers, adjust if needed to account
4588 for differing offsets from the base pointer, depending on whether
4589 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4590 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
/* NOTE(review): the declarations of etype_pos/type_pos (original
   lines 4592 and 4596) are missing; these initializers compute each
   side's byte offset (0 when no unconstrained array is attached).  */
4593 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4594 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4597 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4598 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4600 tree byte_diff = size_diffop (type_pos, etype_pos);
4602 expr = build1 (NOP_EXPR, type, expr);
4603 if (integer_zerop (byte_diff))
4606 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4607 fold_convert (sizetype, byte_diff));
4610 /* If converting fat pointer to normal or thin pointer, get the pointer
4611 to the array and then convert it. */
4612 if (TYPE_IS_FAT_POINTER_P (etype))
4613 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4615 return fold (convert_to_pointer (type, expr));
4618 return fold (convert_to_real (type, expr));
4621 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4623 vec<constructor_elt, va_gc> *v;
4626 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4627 convert (TREE_TYPE (TYPE_FIELDS (type)),
4629 return gnat_build_constructor (type, v);
4632 /* ... fall through ... */
4635 /* In these cases, assume the front-end has validated the conversion.
4636 If the conversion is valid, it will be a bit-wise conversion, so
4637 it can be viewed as an unchecked conversion. */
4638 return unchecked_convert (type, expr, false);
4641 /* This is a either a conversion between a tagged type and some
4642 subtype, which we have to mark as a UNION_TYPE because of
4643 overlapping fields or a conversion of an Unchecked_Union. */
4644 return unchecked_convert (type, expr, false);
4646 case UNCONSTRAINED_ARRAY_TYPE:
4647 /* If the input is a VECTOR_TYPE, convert to the representative
4648 array type first. */
4649 if (ecode == VECTOR_TYPE)
4651 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4652 etype = TREE_TYPE (expr);
4653 ecode = TREE_CODE (etype);
4656 /* If EXPR is a constrained array, take its address, convert it to a
4657 fat pointer, and then dereference it. Likewise if EXPR is a
4658 record containing both a template and a constrained array.
4659 Note that a record representing a justified modular type
4660 always represents a packed constrained array. */
4661 if (ecode == ARRAY_TYPE
4662 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4663 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4664 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4667 (INDIRECT_REF, NULL_TREE,
4668 convert_to_fat_pointer (TREE_TYPE (type),
4669 build_unary_op (ADDR_EXPR,
4672 /* Do something very similar for converting one unconstrained
4673 array to another. */
4674 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4676 build_unary_op (INDIRECT_REF, NULL_TREE,
4677 convert (TREE_TYPE (type),
4678 build_unary_op (ADDR_EXPR,
4684 return fold (convert_to_complex (type, expr));
4691 /* Create an expression whose value is that of EXPR converted to the common
4692 index type, which is sizetype. EXPR is supposed to be in the base type
4693 of the GNAT index type. Calling it is equivalent to doing
4695 convert (sizetype, expr)
4697 but we try to distribute the type conversion with the knowledge that EXPR
4698 cannot overflow in its type. This is a best-effort approach and we fall
4699 back to the above expression as soon as difficulties are encountered.
4701 This is necessary to overcome issues that arise when the GNAT base index
4702 type and the GCC common index type (sizetype) don't have the same size,
4703 which is quite frequent on 64-bit architectures. In this case, and if
4704 the GNAT base index type is signed but the iteration type of the loop has
4705 been forced to unsigned, the loop scalar evolution engine cannot compute
4706 a simple evolution for the general induction variables associated with the
4707 array indices, because it will preserve the wrap-around semantics in the
4708 unsigned type of their "inner" part. As a result, many loop optimizations
4711 The solution is to use a special (basic) induction variable that is at
4712 least as large as sizetype, and to express the aforementioned general
4713 induction variables in terms of this induction variable, eliminating
4714 the problematic intermediate truncation to the GNAT base index type.
4715 This is possible as long as the original expression doesn't overflow
4716 and if the middle-end hasn't introduced artificial overflows in the
4717 course of the various simplification it can make to the expression. */
/* NOTE(review): lossy extraction -- the return-type line, the
   `switch (code)` dispatch and several case labels/braces are missing
   (gaps in the embedded line numbers).  Comments only.  */
4720 convert_to_index_type (tree expr)
4722 enum tree_code code = TREE_CODE (expr);
4723 tree type = TREE_TYPE (expr);
4725 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4726 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4727 if (TYPE_UNSIGNED (type) || !optimize)
4728 return convert (sizetype, expr);
4733 /* The main effect of the function: replace a loop parameter with its
4734 associated special induction variable. */
4735 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4736 expr = DECL_INDUCTION_VAR (expr);
/* NOTE(review): presumably inside a CASE_CONVERT/NOP_EXPR case here --
   the enclosing case label is missing from the extraction.  */
4741 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4742 /* Bail out as soon as we suspect some sort of type frobbing. */
4743 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4744 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4748 /* ... fall through ... */
4750 case NON_LVALUE_EXPR:
/* Distribute the conversion through unary nodes.  */
4751 return fold_build1 (code, sizetype,
4752 convert_to_index_type (TREE_OPERAND (expr, 0)));
/* Distribute through binary arithmetic nodes (case labels missing
   from the extraction).  */
4757 return fold_build2 (code, sizetype,
4758 convert_to_index_type (TREE_OPERAND (expr, 0)),
4759 convert_to_index_type (TREE_OPERAND (expr, 1)));
/* For COMPOUND_EXPR-like nodes, only the value operand is converted.  */
4762 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4763 convert_to_index_type (TREE_OPERAND (expr, 1)));
/* For COND_EXPR, convert both arms but not the condition.  */
4766 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4767 convert_to_index_type (TREE_OPERAND (expr, 1)),
4768 convert_to_index_type (TREE_OPERAND (expr, 2)));
/* Default: fall back to a plain conversion.  */
4774 return convert (sizetype, expr);
4777 /* Remove all conversions that are done in EXP. This includes converting
4778 from a padded type or to a justified modular type. If TRUE_ADDRESS
4779 is true, always return the address of the containing object even if
4780 the address is not bit-aligned. */
/* NOTE(review): lossy extraction -- the return-type line, the opening
   case labels (e.g. CONSTRUCTOR, COMPONENT_REF), the `if (true_address`
   guard fragment at original line 4788, and the default return are not
   visible.  Comments only.  */
4783 remove_conversions (tree exp, bool true_address)
4785 switch (TREE_CODE (exp))
4789 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4790 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
/* Peel the justified-modular wrapper: recurse into the single
   constructor element.  */
4792 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
/* Strip a COMPONENT_REF that merely removes padding.  */
4796 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4797 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4801 case VIEW_CONVERT_EXPR:
4802 case NON_LVALUE_EXPR:
4803 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4812 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4813 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4814 likewise return an expression pointing to the underlying array. */
/* NOTE(review): lossy extraction -- the return-type line, several braces,
   the RECORD_TYPE case label and the final `return exp;` are missing
   (gaps in the embedded line numbers).  Comments only.  */
4817 maybe_unconstrained_array (tree exp)
4819 enum tree_code code = TREE_CODE (exp);
4820 tree type = TREE_TYPE (exp);
4822 switch (TREE_CODE (type))
4824 case UNCONSTRAINED_ARRAY_TYPE:
4825 if (code == UNCONSTRAINED_ARRAY_REF)
/* Preserve these flags across the rewrite of the reference.  */
4827 const bool read_only = TREE_READONLY (exp);
4828 const bool no_trap = TREE_THIS_NOTRAP (exp);
4830 exp = TREE_OPERAND (exp, 0);
4831 type = TREE_TYPE (exp);
4833 if (TREE_CODE (exp) == COND_EXPR)
/* Dereference the array pointer of each arm separately, then
   rebuild the COND_EXPR over the dereferenced arms.  */
4836 = build_unary_op (INDIRECT_REF, NULL_TREE,
4837 build_component_ref (TREE_OPERAND (exp, 1),
4841 = build_unary_op (INDIRECT_REF, NULL_TREE,
4842 build_component_ref (TREE_OPERAND (exp, 2),
4846 exp = build3 (COND_EXPR,
4847 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4848 TREE_OPERAND (exp, 0), op1, op2);
/* Common case: dereference the array pointer of the fat pointer.  */
4852 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4853 build_component_ref (exp,
4856 TREE_READONLY (exp) = read_only;
4857 TREE_THIS_NOTRAP (exp) = no_trap;
/* A NULL_EXPR of unconstrained array type becomes a NULL_EXPR of the
   underlying array type.  */
4861 else if (code == NULL_EXPR)
4862 exp = build1 (NULL_EXPR,
4863 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4864 TREE_OPERAND (exp, 0));
4868 /* If this is a padded type and it contains a template, convert to the
4869 unpadded type first. */
4870 if (TYPE_PADDING_P (type)
4871 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4872 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
4874 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4875 code = TREE_CODE (exp);
4876 type = TREE_TYPE (exp);
4879 if (TYPE_CONTAINS_TEMPLATE_P (type))
4881 /* If the array initializer is a box, return NULL_TREE. */
4882 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
/* The array is the second field, after the template.  */
4885 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
4887 type = TREE_TYPE (exp);
4889 /* If the array type is padded, convert to the unpadded type. */
4890 if (TYPE_IS_PADDING_P (type))
4891 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4902 /* Return true if EXPR is an expression that can be folded as an operand
4903 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4906 can_fold_for_view_convert_p (tree expr)
4910 /* The folder will fold NOP_EXPRs between integral types with the same
4911 precision (in the middle-end's sense). We cannot allow it if the
4912 types don't have the same precision in the Ada sense as well. */
/* Anything other than a NOP_EXPR is safe to hand to the folder.  */
4913 if (TREE_CODE (expr) != NOP_EXPR)
4916 t1 = TREE_TYPE (expr);
4917 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4919 /* Defer to the folder for non-integral conversions. */
4920 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4923 /* Only fold conversions that preserve both precisions. */
/* Both the GCC precision and the Ada RM size must agree.  */
4924 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4925 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4931 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4932 If NOTRUNC_P is true, truncation operations should be suppressed.
4934 Special care is required with (source or target) integral types whose
4935 precision is not equal to their size, to make sure we fetch or assign
4936 the value bits whose location might depend on the endianness, e.g.
4938 Rmsize : constant := 8;
4939 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4941 type Bit_Array is array (1 .. Rmsize) of Boolean;
4942 pragma Pack (Bit_Array);
4944 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4946 Value : Int := 2#1000_0001#;
4947 Vbits : Bit_Array := To_Bit_Array (Value);
4949 we expect the 8 bits at Vbits'Address to always contain Value, while
4950 their original location depends on the endianness, at Value'Address
4951 on a little-endian architecture but not on a big-endian one. */
4954 unchecked_convert (tree type, tree expr, bool notrunc_p)
4956 tree etype = TREE_TYPE (expr);
4957 enum tree_code ecode = TREE_CODE (etype);
4958 enum tree_code code = TREE_CODE (type);
4962 /* If the expression is already of the right type, we are done. */
4966 /* If both types are integral just do a normal conversion.
4967 Likewise for a conversion to an unconstrained array. */
4968 if (((INTEGRAL_TYPE_P (type)
4969 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
4970 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4971 && (INTEGRAL_TYPE_P (etype)
4972 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4973 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4974 || code == UNCONSTRAINED_ARRAY_TYPE)
/* Reinterpret a biased source through an unbiased copy of its type so
   the bias is not stripped arithmetically by the conversion.  */
4976 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4978 tree ntype = copy_type (etype);
4979 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4980 TYPE_MAIN_VARIANT (ntype) = ntype;
4981 expr = build1 (NOP_EXPR, ntype, expr);
/* Conversely, for a biased target, convert to an unbiased copy first
   and only then reinterpret as the biased type.  */
4984 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4986 tree rtype = copy_type (type);
4987 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4988 TYPE_MAIN_VARIANT (rtype) = rtype;
4989 expr = convert (rtype, expr);
4990 expr = build1 (NOP_EXPR, type, expr);
4993 expr = convert (type, expr);
4996 /* If we are converting to an integral type whose precision is not equal
4997 to its size, first unchecked convert to a record type that contains a
4998 field of the given precision. Then extract the result from the field.
5000 There is a subtlety if the source type is an aggregate type with reverse
5001 storage order because its representation is not contiguous in the native
5002 storage order, i.e. a direct unchecked conversion to an integral type
5003 with N bits of precision cannot read the first N bits of the aggregate
5004 type. To overcome it, we do an unchecked conversion to an integral type
5005 with reverse storage order and return the resulting value. This also
5006 ensures that the result of the unchecked conversion doesn't depend on
5007 the endianness of the target machine, but only on the storage order of
5010 Finally, for the sake of consistency, we do the unchecked conversion
5011 to an integral type with reverse storage order as soon as the source
5012 type is an aggregate type with reverse storage order, even if there
5013 are no considerations of precision or size involved. */
5014 else if (INTEGRAL_TYPE_P (type)
5015 && TYPE_RM_SIZE (type)
5016 && (tree_int_cst_compare (TYPE_RM_SIZE (type),
5017 TYPE_SIZE (type)) < 0
5018 || (AGGREGATE_TYPE_P (etype)
5019 && TYPE_REVERSE_STORAGE_ORDER (etype))))
5021 tree rec_type = make_node (RECORD_TYPE);
5022 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5023 tree field_type, field;
5025 if (AGGREGATE_TYPE_P (etype))
5026 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5027 = TYPE_REVERSE_STORAGE_ORDER (etype);
/* The single field has exactly the RM precision of the target type.  */
5029 if (TYPE_UNSIGNED (type))
5030 field_type = make_unsigned_type (prec);
5032 field_type = make_signed_type (prec);
5033 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5035 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5036 NULL_TREE, bitsize_zero_node, 1, 0);
5038 finish_record_type (rec_type, field, 1, false);
/* Convert to the record, pull the value out of the field, and
   reinterpret it as the requested integral type.  */
5040 expr = unchecked_convert (rec_type, expr, notrunc_p);
5041 expr = build_component_ref (expr, field, false);
5042 expr = fold_build1 (NOP_EXPR, type, expr);
5045 /* Similarly if we are converting from an integral type whose precision is
5046 not equal to its size, first copy into a field of the given precision
5047 and unchecked convert the record type.
5049 The same considerations as above apply if the target type is an aggregate
5050 type with reverse storage order and we also proceed similarly. */
5051 else if (INTEGRAL_TYPE_P (etype)
5052 && TYPE_RM_SIZE (etype)
5053 && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
5054 TYPE_SIZE (etype)) < 0
5055 || (AGGREGATE_TYPE_P (type)
5056 && TYPE_REVERSE_STORAGE_ORDER (type))))
5058 tree rec_type = make_node (RECORD_TYPE);
5059 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5060 vec<constructor_elt, va_gc> *v;
5062 tree field_type, field;
5064 if (AGGREGATE_TYPE_P (type))
5065 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5066 = TYPE_REVERSE_STORAGE_ORDER (type);
5068 if (TYPE_UNSIGNED (etype))
5069 field_type = make_unsigned_type (prec);
5071 field_type = make_signed_type (prec);
5072 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5074 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5075 NULL_TREE, bitsize_zero_node, 1, 0);
5077 finish_record_type (rec_type, field, 1, false);
/* Wrap the value in a one-field record, then unchecked convert the
   record to the requested type.  */
5079 expr = fold_build1 (NOP_EXPR, field_type, expr);
5080 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5081 expr = gnat_build_constructor (rec_type, v);
5082 expr = unchecked_convert (type, expr, notrunc_p);
5085 /* If we are converting from a scalar type to a type with a different size,
5086 we need to pad to have the same size on both sides.
5088 ??? We cannot do it unconditionally because unchecked conversions are
5089 used liberally by the front-end to implement polymorphism, e.g. in:
5091 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5092 return p___size__4 (p__object!(S191s.all));
5094 so we skip all expressions that are references. */
5095 else if (!REFERENCE_CLASS_P (expr)
5096 && !AGGREGATE_TYPE_P (etype)
5097 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5098 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
/* Source smaller than target: pad the source up to the target size.  */
5102 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5103 false, false, false, true),
5105 expr = unchecked_convert (type, expr, notrunc_p);
/* Source larger than target: convert to a padded version of the target
   and extract the field of interest.  */
5109 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5110 false, false, false, true);
5111 expr = unchecked_convert (rec_type, expr, notrunc_p);
5112 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5116 /* We have a special case when we are converting between two unconstrained
5117 array types. In that case, take the address, convert the fat pointer
5118 types, and dereference. */
5119 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5120 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5121 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5122 build_unary_op (ADDR_EXPR, NULL_TREE,
5125 /* Another special case is when we are converting to a vector type from its
5126 representative array type; this a regular conversion. */
5127 else if (code == VECTOR_TYPE
5128 && ecode == ARRAY_TYPE
5129 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5131 expr = convert (type, expr);
5133 /* And, if the array type is not the representative, we try to build an
5134 intermediate vector type of which the array type is the representative
5135 and to do the unchecked conversion between the vector types, in order
5136 to enable further simplifications in the middle-end. */
5137 else if (code == VECTOR_TYPE
5138 && ecode == ARRAY_TYPE
5139 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5141 expr = convert (tem, expr);
5142 return unchecked_convert (type, expr, notrunc_p);
5145 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5146 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5147 else if (TREE_CODE (expr) == CONSTRUCTOR
5148 && code == RECORD_TYPE
5149 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5151 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5152 Empty, false, false, false, true),
5154 return unchecked_convert (type, expr, notrunc_p);
5157 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5160 expr = maybe_unconstrained_array (expr);
5161 etype = TREE_TYPE (expr);
5162 ecode = TREE_CODE (etype);
/* Only fold when this cannot lose Ada precision information (see
   can_fold_for_view_convert_p above).  */
5163 if (can_fold_for_view_convert_p (expr))
5164 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5166 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5169 /* If the result is an integral type whose precision is not equal to its
5170 size, sign- or zero-extend the result. We need not do this if the input
5171 is an integral type of the same precision and signedness or if the output
5172 is a biased type or if both the input and output are unsigned. */
5174 && INTEGRAL_TYPE_P (type)
5175 && TYPE_RM_SIZE (type)
5176 && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
5177 && !(INTEGRAL_TYPE_P (etype)
5178 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5179 && tree_int_cst_compare (TYPE_RM_SIZE (type),
5180 TYPE_RM_SIZE (etype)
5181 ? TYPE_RM_SIZE (etype)
5182 : TYPE_SIZE (etype)) == 0)
5183 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5184 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5187 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5188 TYPE_UNSIGNED (type));
/* Shift count: the number of padding bits above the RM size.  */
5190 = convert (base_type,
5191 size_binop (MINUS_EXPR,
5192 TYPE_SIZE (type), TYPE_RM_SIZE (type)));
/* Shift left then right to perform the sign- or zero-extension.  */
5195 build_binary_op (RSHIFT_EXPR, base_type,
5196 build_binary_op (LSHIFT_EXPR, base_type,
5197 convert (base_type, expr),
5202 /* An unchecked conversion should never raise Constraint_Error. The code
5203 below assumes that GCC's conversion routines overflow the same way that
5204 the underlying hardware does. This is probably true. In the rare case
5205 when it is false, we can rely on the fact that such conversions are
5206 erroneous anyway. */
5207 if (TREE_CODE (expr) == INTEGER_CST)
5208 TREE_OVERFLOW (expr) = 0;
5210 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5211 show no longer constant. */
5212 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5213 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5215 TREE_CONSTANT (expr) = 0;
5220 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5221 the latter being a record type as predicated by Is_Record_Type. */
5224 tree_code_for_record_type (Entity_Id gnat_type)
5226 Node_Id component_list, component;
5228 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5229 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5230 if (!Is_Unchecked_Union (gnat_type))
/* Use the base type: the component list hangs off its declaration.  */
5233 gnat_type = Implementation_Base_Type (gnat_type);
5235 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
/* Scan the non-pragma items; any outer-level component means the type
   cannot be represented as a UNION_TYPE.  */
5237 for (component = First_Non_Pragma (Component_Items (component_list));
5238 Present (component);
5239 component = Next_Non_Pragma (component))
5240 if (Ekind (Defining_Entity (component)) == E_Component)
5246 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5247 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5248 according to the presence of an alignment clause on the type or, if it
5249 is an array, on the component type. */
5252 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5254 gnat_type = Underlying_Type (gnat_type);
5256 *align_clause = Present (Alignment_Clause (gnat_type));
/* For an array, look at the component type instead; an alignment clause
   on the component also counts.  */
5258 if (Is_Array_Type (gnat_type))
5260 gnat_type = Underlying_Type (Component_Type (gnat_type));
5261 if (Present (Alignment_Clause (gnat_type)))
5262 *align_clause = true;
5265 if (!Is_Floating_Point_Type (gnat_type))
/* Only exactly 64-bit floating-point types qualify as "double" here.  */
5268 if (UI_To_Int (Esize (gnat_type)) != 64)
5274 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5275 size is greater or equal to 64 bits, or an array of such a type. Set
5276 ALIGN_CLAUSE according to the presence of an alignment clause on the
5277 type or, if it is an array, on the component type. */
5280 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5282 gnat_type = Underlying_Type (gnat_type);
5284 *align_clause = Present (Alignment_Clause (gnat_type));
/* For an array, look at the component type instead; an alignment clause
   on the component also counts.  */
5286 if (Is_Array_Type (gnat_type))
5288 gnat_type = Underlying_Type (Component_Type (gnat_type));
5289 if (Present (Alignment_Clause (gnat_type)))
5290 *align_clause = true;
5293 if (!Is_Scalar_Type (gnat_type))
/* Scalars smaller than 64 bits do not qualify.  */
5296 if (UI_To_Int (Esize (gnat_type)) < 64)
5302 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5303 component of an aggregate type. */
5306 type_for_nonaliased_component_p (tree gnu_type)
5308 /* If the type is passed by reference, we may have pointers to the
5309 component so it cannot be made non-aliased. */
5310 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5313 /* We used to say that any component of aggregate type is aliased
5314 because the front-end may take 'Reference of it. The front-end
5315 has been enhanced in the meantime so as to use a renaming instead
5316 in most cases, but the back-end can probably take the address of
5317 such a component too so we go for the conservative stance.
5319 For instance, we might need the address of any array type, even
5320 if normally passed by copy, to construct a fat pointer if the
5321 component is used as an actual for an unconstrained formal.
5323 Likewise for record types: even if a specific record subtype is
5324 passed by copy, the parent type might be passed by ref (e.g. if
5325 it's of variable size) and we might take the address of a child
5326 component to pass to a parent formal. We have no way to check
5327 for such conditions here. */
/* Hence all aggregate components are treated as (potentially) aliased.  */
5328 if (AGGREGATE_TYPE_P (gnu_type))
5334 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5337 smaller_form_type_p (tree type, tree orig_type)
5341 /* We're not interested in variants here. */
5342 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5345 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5346 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5349 size = TYPE_SIZE (type);
5350 osize = TYPE_SIZE (orig_type);
/* Only compare sizes that are compile-time constants.  */
5352 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
/* Smaller means strictly smaller constant size.  */
5355 return tree_int_cst_lt (size, osize) != 0;
5358 /* Perform final processing on global declarations. */
/* GC-rooted dummy variable used solely to register globally-used types
   in the types_used_by_var_decl map below.  */
5360 static GTY (()) tree dummy_global;
5363 gnat_write_global_declarations (void)
5368 /* If we have declared types as used at the global level, insert them in
5369 the global hash table. We use a dummy variable for this purpose, but
5370 we need to build it unconditionally to avoid -fcompare-debug issues. */
5371 if (first_global_object_name)
5373 struct varpool_node *node;
5376 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5378 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
/* Mark the dummy so it is kept but never actually emitted as data.  */
5380 DECL_HARD_REGISTER (dummy_global) = 1;
5381 TREE_STATIC (dummy_global) = 1;
5382 node = varpool_node::get_create (dummy_global);
5383 node->definition = 1;
5384 node->force_output = 1;
/* Drain the queue of used types, attaching each to the dummy decl.  */
5386 if (types_used_by_cur_var_decl)
5387 while (!types_used_by_cur_var_decl->is_empty ())
5389 tree t = types_used_by_cur_var_decl->pop ();
5390 types_used_by_var_decl_insert (t, dummy_global);
5394 /* Output debug information for all global type declarations first. This
5395 ensures that global types whose compilation hasn't been finalized yet,
5396 for example pointers to Taft amendment types, have their compilation
5397 finalized in the right context. */
5398 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5399 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5400 debug_hooks->type_decl (iter, false);
5402 /* Then output the global variables. We need to do that after the debug
5403 information for global types is emitted so that they are finalized. */
5404 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5405 if (TREE_CODE (iter) == VAR_DECL)
5406 rest_of_decl_compilation (iter, true, 0);
5408 /* Output the imported modules/declarations. In GNAT, these are only
5409 materializing subprograms. */
5410 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5411 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5412 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5413 DECL_CONTEXT (iter), 0);
5416 /* ************************************************************************
5417 * * GCC builtins support *
5418 * ************************************************************************ */
5420 /* The general scheme is fairly simple:
5422 For each builtin function/type to be declared, gnat_install_builtins calls
5423 internal facilities which eventually get to gnat_pushdecl, which in turn
5424 tracks the so declared builtin function decls in the 'builtin_decls' global
5425 datastructure. When an Intrinsic subprogram declaration is processed, we
5426 search this global datastructure to retrieve the associated BUILT_IN DECL
5429 /* Search the chain of currently available builtin declarations for a node
5430 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5431 found, if any, or NULL_TREE otherwise. */
5433 builtin_decl_for (tree name)
/* Linear scan; identifiers are interned, so pointer equality suffices.  */
5438 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5439 if (DECL_NAME (decl) == name)
5445 /* The code below eventually exposes gnat_install_builtins, which declares
5446 the builtin types and functions we might need, either internally or as
5447 user accessible facilities.
5449 ??? This is a first implementation shot, still in rough shape. It is
5450 heavily inspired from the "C" family implementation, with chunks copied
5451 verbatim from there.
5453 Two obvious improvement candidates are:
5454 o Use a more efficient name/decl mapping scheme
5455 o Devise a middle-end infrastructure to avoid having to copy
5456 pieces between front-ends. */
5458 /* ----------------------------------------------------------------------- *
5459 * BUILTIN ELEMENTARY TYPES *
5460 * ----------------------------------------------------------------------- */
5462 /* Standard data types to be used in builtin argument declarations. */
5466 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5468 CTI_CONST_STRING_TYPE,
5473 static tree c_global_trees[CTI_MAX];
5475 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5476 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5477 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5479 /* ??? In addition to some attribute handlers, we currently don't support a
5480 (small) number of builtin-types, which in turn inhibits support for a
5481 number of builtin functions. */
5482 #define wint_type_node void_type_node
5483 #define intmax_type_node void_type_node
5484 #define uintmax_type_node void_type_node
5486 /* Build the void_list_node (void_type_node having been created). */
5489 build_void_list_node (void)
/* A TREE_LIST holding void_type_node, used to terminate the argument
   lists of function types.  */
5491 tree t = build_tree_list (NULL_TREE, void_type_node);
5495 /* Used to help initialize the builtin-types.def table. When a type of
5496 the correct size doesn't exist, use error_mark_node instead of NULL.
5497 The latter results in segfaults even when a decl using the type doesn't
5501 builtin_type_for_size (int size, bool unsignedp)
/* Delegate to the GNAT type lookup; substitute error_mark_node so that
   downstream consumers never see a NULL type.  */
5503 tree type = gnat_type_for_size (size, unsignedp);
5504 return type ? type : error_mark_node;
5507 /* Build/push the elementary type decls that builtin functions/types
5511 install_builtin_elementary_types (void)
5513 signed_size_type_node = gnat_signed_type_for (size_type_node);
5514 pid_type_node = integer_type_node;
5515 void_list_node = build_void_list_node ();
/* "char *" and "const char *", used by string-handling builtins.  */
5517 string_type_node = build_pointer_type (char_type_node);
5518 const_string_type_node
5519 = build_pointer_type (build_qualified_type
5520 (char_type_node, TYPE_QUAL_CONST));
5523 /* ----------------------------------------------------------------------- *
5524 * BUILTIN FUNCTION TYPES *
5525 * ----------------------------------------------------------------------- */
5527 /* Now, builtin function types per se. */
5531 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5532 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5533 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5534 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5535 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5536 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5537 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5538 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5540 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5542 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5543 ARG6, ARG7, ARG8) NAME,
5544 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5545 ARG6, ARG7, ARG8, ARG9) NAME,
5546 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5547 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5548 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5549 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5550 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5551 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5552 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5553 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5554 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5555 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5557 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5559 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5561 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5562 #include "builtin-types.def"
5563 #undef DEF_PRIMITIVE_TYPE
5564 #undef DEF_FUNCTION_TYPE_0
5565 #undef DEF_FUNCTION_TYPE_1
5566 #undef DEF_FUNCTION_TYPE_2
5567 #undef DEF_FUNCTION_TYPE_3
5568 #undef DEF_FUNCTION_TYPE_4
5569 #undef DEF_FUNCTION_TYPE_5
5570 #undef DEF_FUNCTION_TYPE_6
5571 #undef DEF_FUNCTION_TYPE_7
5572 #undef DEF_FUNCTION_TYPE_8
5573 #undef DEF_FUNCTION_TYPE_9
5574 #undef DEF_FUNCTION_TYPE_10
5575 #undef DEF_FUNCTION_TYPE_11
5576 #undef DEF_FUNCTION_TYPE_VAR_0
5577 #undef DEF_FUNCTION_TYPE_VAR_1
5578 #undef DEF_FUNCTION_TYPE_VAR_2
5579 #undef DEF_FUNCTION_TYPE_VAR_3
5580 #undef DEF_FUNCTION_TYPE_VAR_4
5581 #undef DEF_FUNCTION_TYPE_VAR_5
5582 #undef DEF_FUNCTION_TYPE_VAR_6
5583 #undef DEF_FUNCTION_TYPE_VAR_7
5584 #undef DEF_POINTER_TYPE
5588 typedef enum c_builtin_type builtin_type;
5590 /* A temporary array used in communication with def_fn_type. */
5591 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5593 /* A helper function for install_builtin_types. Build function type
5594 for DEF with return type RET and N arguments. If VAR is true, then the
5595 function should be variadic after those N arguments.
5597 Takes special care not to ICE if any of the types involved are
5598 error_mark_node, which indicates that said type is not in fact available
5599 (see builtin_type_for_size). In which case the function type as a whole
5600 should be error_mark_node. */
5603 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5606 tree *args = XALLOCAVEC (tree, n);
/* Collect the N argument types from the variadic list, bailing out as
   soon as one of them is unavailable.  */
5611 for (i = 0; i < n; ++i)
5613 builtin_type a = (builtin_type) va_arg (list, int);
5614 t = builtin_types[a];
5615 if (t == error_mark_node)
/* The return type must be available too.  */
5620 t = builtin_types[ret];
5621 if (t == error_mark_node)
/* Variadic vs. fixed-arity function type construction.  */
5624 t = build_varargs_function_type_array (t, n, args);
5626 t = build_function_type_array (t, n, args);
/* Record the finished (or error) type for later builtin decls.  */
5629 builtin_types[def] = t;
5633 /* Build the builtin function types and install them in the builtin_types
5634 array for later use in builtin function decls. */
5637 install_builtin_function_types (void)
5639 tree va_list_ref_type_node;
5640 tree va_list_arg_type_node;
/* When va_list is an array type it decays to a pointer when passed, so
   both the argument and the reference forms are pointer types.  */
5642 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5644 va_list_arg_type_node = va_list_ref_type_node =
5645 build_pointer_type (TREE_TYPE (va_list_type_node));
5649 va_list_arg_type_node = va_list_type_node;
5650 va_list_ref_type_node = build_reference_type (va_list_type_node);
/* Expand builtin-types.def once, mapping each DEF_* entry onto either a
   direct builtin_types[] assignment or a def_fn_type call.  */
5653 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5654 builtin_types[ENUM] = VALUE;
5655 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5656 def_fn_type (ENUM, RETURN, 0, 0);
5657 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5658 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5659 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5660 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5661 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5662 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5663 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5664 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5665 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5666 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5667 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5669 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5670 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5672 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5673 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5675 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5677 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5678 ARG6, ARG7, ARG8, ARG9) \
5679 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5681 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5682 ARG6, ARG7, ARG8, ARG9, ARG10) \
5683 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5684 ARG7, ARG8, ARG9, ARG10);
5685 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5686 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5687 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5688 ARG7, ARG8, ARG9, ARG10, ARG11);
5689 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5690 def_fn_type (ENUM, RETURN, 1, 0);
5691 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5692 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5693 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5694 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5695 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5696 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5697 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5698 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5699 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5700 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5701 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5703 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5704 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5706 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5707 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5708 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5710 #include "builtin-types.def"
5712 #undef DEF_PRIMITIVE_TYPE
5713 #undef DEF_FUNCTION_TYPE_0
5714 #undef DEF_FUNCTION_TYPE_1
5715 #undef DEF_FUNCTION_TYPE_2
5716 #undef DEF_FUNCTION_TYPE_3
5717 #undef DEF_FUNCTION_TYPE_4
5718 #undef DEF_FUNCTION_TYPE_5
5719 #undef DEF_FUNCTION_TYPE_6
5720 #undef DEF_FUNCTION_TYPE_7
5721 #undef DEF_FUNCTION_TYPE_8
5722 #undef DEF_FUNCTION_TYPE_9
5723 #undef DEF_FUNCTION_TYPE_10
5724 #undef DEF_FUNCTION_TYPE_11
5725 #undef DEF_FUNCTION_TYPE_VAR_0
5726 #undef DEF_FUNCTION_TYPE_VAR_1
5727 #undef DEF_FUNCTION_TYPE_VAR_2
5728 #undef DEF_FUNCTION_TYPE_VAR_3
5729 #undef DEF_FUNCTION_TYPE_VAR_4
5730 #undef DEF_FUNCTION_TYPE_VAR_5
5731 #undef DEF_FUNCTION_TYPE_VAR_6
5732 #undef DEF_FUNCTION_TYPE_VAR_7
5733 #undef DEF_POINTER_TYPE
/* Sentinel slot past the last real builtin type.  */
5734 builtin_types[(int) BT_LAST] = NULL_TREE;
5737 /* ----------------------------------------------------------------------- *
5738 * BUILTIN ATTRIBUTES *
5739 * ----------------------------------------------------------------------- */
5741 enum built_in_attribute
5743 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5744 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5745 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5746 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5747 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5748 #include "builtin-attrs.def"
5749 #undef DEF_ATTR_NULL_TREE
5751 #undef DEF_ATTR_STRING
5752 #undef DEF_ATTR_IDENT
5753 #undef DEF_ATTR_TREE_LIST
/* Attribute trees indexed by enum built_in_attribute, filled in by
   install_builtin_attributes below; GC-rooted via GTY.  */
5757 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5760 install_builtin_attributes (void)
5762 /* Fill in the built_in_attributes array. */
5763 #define DEF_ATTR_NULL_TREE(ENUM) \
5764 built_in_attributes[(int) ENUM] = NULL_TREE;
5765 #define DEF_ATTR_INT(ENUM, VALUE) \
5766 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5767 #define DEF_ATTR_STRING(ENUM, VALUE) \
5768 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5769 #define DEF_ATTR_IDENT(ENUM, STRING) \
5770 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5771 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5772 built_in_attributes[(int) ENUM] \
5773 = tree_cons (built_in_attributes[(int) PURPOSE], \
5774 built_in_attributes[(int) VALUE], \
5775 built_in_attributes[(int) CHAIN]);
5776 #include "builtin-attrs.def"
5777 #undef DEF_ATTR_NULL_TREE
5779 #undef DEF_ATTR_STRING
5780 #undef DEF_ATTR_IDENT
5781 #undef DEF_ATTR_TREE_LIST
5784 /* Handle a "const" attribute; arguments as in
5785 struct attribute_spec.handler. */
5788 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5789 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* TREE_READONLY on a FUNCTION_DECL is GCC's representation of "const".  */
5792 if (TREE_CODE (*node) == FUNCTION_DECL)
5793 TREE_READONLY (*node) = 1;
/* Otherwise the attribute is dropped.  */
5795 *no_add_attrs = true;
5800 /* Handle a "nothrow" attribute; arguments as in
5801 struct attribute_spec.handler. */
5804 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5805 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
/* TREE_NOTHROW marks the function as never raising an exception.  */
5808 if (TREE_CODE (*node) == FUNCTION_DECL)
5809 TREE_NOTHROW (*node) = 1;
/* Otherwise the attribute is dropped.  */
5811 *no_add_attrs = true;
5816 /* Handle a "pure" attribute; arguments as in
5817 struct attribute_spec.handler. */
5820 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5821 int ARG_UNUSED (flags), bool *no_add_attrs)
5823 if (TREE_CODE (*node) == FUNCTION_DECL)
5824 DECL_PURE_P (*node) = 1;
5825 /* TODO: support types. */
/* Warn and drop the attribute when it cannot be applied.  */
5828 warning (OPT_Wattributes, "%qs attribute ignored",
5829 IDENTIFIER_POINTER (name));
5830 *no_add_attrs = true;
5836 /* Handle a "no vops" attribute; arguments as in
5837 struct attribute_spec.handler. */
5840 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5841 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5842 bool *ARG_UNUSED (no_add_attrs))
5844 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5845 DECL_IS_NOVOPS (*node) = 1;
5849 /* Helper for nonnull attribute handling; fetch the operand number
5850 from the attribute argument list. */
5853 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5855 /* Verify the arg number is a constant. */
5856 if (!tree_fits_uhwi_p (arg_num_expr))
5859 *valp = TREE_INT_CST_LOW (arg_num_expr);
5863 /* Handle the "nonnull" attribute. */
5865 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5866 tree args, int ARG_UNUSED (flags),
5870 unsigned HOST_WIDE_INT attr_arg_num;
5872 /* If no arguments are specified, all pointer arguments should be
5873 non-null. Verify a full prototype is given so that the arguments
5874 will have the correct types when we actually check them later. */
5877 if (!prototype_p (type))
5879 error ("nonnull attribute without arguments on a non-prototype");
5880 *no_add_attrs = true;
5885 /* Argument list specified. Verify that each argument number references
5886 a pointer argument. */
5887 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5889 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5891 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5893 error ("nonnull argument has invalid operand number (argument %lu)",
5894 (unsigned long) attr_arg_num);
5895 *no_add_attrs = true;
5899 if (prototype_p (type))
5901 function_args_iterator iter;
5904 function_args_iter_init (&iter, type);
5905 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
5907 argument = function_args_iter_cond (&iter);
5908 if (!argument || ck_num == arg_num)
5913 || TREE_CODE (argument) == VOID_TYPE)
5915 error ("nonnull argument with out-of-range operand number "
5916 "(argument %lu, operand %lu)",
5917 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5918 *no_add_attrs = true;
5922 if (TREE_CODE (argument) != POINTER_TYPE)
5924 error ("nonnull argument references non-pointer operand "
5925 "(argument %lu, operand %lu)",
5926 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5927 *no_add_attrs = true;
5936 /* Handle a "sentinel" attribute. */
5939 handle_sentinel_attribute (tree *node, tree name, tree args,
5940 int ARG_UNUSED (flags), bool *no_add_attrs)
5942 if (!prototype_p (*node))
5944 warning (OPT_Wattributes,
5945 "%qs attribute requires prototypes with named arguments",
5946 IDENTIFIER_POINTER (name));
5947 *no_add_attrs = true;
5951 if (!stdarg_p (*node))
5953 warning (OPT_Wattributes,
5954 "%qs attribute only applies to variadic functions",
5955 IDENTIFIER_POINTER (name));
5956 *no_add_attrs = true;
5962 tree position = TREE_VALUE (args);
5964 if (TREE_CODE (position) != INTEGER_CST)
5966 warning (0, "requested position is not an integer constant");
5967 *no_add_attrs = true;
5971 if (tree_int_cst_lt (position, integer_zero_node))
5973 warning (0, "requested position is less than zero");
5974 *no_add_attrs = true;
5982 /* Handle a "noreturn" attribute; arguments as in
5983 struct attribute_spec.handler. */
5986 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5987 int ARG_UNUSED (flags), bool *no_add_attrs)
5989 tree type = TREE_TYPE (*node);
5991 /* See FIXME comment in c_common_attribute_table. */
5992 if (TREE_CODE (*node) == FUNCTION_DECL)
5993 TREE_THIS_VOLATILE (*node) = 1;
5994 else if (TREE_CODE (type) == POINTER_TYPE
5995 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5997 = build_pointer_type
5998 (build_type_variant (TREE_TYPE (type),
5999 TYPE_READONLY (TREE_TYPE (type)), 1));
6002 warning (OPT_Wattributes, "%qs attribute ignored",
6003 IDENTIFIER_POINTER (name));
6004 *no_add_attrs = true;
6010 /* Handle a "noinline" attribute; arguments as in
6011 struct attribute_spec.handler. */
6014 handle_noinline_attribute (tree *node, tree name,
6015 tree ARG_UNUSED (args),
6016 int ARG_UNUSED (flags), bool *no_add_attrs)
6018 if (TREE_CODE (*node) == FUNCTION_DECL)
6020 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6022 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6023 "with attribute %qs", name, "always_inline");
6024 *no_add_attrs = true;
6027 DECL_UNINLINABLE (*node) = 1;
6031 warning (OPT_Wattributes, "%qE attribute ignored", name);
6032 *no_add_attrs = true;
6038 /* Handle a "noclone" attribute; arguments as in
6039 struct attribute_spec.handler. */
6042 handle_noclone_attribute (tree *node, tree name,
6043 tree ARG_UNUSED (args),
6044 int ARG_UNUSED (flags), bool *no_add_attrs)
6046 if (TREE_CODE (*node) != FUNCTION_DECL)
6048 warning (OPT_Wattributes, "%qE attribute ignored", name);
6049 *no_add_attrs = true;
6055 /* Handle a "leaf" attribute; arguments as in
6056 struct attribute_spec.handler. */
6059 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6060 int ARG_UNUSED (flags), bool *no_add_attrs)
6062 if (TREE_CODE (*node) != FUNCTION_DECL)
6064 warning (OPT_Wattributes, "%qE attribute ignored", name);
6065 *no_add_attrs = true;
6067 if (!TREE_PUBLIC (*node))
6069 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6070 *no_add_attrs = true;
6076 /* Handle a "always_inline" attribute; arguments as in
6077 struct attribute_spec.handler. */
6080 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6081 int ARG_UNUSED (flags), bool *no_add_attrs)
6083 if (TREE_CODE (*node) == FUNCTION_DECL)
6085 /* Set the attribute and mark it for disregarding inline limits. */
6086 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6090 warning (OPT_Wattributes, "%qE attribute ignored", name);
6091 *no_add_attrs = true;
6097 /* Handle a "malloc" attribute; arguments as in
6098 struct attribute_spec.handler. */
6101 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6102 int ARG_UNUSED (flags), bool *no_add_attrs)
6104 if (TREE_CODE (*node) == FUNCTION_DECL
6105 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6106 DECL_IS_MALLOC (*node) = 1;
6109 warning (OPT_Wattributes, "%qs attribute ignored",
6110 IDENTIFIER_POINTER (name));
6111 *no_add_attrs = true;
6117 /* Fake handler for attributes we don't properly support. */
6120 fake_attribute_handler (tree * ARG_UNUSED (node),
6121 tree ARG_UNUSED (name),
6122 tree ARG_UNUSED (args),
6123 int ARG_UNUSED (flags),
6124 bool * ARG_UNUSED (no_add_attrs))
6129 /* Handle a "type_generic" attribute. */
6132 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6133 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6134 bool * ARG_UNUSED (no_add_attrs))
6136 /* Ensure we have a function type. */
6137 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6139 /* Ensure we have a variadic function. */
6140 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6145 /* Handle a "vector_size" attribute; arguments as in
6146 struct attribute_spec.handler. */
6149 handle_vector_size_attribute (tree *node, tree name, tree args,
6150 int ARG_UNUSED (flags), bool *no_add_attrs)
6155 *no_add_attrs = true;
6157 /* We need to provide for vector pointers, vector arrays, and
6158 functions returning vectors. For example:
6160 __attribute__((vector_size(16))) short *foo;
6162 In this case, the mode is SI, but the type being modified is
6163 HI, so we need to look further. */
6164 while (POINTER_TYPE_P (type)
6165 || TREE_CODE (type) == FUNCTION_TYPE
6166 || TREE_CODE (type) == ARRAY_TYPE)
6167 type = TREE_TYPE (type);
6169 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6173 /* Build back pointers if needed. */
6174 *node = reconstruct_complex_type (*node, vector_type);
6179 /* Handle a "vector_type" attribute; arguments as in
6180 struct attribute_spec.handler. */
6183 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6184 int ARG_UNUSED (flags), bool *no_add_attrs)
6189 *no_add_attrs = true;
6191 if (TREE_CODE (type) != ARRAY_TYPE)
6193 error ("attribute %qs applies to array types only",
6194 IDENTIFIER_POINTER (name));
6198 vector_type = build_vector_type_for_array (type, name);
6202 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6203 *node = vector_type;
6208 /* ----------------------------------------------------------------------- *
6209 * BUILTIN FUNCTIONS *
6210 * ----------------------------------------------------------------------- */
6212 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6213 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6214 if nonansi_p and flag_no_nonansi_builtin. */
6217 def_builtin_1 (enum built_in_function fncode,
6219 enum built_in_class fnclass,
6220 tree fntype, tree libtype,
6221 bool both_p, bool fallback_p,
6222 bool nonansi_p ATTRIBUTE_UNUSED,
6223 tree fnattrs, bool implicit_p)
6226 const char *libname;
6228 /* Preserve an already installed decl. It most likely was setup in advance
6229 (e.g. as part of the internal builtins) for specific reasons. */
6230 if (builtin_decl_explicit (fncode))
6233 gcc_assert ((!both_p && !fallback_p)
6234 || !strncmp (name, "__builtin_",
6235 strlen ("__builtin_")));
6237 libname = name + strlen ("__builtin_");
6238 decl = add_builtin_function (name, fntype, fncode, fnclass,
6239 (fallback_p ? libname : NULL),
6242 /* ??? This is normally further controlled by command-line options
6243 like -fno-builtin, but we don't have them for Ada. */
6244 add_builtin_function (libname, libtype, fncode, fnclass,
6247 set_builtin_decl (fncode, decl, implicit_p);
6250 static int flag_isoc94 = 0;
6251 static int flag_isoc99 = 0;
6252 static int flag_isoc11 = 0;
6254 /* Install what the common builtins.def offers. */
6257 install_builtin_functions (void)
6259 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6260 NONANSI_P, ATTRS, IMPLICIT, COND) \
6262 def_builtin_1 (ENUM, NAME, CLASS, \
6263 builtin_types[(int) TYPE], \
6264 builtin_types[(int) LIBTYPE], \
6265 BOTH_P, FALLBACK_P, NONANSI_P, \
6266 built_in_attributes[(int) ATTRS], IMPLICIT);
6267 #include "builtins.def"
6270 /* ----------------------------------------------------------------------- *
6271 * BUILTIN FUNCTIONS *
6272 * ----------------------------------------------------------------------- */
6274 /* Install the builtin functions we might need. */
6277 gnat_install_builtins (void)
6279 install_builtin_elementary_types ();
6280 install_builtin_function_types ();
6281 install_builtin_attributes ();
6283 /* Install builtins used by generic middle-end pieces first. Some of these
6284 know about internal specificities and control attributes accordingly, for
6285 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6286 the generic definition from builtins.def. */
6287 build_common_builtin_nodes ();
6289 /* Now, install the target specific builtins, such as the AltiVec family on
6290 ppc, and the common set as exposed by builtins.def. */
6291 targetm.init_builtins ();
6292 install_builtin_functions ();
6295 #include "gt-ada-utils.h"
6296 #include "gtype-ada.h"