1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2018, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
32 #include "stringpool.h"
34 #include "diagnostic.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
44 #include "common/common-target.h"
45 #include "langhooks.h"
46 #include "tree-dump.h"
47 #include "tree-inline.h"
60 /* If nonzero, pretend we are allocating at global level. */
63 /* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66 int double_float_alignment;
68 /* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71 int double_scalar_alignment;
73 /* True if floating-point arithmetics may use wider intermediate results. */
74 bool fp_arith_may_widen = true;
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82 /* Likewise, but with extra info for each of the possible raise reasons. */
83 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
102 /* Fake handler for attributes we don't properly support, typically because
103 they'd require dragging a lot of the common-c front-end circuitry. */
104 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
106 /* Table of machine-independent internal attributes for Ada. We support
107 this minimal set of attributes to accommodate the needs of builtins. */
108 const struct attribute_spec gnat_internal_attribute_table[] =
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
111 affects_type_identity, handler, exclude } */
112 { "const", 0, 0, true, false, false, false,
113 handle_const_attribute, NULL },
114 { "nothrow", 0, 0, true, false, false, false,
115 handle_nothrow_attribute, NULL },
116 { "pure", 0, 0, true, false, false, false,
117 handle_pure_attribute, NULL },
118 { "no vops", 0, 0, true, false, false, false,
119 handle_novops_attribute, NULL },
120 { "nonnull", 0, -1, false, true, true, false,
121 handle_nonnull_attribute, NULL },
122 { "sentinel", 0, 1, false, true, true, false,
123 handle_sentinel_attribute, NULL },
124 { "noreturn", 0, 0, true, false, false, false,
125 handle_noreturn_attribute, NULL },
126 { "noinline", 0, 0, true, false, false, false,
127 handle_noinline_attribute, NULL },
128 { "noclone", 0, 0, true, false, false, false,
129 handle_noclone_attribute, NULL },
130 { "leaf", 0, 0, true, false, false, false,
131 handle_leaf_attribute, NULL },
132 { "always_inline",0, 0, true, false, false, false,
133 handle_always_inline_attribute, NULL },
134 { "malloc", 0, 0, true, false, false, false,
135 handle_malloc_attribute, NULL },
136 { "type generic", 0, 0, false, true, true, false,
137 handle_type_generic_attribute, NULL },
139 { "vector_size", 1, 1, false, true, false, false,
140 handle_vector_size_attribute, NULL },
141 { "vector_type", 0, 0, false, true, false, false,
142 handle_vector_type_attribute, NULL },
143 { "may_alias", 0, 0, false, true, false, false, NULL, NULL },
145 /* ??? format and format_arg are heavy and not supported, which actually
146 prevents support for stdio builtins, which we however declare as part
147 of the common builtins.def contents. */
148 { "format", 3, 3, false, true, true, false, fake_attribute_handler,
150 { "format_arg", 1, 1, false, true, true, false, fake_attribute_handler,
153 { NULL, 0, 0, false, false, false, false, NULL, NULL }
156 /* Associates a GNAT tree node to a GCC tree node. It is used in
157 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
158 of `save_gnu_tree' for more info. */
159 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
161 #define GET_GNU_TREE(GNAT_ENTITY) \
162 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
164 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
165 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
167 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
168 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
170 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
171 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
173 #define GET_DUMMY_NODE(GNAT_ENTITY) \
174 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
176 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
177 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
179 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
180 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
182 /* This variable keeps a table for types for each precision so that we only
183 allocate each of them once. Signed and unsigned types are kept separate.
185 Note that these types are only used when fold-const requests something
186 special. Perhaps we should NOT share these types; we'll see how it
188 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
190 /* Likewise for float types, but record these by mode. */
191 static GTY(()) tree float_types[NUM_MACHINE_MODES];
193 /* For each binding contour we allocate a binding_level structure to indicate
194 the binding depth. */
196 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
197 /* The binding level containing this one (the enclosing binding level). */
198 struct gnat_binding_level *chain;
199 /* The BLOCK node for this level. */
201 /* If nonzero, the setjmp buffer that needs to be updated for any
202 variable-sized definition within this context. */
206 /* The binding level currently in effect. */
207 static GTY(()) struct gnat_binding_level *current_binding_level;
209 /* A chain of gnat_binding_level structures awaiting reuse. */
210 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
212 /* The context to be used for global declarations. */
213 static GTY(()) tree global_context;
215 /* An array of global declarations. */
216 static GTY(()) vec<tree, va_gc> *global_decls;
218 /* An array of builtin function declarations. */
219 static GTY(()) vec<tree, va_gc> *builtin_decls;
221 /* A chain of unused BLOCK nodes. */
222 static GTY((deletable)) tree free_block_chain;
224 /* A hash table of padded types. It is modelled on the generic type
225 hash table in tree.c, which must thus be used as a reference. */
227 struct GTY((for_user)) pad_type_hash
233 struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
235 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
236 static bool equal (pad_type_hash *a, pad_type_hash *b);
239 keep_cache_entry (pad_type_hash *&t)
241 return ggc_marked_p (t->type);
245 static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;
247 static tree merge_sizes (tree, tree, tree, bool, bool);
248 static tree fold_bit_position (const_tree);
249 static tree compute_related_constant (tree, tree);
250 static tree split_plus (tree, tree *);
251 static tree float_type_for_precision (int, machine_mode);
252 static tree convert_to_fat_pointer (tree, tree);
253 static unsigned int scale_by_factor_of (tree, unsigned int);
254 static bool potential_alignment_gap (tree, tree, tree);
256 /* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
257 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
258 struct deferred_decl_context_node
260 /* The ..._DECL node to work on. */
263 /* The corresponding entity's Scope. */
264 Entity_Id gnat_scope;
266 /* The value of force_global when DECL was pushed. */
269 /* The list of ..._TYPE nodes to propagate the context to. */
272 /* The next queue item. */
273 struct deferred_decl_context_node *next;
276 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
278 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
279 feed it with the elaboration of GNAT_SCOPE. */
280 static struct deferred_decl_context_node *
281 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
283 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
284 feed it with the DECL_CONTEXT computed as part of N as soon as it is
286 static void add_deferred_type_context (struct deferred_decl_context_node *n,
289 /* Initialize data structures of the utils.c module. */
292 init_gnat_utils (void)
294 /* Initialize the association of GNAT nodes to GCC trees. */
295 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
297 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
298 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
300 /* Initialize the hash table of padded types. */
301 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
304 /* Destroy data structures of the utils.c module. */
307 destroy_gnat_utils (void)
309 /* Destroy the association of GNAT nodes to GCC trees. */
310 ggc_free (associate_gnat_to_gnu);
311 associate_gnat_to_gnu = NULL;
313 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
314 ggc_free (dummy_node_table);
315 dummy_node_table = NULL;
317 /* Destroy the hash table of padded types. */
318 pad_type_hash_table->empty ();
319 pad_type_hash_table = NULL;
322 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
323 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
324 If NO_CHECK is true, the latter check is suppressed.
326 If GNU_DECL is zero, reset a previous association. */
329 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
331 /* Check that GNAT_ENTITY is not already defined and that it is being set
332 to something which is a decl. If that is not the case, this usually
333 means GNAT_ENTITY is defined twice, but occasionally is due to some
335 gcc_assert (!(gnu_decl
336 && (PRESENT_GNU_TREE (gnat_entity)
337 || (!no_check && !DECL_P (gnu_decl)))));
339 SET_GNU_TREE (gnat_entity, gnu_decl);
342 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
343 that was associated with it. If there is no such tree node, abort.
345 In some cases, such as delayed elaboration or expressions that need to
346 be elaborated only once, GNAT_ENTITY is really not an entity. */
349 get_gnu_tree (Entity_Id gnat_entity)
351 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
352 return GET_GNU_TREE (gnat_entity);
355 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
358 present_gnu_tree (Entity_Id gnat_entity)
360 return PRESENT_GNU_TREE (gnat_entity);
363 /* Make a dummy type corresponding to GNAT_TYPE. */
366 make_dummy_type (Entity_Id gnat_type)
368 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
369 tree gnu_type, debug_type;
371 /* If there was no equivalent type (can only happen when just annotating
372 types) or underlying type, go back to the original type. */
374 gnat_equiv = gnat_type;
376 /* If it there already a dummy type, use that one. Else make one. */
377 if (PRESENT_DUMMY_NODE (gnat_equiv))
378 return GET_DUMMY_NODE (gnat_equiv);
380 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
382 gnu_type = make_node (Is_Record_Type (gnat_equiv)
383 ? tree_code_for_record_type (gnat_equiv)
385 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
386 TYPE_DUMMY_P (gnu_type) = 1;
387 TYPE_STUB_DECL (gnu_type)
388 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
389 if (Is_By_Reference_Type (gnat_equiv))
390 TYPE_BY_REFERENCE_P (gnu_type) = 1;
392 SET_DUMMY_NODE (gnat_equiv, gnu_type);
394 /* Create a debug type so that debuggers only see an unspecified type. */
395 if (Needs_Debug_Info (gnat_type))
397 debug_type = make_node (LANG_TYPE);
398 TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
399 TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
400 SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);
406 /* Return the dummy type that was made for GNAT_TYPE, if any. */
409 get_dummy_type (Entity_Id gnat_type)
411 return GET_DUMMY_NODE (gnat_type);
414 /* Build dummy fat and thin pointer types whose designated type is specified
415 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
418 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
420 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
421 tree gnu_fat_type, fields, gnu_object_type;
423 gnu_template_type = make_node (RECORD_TYPE);
424 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
425 TYPE_DUMMY_P (gnu_template_type) = 1;
426 gnu_ptr_template = build_pointer_type (gnu_template_type);
428 gnu_array_type = make_node (ENUMERAL_TYPE);
429 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
430 TYPE_DUMMY_P (gnu_array_type) = 1;
431 gnu_ptr_array = build_pointer_type (gnu_array_type);
433 gnu_fat_type = make_node (RECORD_TYPE);
434 /* Build a stub DECL to trigger the special processing for fat pointer types
436 TYPE_NAME (gnu_fat_type)
437 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
439 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
440 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
442 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
443 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
444 finish_fat_pointer_type (gnu_fat_type, fields);
445 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
446 /* Suppress debug info until after the type is completed. */
447 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
449 gnu_object_type = make_node (RECORD_TYPE);
450 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
451 TYPE_DUMMY_P (gnu_object_type) = 1;
453 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
454 TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
455 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
458 /* Return true if we are in the global binding level. */
461 global_bindings_p (void)
463 return force_global || !current_function_decl;
466 /* Enter a new binding level. */
469 gnat_pushlevel (void)
471 struct gnat_binding_level *newlevel = NULL;
473 /* Reuse a struct for this binding level, if there is one. */
474 if (free_binding_level)
476 newlevel = free_binding_level;
477 free_binding_level = free_binding_level->chain;
480 newlevel = ggc_alloc<gnat_binding_level> ();
482 /* Use a free BLOCK, if any; otherwise, allocate one. */
483 if (free_block_chain)
485 newlevel->block = free_block_chain;
486 free_block_chain = BLOCK_CHAIN (free_block_chain);
487 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
490 newlevel->block = make_node (BLOCK);
492 /* Point the BLOCK we just made to its parent. */
493 if (current_binding_level)
494 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
496 BLOCK_VARS (newlevel->block) = NULL_TREE;
497 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
498 TREE_USED (newlevel->block) = 1;
500 /* Add this level to the front of the chain (stack) of active levels. */
501 newlevel->chain = current_binding_level;
502 newlevel->jmpbuf_decl = NULL_TREE;
503 current_binding_level = newlevel;
506 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
507 and point FNDECL to this BLOCK. */
510 set_current_block_context (tree fndecl)
512 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
513 DECL_INITIAL (fndecl) = current_binding_level->block;
514 set_block_for_group (current_binding_level->block);
517 /* Set the jmpbuf_decl for the current binding level to DECL. */
520 set_block_jmpbuf_decl (tree decl)
522 current_binding_level->jmpbuf_decl = decl;
525 /* Get the jmpbuf_decl, if any, for the current binding level. */
528 get_block_jmpbuf_decl (void)
530 return current_binding_level->jmpbuf_decl;
533 /* Exit a binding level. Set any BLOCK into the current code group. */
538 struct gnat_binding_level *level = current_binding_level;
539 tree block = level->block;
541 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
542 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
544 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
545 are no variables free the block and merge its subblocks into those of its
546 parent block. Otherwise, add it to the list of its parent. */
547 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
549 else if (!BLOCK_VARS (block))
551 BLOCK_SUBBLOCKS (level->chain->block)
552 = block_chainon (BLOCK_SUBBLOCKS (block),
553 BLOCK_SUBBLOCKS (level->chain->block));
554 BLOCK_CHAIN (block) = free_block_chain;
555 free_block_chain = block;
559 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
560 BLOCK_SUBBLOCKS (level->chain->block) = block;
561 TREE_USED (block) = 1;
562 set_block_for_group (block);
565 /* Free this binding structure. */
566 current_binding_level = level->chain;
567 level->chain = free_binding_level;
568 free_binding_level = level;
571 /* Exit a binding level and discard the associated BLOCK. */
576 struct gnat_binding_level *level = current_binding_level;
577 tree block = level->block;
579 BLOCK_CHAIN (block) = free_block_chain;
580 free_block_chain = block;
582 /* Free this binding structure. */
583 current_binding_level = level->chain;
584 level->chain = free_binding_level;
585 free_binding_level = level;
588 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
591 gnat_set_type_context (tree type, tree context)
593 tree decl = TYPE_STUB_DECL (type);
595 TYPE_CONTEXT (type) = context;
597 while (decl && DECL_PARALLEL_TYPE (decl))
599 tree parallel_type = DECL_PARALLEL_TYPE (decl);
601 /* Give a context to the parallel types and their stub decl, if any.
602 Some parallel types seems to be present in multiple parallel type
603 chains, so don't mess with their context if they already have one. */
604 if (!TYPE_CONTEXT (parallel_type))
606 if (TYPE_STUB_DECL (parallel_type))
607 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
608 TYPE_CONTEXT (parallel_type) = context;
611 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
615 /* Return the innermost scope, starting at GNAT_NODE, we are be interested in
616 the debug info, or Empty if there is no such scope. If not NULL, set
617 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
620 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
622 Entity_Id gnat_entity;
625 *is_subprogram = false;
627 if (Nkind (gnat_node) == N_Defining_Identifier
628 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
629 gnat_entity = Scope (gnat_node);
633 while (Present (gnat_entity))
635 switch (Ekind (gnat_entity))
639 if (Present (Protected_Body_Subprogram (gnat_entity)))
640 gnat_entity = Protected_Body_Subprogram (gnat_entity);
642 /* If the scope is a subprogram, then just rely on
643 current_function_decl, so that we don't have to defer
644 anything. This is needed because other places rely on the
645 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
647 *is_subprogram = true;
651 case E_Record_Subtype:
655 /* By default, we are not interested in this particular scope: go to
660 gnat_entity = Scope (gnat_entity);
666 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
670 defer_or_set_type_context (tree type, tree context,
671 struct deferred_decl_context_node *n)
674 add_deferred_type_context (n, type);
676 gnat_set_type_context (type, context);
679 /* Return global_context, but create it first if need be. */
682 get_global_context (void)
687 = build_translation_unit_decl (get_identifier (main_input_filename));
688 debug_hooks->register_main_translation_unit (global_context);
691 return global_context;
694 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
695 for location information and flag propagation. */
698 gnat_pushdecl (tree decl, Node_Id gnat_node)
700 tree context = NULL_TREE;
701 struct deferred_decl_context_node *deferred_decl_context = NULL;
703 /* If explicitely asked to make DECL global or if it's an imported nested
704 object, short-circuit the regular Scope-based context computation. */
705 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
707 /* Rely on the GNAT scope, or fallback to the current_function_decl if
708 the GNAT scope reached the global scope, if it reached a subprogram
709 or the declaration is a subprogram or a variable (for them we skip
710 intermediate context types because the subprogram body elaboration
711 machinery and the inliner both expect a subprogram context).
713 Falling back to current_function_decl is necessary for implicit
714 subprograms created by gigi, such as the elaboration subprograms. */
715 bool context_is_subprogram = false;
716 const Entity_Id gnat_scope
717 = get_debug_scope (gnat_node, &context_is_subprogram);
719 if (Present (gnat_scope)
720 && !context_is_subprogram
721 && TREE_CODE (decl) != FUNCTION_DECL
722 && TREE_CODE (decl) != VAR_DECL)
723 /* Always assume the scope has not been elaborated, thus defer the
724 context propagation to the time its elaboration will be
726 deferred_decl_context
727 = add_deferred_decl_context (decl, gnat_scope, force_global);
729 /* External declarations (when force_global > 0) may not be in a
731 else if (current_function_decl && force_global == 0)
732 context = current_function_decl;
735 /* If either we are forced to be in global mode or if both the GNAT scope and
736 the current_function_decl did not help in determining the context, use the
738 if (!deferred_decl_context && !context)
739 context = get_global_context ();
741 /* Functions imported in another function are not really nested.
742 For really nested functions mark them initially as needing
743 a static chain for uses of that flag before unnesting;
744 lower_nested_functions will then recompute it. */
745 if (TREE_CODE (decl) == FUNCTION_DECL
746 && !TREE_PUBLIC (decl)
748 && (TREE_CODE (context) == FUNCTION_DECL
749 || decl_function_context (context)))
750 DECL_STATIC_CHAIN (decl) = 1;
752 if (!deferred_decl_context)
753 DECL_CONTEXT (decl) = context;
755 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
757 /* Set the location of DECL and emit a declaration for it. */
758 if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
759 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
761 add_decl_expr (decl, gnat_node);
763 /* Put the declaration on the list. The list of declarations is in reverse
764 order. The list will be reversed later. Put global declarations in the
765 globals list and local ones in the current block. But skip TYPE_DECLs
766 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
767 with the debugger and aren't needed anyway. */
768 if (!(TREE_CODE (decl) == TYPE_DECL
769 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
771 /* External declarations must go to the binding level they belong to.
772 This will make corresponding imported entities are available in the
773 debugger at the proper time. */
774 if (DECL_EXTERNAL (decl)
775 && TREE_CODE (decl) == FUNCTION_DECL
776 && fndecl_built_in_p (decl))
777 vec_safe_push (builtin_decls, decl);
778 else if (global_bindings_p ())
779 vec_safe_push (global_decls, decl);
782 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
783 BLOCK_VARS (current_binding_level->block) = decl;
787 /* For the declaration of a type, set its name either if it isn't already
788 set or if the previous type name was not derived from a source name.
789 We'd rather have the type named with a real name and all the pointer
790 types to the same object have the same node, except when the names are
791 both derived from source names. */
792 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
794 tree t = TREE_TYPE (decl);
796 /* Array and pointer types aren't tagged types in the C sense so we need
797 to generate a typedef in DWARF for them and make sure it is preserved,
798 unless the type is artificial. */
799 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
800 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
801 || DECL_ARTIFICIAL (decl)))
803 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
804 generate the typedef in DWARF. Also do that for fat pointer types
805 because, even though they are tagged types in the C sense, they are
806 still XUP types attached to the base array type at this point. */
807 else if (!DECL_ARTIFICIAL (decl)
808 && (TREE_CODE (t) == ARRAY_TYPE
809 || TREE_CODE (t) == POINTER_TYPE
810 || TYPE_IS_FAT_POINTER_P (t)))
812 tree tt = build_variant_type_copy (t);
813 TYPE_NAME (tt) = decl;
814 defer_or_set_type_context (tt,
816 deferred_decl_context);
817 TREE_TYPE (decl) = tt;
819 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
820 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
821 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
823 DECL_ORIGINAL_TYPE (decl) = t;
824 /* Array types need to have a name so that they can be related to
825 their GNAT encodings. */
826 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
827 TYPE_NAME (t) = DECL_NAME (decl);
830 else if (TYPE_NAME (t)
831 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
832 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
837 /* Propagate the name to all the variants, this is needed for the type
838 qualifiers machinery to work properly (see check_qualified_type).
839 Also propagate the context to them. Note that it will be propagated
840 to all parallel types too thanks to gnat_set_type_context. */
842 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
843 /* ??? Because of the previous kludge, we can have variants of fat
844 pointer types with different names. */
845 if (!(TYPE_IS_FAT_POINTER_P (t)
847 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
849 TYPE_NAME (t) = decl;
850 defer_or_set_type_context (t,
852 deferred_decl_context);
857 /* Create a record type that contains a SIZE bytes long field of TYPE with a
858 starting bit position so that it is aligned to ALIGN bits, and leaving at
859 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
860 record is guaranteed to get. GNAT_NODE is used for the position of the
861 associated TYPE_DECL. */
864 make_aligning_type (tree type, unsigned int align, tree size,
865 unsigned int base_align, int room, Node_Id gnat_node)
867 /* We will be crafting a record type with one field at a position set to be
868 the next multiple of ALIGN past record'address + room bytes. We use a
869 record placeholder to express record'address. */
870 tree record_type = make_node (RECORD_TYPE);
871 tree record = build0 (PLACEHOLDER_EXPR, record_type);
874 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
876 /* The diagram below summarizes the shape of what we manipulate:
878 <--------- pos ---------->
879 { +------------+-------------+-----------------+
880 record =>{ |############| ... | field (type) |
881 { +------------+-------------+-----------------+
882 |<-- room -->|<- voffset ->|<---- size ----->|
885 record_addr vblock_addr
887 Every length is in sizetype bytes there, except "pos" which has to be
888 set as a bit position in the GCC tree for the record. */
889 tree room_st = size_int (room);
890 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
891 tree voffset_st, pos, field;
893 tree name = TYPE_IDENTIFIER (type);
895 name = concat_name (name, "ALIGN");
896 TYPE_NAME (record_type) = name;
898 /* Compute VOFFSET and then POS. The next byte position multiple of some
899 alignment after some address is obtained by "and"ing the alignment minus
900 1 with the two's complement of the address. */
901 voffset_st = size_binop (BIT_AND_EXPR,
902 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
903 size_int ((align / BITS_PER_UNIT) - 1));
905 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
906 pos = size_binop (MULT_EXPR,
907 convert (bitsizetype,
908 size_binop (PLUS_EXPR, room_st, voffset_st)),
911 /* Craft the GCC record representation. We exceptionally do everything
912 manually here because 1) our generic circuitry is not quite ready to
913 handle the complex position/size expressions we are setting up, 2) we
914 have a strong simplifying factor at hand: we know the maximum possible
915 value of voffset, and 3) we have to set/reset at least the sizes in
916 accordance with this maximum value anyway, as we need them to convey
917 what should be "alloc"ated for this type.
919 Use -1 as the 'addressable' indication for the field to prevent the
920 creation of a bitfield. We don't need one, it would have damaging
921 consequences on the alignment computation, and create_field_decl would
922 make one without this special argument, for instance because of the
923 complex position expression. */
924 field = create_field_decl (get_identifier ("F"), type, record_type, size,
926 TYPE_FIELDS (record_type) = field;
928 SET_TYPE_ALIGN (record_type, base_align);
929 TYPE_USER_ALIGN (record_type) = 1;
931 TYPE_SIZE (record_type)
932 = size_binop (PLUS_EXPR,
933 size_binop (MULT_EXPR, convert (bitsizetype, size),
935 bitsize_int (align + room * BITS_PER_UNIT));
936 TYPE_SIZE_UNIT (record_type)
937 = size_binop (PLUS_EXPR, size,
938 size_int (room + align / BITS_PER_UNIT));
940 SET_TYPE_MODE (record_type, BLKmode);
941 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
943 /* Declare it now since it will never be declared otherwise. This is
944 necessary to ensure that its subtrees are properly marked. */
945 create_type_decl (name, record_type, true, false, gnat_node);
950 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
951 as the field type of a packed record if IN_RECORD is true, or as the
952 component type of a packed array if IN_RECORD is false. See if we can
953 rewrite it either as a type that has non-BLKmode, which we can pack
954 tighter in the packed record case, or as a smaller type with at most
955 MAX_ALIGN alignment if the value is non-zero. If so, return the new
956 type; if not, return the original type. */
959 make_packable_type (tree type, bool in_record, unsigned int max_align)
961 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
962 unsigned HOST_WIDE_INT new_size;
963 unsigned int align = TYPE_ALIGN (type);
964 unsigned int new_align;
966 /* No point in doing anything if the size is zero. */
970 tree new_type = make_node (TREE_CODE (type));
972 /* Copy the name and flags from the old type to that of the new.
973 Note that we rely on the pointer equality created here for
974 TYPE_NAME to look through conversions in various places. */
975 TYPE_NAME (new_type) = TYPE_NAME (type);
976 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
977 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
978 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
979 if (TREE_CODE (type) == RECORD_TYPE)
980 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
982 /* If we are in a record and have a small size, set the alignment to
983 try for an integral mode. Otherwise set it to try for a smaller
984 type with BLKmode. */
985 if (in_record && size <= MAX_FIXED_MODE_SIZE)
987 new_size = ceil_pow2 (size);
988 new_align = MIN (new_size, BIGGEST_ALIGNMENT);
989 SET_TYPE_ALIGN (new_type, new_align);
993 tree type_size = TYPE_ADA_SIZE (type);
994 /* Do not try to shrink the size if the RM size is not constant. */
995 if (TYPE_CONTAINS_TEMPLATE_P (type)
996 || !tree_fits_uhwi_p (type_size))
999 /* Round the RM size up to a unit boundary to get the minimal size
1000 for a BLKmode record. Give up if it's already the size and we
1001 don't need to lower the alignment. */
1002 new_size = tree_to_uhwi (type_size);
1003 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
1004 if (new_size == size && (max_align == 0 || align <= max_align))
1007 new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
1008 if (max_align > 0 && new_align > max_align)
1009 new_align = max_align;
1010 SET_TYPE_ALIGN (new_type, MIN (align, new_align));
1013 TYPE_USER_ALIGN (new_type) = 1;
1015 /* Now copy the fields, keeping the position and size as we don't want
1016 to change the layout by propagating the packedness downwards. */
1017 tree new_field_list = NULL_TREE;
1018 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1020 tree new_field_type = TREE_TYPE (field);
1021 tree new_field, new_size;
1023 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1024 && !TYPE_FAT_POINTER_P (new_field_type)
1025 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
1026 new_field_type = make_packable_type (new_field_type, true, max_align);
1028 /* However, for the last field in a not already packed record type
1029 that is of an aggregate type, we need to use the RM size in the
1030 packable version of the record type, see finish_record_type. */
1031 if (!DECL_CHAIN (field)
1032 && !TYPE_PACKED (type)
1033 && RECORD_OR_UNION_TYPE_P (new_field_type)
1034 && !TYPE_FAT_POINTER_P (new_field_type)
1035 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1036 && TYPE_ADA_SIZE (new_field_type))
1037 new_size = TYPE_ADA_SIZE (new_field_type);
1039 new_size = DECL_SIZE (field);
1042 = create_field_decl (DECL_NAME (field), new_field_type, new_type,
1043 new_size, bit_position (field),
1045 !DECL_NONADDRESSABLE_P (field));
1047 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
1048 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
1049 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1050 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);
1052 DECL_CHAIN (new_field) = new_field_list;
1053 new_field_list = new_field;
1056 /* If this is a padding record, we never want to make the size smaller
1057 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1058 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1060 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1061 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1066 TYPE_SIZE (new_type) = bitsize_int (new_size);
1067 TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
1070 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1071 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1073 finish_record_type (new_type, nreverse (new_field_list), 2, false);
1074 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
1075 if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1076 SET_TYPE_DEBUG_TYPE (new_type, TYPE_DEBUG_TYPE (type));
1077 else if (TYPE_STUB_DECL (type))
1078 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1079 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
1081 /* Try harder to get a packable type if necessary, for example
1082 in case the record itself contains a BLKmode field. */
1083 if (in_record && TYPE_MODE (new_type) == BLKmode)
1084 SET_TYPE_MODE (new_type,
1085 mode_for_size_tree (TYPE_SIZE (new_type),
1086 MODE_INT, 1).else_blk ());
1088 /* If neither mode nor size nor alignment shrunk, return the old type. */
1089 if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
1095 /* Return true if TYPE has an unsigned representation. This needs to be used
1096 when the representation of types whose precision is not equal to their size
1097 is manipulated based on the RM size. */
1100 type_unsigned_for_rm (tree type)
1102 /* This is the common case. */
1103 if (TYPE_UNSIGNED (type))
1106 /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1107 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1108 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1114 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1115 If TYPE is the best type, return it. Otherwise, make a new type. We
1116 only support new integral and pointer types. FOR_BIASED is true if
1117 we are making a biased type. */
1120 make_type_from_size (tree type, tree size_tree, bool for_biased)
1122 unsigned HOST_WIDE_INT size;
1126 /* If size indicates an error, just return TYPE to avoid propagating
1127 the error. Likewise if it's too large to represent. */
1128 if (!size_tree || !tree_fits_uhwi_p (size_tree))
1131 size = tree_to_uhwi (size_tree);
1133 switch (TREE_CODE (type))
1136 /* Do not mess with boolean types that have foreign convention. */
1137 if (TYPE_PRECISION (type) == 1 && TYPE_SIZE (type) == size_tree)
1140 /* ... fall through ... */
1144 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1145 && TYPE_BIASED_REPRESENTATION_P (type));
1147 /* Integer types with precision 0 are forbidden. */
1151 /* Only do something if the type isn't a packed array type and doesn't
1152 already have the proper size and the size isn't too large. */
1153 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1154 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1155 || size > LONG_LONG_TYPE_SIZE)
1158 biased_p |= for_biased;
1160 /* The type should be an unsigned type if the original type is unsigned
1161 or if the lower bound is constant and non-negative or if the type is
1162 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1163 if (type_unsigned_for_rm (type) || biased_p)
1164 new_type = make_unsigned_type (size);
1166 new_type = make_signed_type (size);
1167 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1168 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1169 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
1170 /* Copy the name to show that it's essentially the same type and
1171 not a subrange type. */
1172 TYPE_NAME (new_type) = TYPE_NAME (type);
1173 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1174 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1178 /* Do something if this is a fat pointer, in which case we
1179 may need to return the thin pointer. */
1180 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1182 scalar_int_mode p_mode;
1183 if (!int_mode_for_size (size, 0).exists (&p_mode)
1184 || !targetm.valid_pointer_mode (p_mode))
1187 build_pointer_type_for_mode
1188 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1194 /* Only do something if this is a thin pointer, in which case we
1195 may need to return the fat pointer. */
1196 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1198 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1208 /* Return true iff the padded types are equivalent. */
1211 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1215 if (t1->hash != t2->hash)
1221 /* We consider that the padded types are equivalent if they pad the same type
1222 and have the same size, alignment, RM size and storage order. Taking the
1223 mode into account is redundant since it is determined by the others. */
1225 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1226 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1227 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1228 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1229 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1232 /* Compute the hash value for the padded TYPE. */
1235 hash_pad_type (tree type)
1240 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1241 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1242 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1243 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1248 /* Look up the padded TYPE in the hash table and return its canonical version
1249 if it exists; otherwise, insert it into the hash table. */
1252 canonicalize_pad_type (tree type)
1254 const hashval_t hashcode = hash_pad_type (type);
1255 struct pad_type_hash in, *h, **slot;
1259 slot = pad_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
1263 h = ggc_alloc<pad_type_hash> ();
1272 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1273 if needed. We have already verified that SIZE and ALIGN are large enough.
1274 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1275 IS_COMPONENT_TYPE is true if this is being done for the component type of
1276 an array. IS_USER_TYPE is true if the original type needs to be completed.
1277 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1278 the RM size of the resulting type is to be set to SIZE too; in this case,
1279 the padded type is canonicalized before being returned. */
1282 maybe_pad_type (tree type, tree size, unsigned int align,
1283 Entity_Id gnat_entity, bool is_component_type,
1284 bool is_user_type, bool definition, bool set_rm_size)
1286 tree orig_size = TYPE_SIZE (type);
1287 unsigned int orig_align = TYPE_ALIGN (type);
1290 /* If TYPE is a padded type, see if it agrees with any size and alignment
1291 we were given. If so, return the original type. Otherwise, strip
1292 off the padding, since we will either be returning the inner type
1293 or repadding it. If no size or alignment is specified, use that of
1294 the original padded type. */
1295 if (TYPE_IS_PADDING_P (type))
1298 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1299 && (align == 0 || align == orig_align))
1307 type = TREE_TYPE (TYPE_FIELDS (type));
1308 orig_size = TYPE_SIZE (type);
1309 orig_align = TYPE_ALIGN (type);
1312 /* If the size is either not being changed or is being made smaller (which
1313 is not done here and is only valid for bitfields anyway), show the size
1314 isn't changing. Likewise, clear the alignment if it isn't being
1315 changed. Then return if we aren't doing anything. */
1317 && (operand_equal_p (size, orig_size, 0)
1318 || (TREE_CODE (orig_size) == INTEGER_CST
1319 && tree_int_cst_lt (size, orig_size))))
1322 if (align == orig_align)
1325 if (align == 0 && !size)
1328 /* If requested, complete the original type and give it a name. */
1330 create_type_decl (get_entity_name (gnat_entity), type,
1331 !Comes_From_Source (gnat_entity),
1333 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1334 && DECL_IGNORED_P (TYPE_NAME (type))),
1337 /* We used to modify the record in place in some cases, but that could
1338 generate incorrect debugging information. So make a new record
1340 record = make_node (RECORD_TYPE);
1341 TYPE_PADDING_P (record) = 1;
1343 /* ??? Padding types around packed array implementation types will be
1344 considered as root types in the array descriptor language hook (see
1345 gnat_get_array_descr_info). Give them the original packed array type
1346 name so that the one coming from sources appears in the debugging
1348 if (TYPE_IMPL_PACKED_ARRAY_P (type)
1349 && TYPE_ORIGINAL_PACKED_ARRAY (type)
1350 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1351 TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
1352 else if (Present (gnat_entity))
1353 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1355 SET_TYPE_ALIGN (record, align ? align : orig_align);
1356 TYPE_SIZE (record) = size ? size : orig_size;
1357 TYPE_SIZE_UNIT (record)
1358 = convert (sizetype,
1359 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1360 bitsize_unit_node));
1362 /* If we are changing the alignment and the input type is a record with
1363 BLKmode and a small constant size, try to make a form that has an
1364 integral mode. This might allow the padding record to also have an
1365 integral mode, which will be much more efficient. There is no point
1366 in doing so if a size is specified unless it is also a small constant
1367 size and it is incorrect to do so if we cannot guarantee that the mode
1368 will be naturally aligned since the field must always be addressable.
1370 ??? This might not always be a win when done for a stand-alone object:
1371 since the nominal and the effective type of the object will now have
1372 different modes, a VIEW_CONVERT_EXPR will be required for converting
1373 between them and it might be hard to overcome afterwards, including
1374 at the RTL level when the stand-alone object is accessed as a whole. */
1376 && RECORD_OR_UNION_TYPE_P (type)
1377 && TYPE_MODE (type) == BLKmode
1378 && !TYPE_BY_REFERENCE_P (type)
1379 && TREE_CODE (orig_size) == INTEGER_CST
1380 && !TREE_OVERFLOW (orig_size)
1381 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1383 || (TREE_CODE (size) == INTEGER_CST
1384 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1386 tree packable_type = make_packable_type (type, true);
1387 if (TYPE_MODE (packable_type) != BLKmode
1388 && align >= TYPE_ALIGN (packable_type))
1389 type = packable_type;
1392 /* Now create the field with the original size. */
1393 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1394 bitsize_zero_node, 0, 1);
1395 DECL_INTERNAL_P (field) = 1;
1397 /* We will output additional debug info manually below. */
1398 finish_record_type (record, field, 1, false);
1400 /* Set the RM size if requested. */
1403 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1405 /* If the padded type is complete and has constant size, we canonicalize
1406 it by means of the hash table. This is consistent with the language
1407 semantics and ensures that gigi and the middle-end have a common view
1408 of these padded types. */
1409 if (TREE_CONSTANT (TYPE_SIZE (record)))
1411 tree canonical = canonicalize_pad_type (record);
1412 if (canonical != record)
1420 if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1421 SET_TYPE_DEBUG_TYPE (record, maybe_debug_type (type));
1423 /* Unless debugging information isn't being written for the input type,
1424 write a record that shows what we are a subtype of and also make a
1425 variable that indicates our size, if still variable. */
1426 if (TREE_CODE (orig_size) != INTEGER_CST
1427 && TYPE_NAME (record)
1429 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1430 && DECL_IGNORED_P (TYPE_NAME (type))))
1432 tree name = TYPE_IDENTIFIER (record);
1433 tree size_unit = TYPE_SIZE_UNIT (record);
1435 /* A variable that holds the size is required even with no encoding since
1436 it will be referenced by debugging information attributes. At global
1437 level, we need a single variable across all translation units. */
1439 && TREE_CODE (size) != INTEGER_CST
1440 && (definition || global_bindings_p ()))
1442 /* Whether or not gnat_entity comes from source, this XVZ variable is
1443 is a compilation artifact. */
1445 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1446 size_unit, true, global_bindings_p (),
1447 !definition && global_bindings_p (), false,
1448 false, true, true, NULL, gnat_entity);
1449 TYPE_SIZE_UNIT (record) = size_unit;
1452 /* There is no need to show what we are a subtype of when outputting as
1453 few encodings as possible: regular debugging infomation makes this
1455 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1457 tree marker = make_node (RECORD_TYPE);
1458 tree orig_name = TYPE_IDENTIFIER (type);
1460 TYPE_NAME (marker) = concat_name (name, "XVS");
1461 finish_record_type (marker,
1462 create_field_decl (orig_name,
1463 build_reference_type (type),
1464 marker, NULL_TREE, NULL_TREE,
1467 TYPE_SIZE_UNIT (marker) = size_unit;
1469 add_parallel_type (record, marker);
1474 /* If a simple size was explicitly given, maybe issue a warning. */
1476 || TREE_CODE (size) == COND_EXPR
1477 || TREE_CODE (size) == MAX_EXPR
1478 || No (gnat_entity))
1481 /* But don't do it if we are just annotating types and the type is tagged or
1482 concurrent, since these types aren't fully laid out in this mode. */
1483 if (type_annotate_only)
1487 ? Component_Type (gnat_entity) : Etype (gnat_entity);
1489 if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1493 /* Take the original size as the maximum size of the input if there was an
1494 unconstrained record involved and round it up to the specified alignment,
1495 if one was specified, but only for aggregate types. */
1496 if (CONTAINS_PLACEHOLDER_P (orig_size))
1497 orig_size = max_size (orig_size, true);
1499 if (align && AGGREGATE_TYPE_P (type))
1500 orig_size = round_up (orig_size, align);
1502 if (!operand_equal_p (size, orig_size, 0)
1503 && !(TREE_CODE (size) == INTEGER_CST
1504 && TREE_CODE (orig_size) == INTEGER_CST
1505 && (TREE_OVERFLOW (size)
1506 || TREE_OVERFLOW (orig_size)
1507 || tree_int_cst_lt (size, orig_size))))
1509 Node_Id gnat_error_node;
1511 /* For a packed array, post the message on the original array type. */
1512 if (Is_Packed_Array_Impl_Type (gnat_entity))
1513 gnat_entity = Original_Array_Type (gnat_entity);
1515 if ((Ekind (gnat_entity) == E_Component
1516 || Ekind (gnat_entity) == E_Discriminant)
1517 && Present (Component_Clause (gnat_entity)))
1518 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1519 else if (Has_Size_Clause (gnat_entity))
1520 gnat_error_node = Expression (Size_Clause (gnat_entity));
1521 else if (Has_Object_Size_Clause (gnat_entity))
1522 gnat_error_node = Expression (Object_Size_Clause (gnat_entity));
1524 gnat_error_node = Empty;
1526 /* Generate message only for entities that come from source, since
1527 if we have an entity created by expansion, the message will be
1528 generated for some other corresponding source entity. */
1529 if (Comes_From_Source (gnat_entity))
1531 if (Present (gnat_error_node))
1532 post_error_ne_tree ("{^ }bits of & unused?",
1533 gnat_error_node, gnat_entity,
1534 size_diffop (size, orig_size));
1535 else if (is_component_type)
1536 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1537 gnat_entity, gnat_entity,
1538 size_diffop (size, orig_size));
1545 /* Return true if padded TYPE was built with an RM size. */
1548 pad_type_has_rm_size (tree type)
1550 /* This is required for the lookup. */
1551 if (!TREE_CONSTANT (TYPE_SIZE (type)))
1554 const hashval_t hashcode = hash_pad_type (type);
1555 struct pad_type_hash in, *h;
1559 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1561 /* The types built with an RM size are the canonicalized ones. */
1562 return h && h->type == type;
1565 /* Return a copy of the padded TYPE but with reverse storage order. */
1568 set_reverse_storage_order_on_pad_type (tree type)
1572 /* If the inner type is not scalar then the function does nothing. */
1573 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1574 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1575 && !VECTOR_TYPE_P (inner_type));
1578 /* This is required for the canonicalization. */
1579 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1581 tree field = copy_node (TYPE_FIELDS (type));
1582 type = copy_type (type);
1583 DECL_CONTEXT (field) = type;
1584 TYPE_FIELDS (type) = field;
1585 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1586 return canonicalize_pad_type (type);
1589 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1590 If this is a multi-dimensional array type, do this recursively.
1593 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1594 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1595 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1598 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1600 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1601 of a one-dimensional array, since the padding has the same alias set
1602 as the field type, but if it's a multi-dimensional array, we need to
1603 see the inner types. */
1604 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1605 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1606 || TYPE_PADDING_P (gnu_old_type)))
1607 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1609 /* Unconstrained array types are deemed incomplete and would thus be given
1610 alias set 0. Retrieve the underlying array type. */
1611 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1613 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1614 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1616 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1618 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1619 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1620 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1621 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1625 case ALIAS_SET_COPY:
1626 /* The alias set shouldn't be copied between array types with different
1627 aliasing settings because this can break the aliasing relationship
1628 between the array type and its element type. */
1629 if (flag_checking || flag_strict_aliasing)
1630 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1631 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1632 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1633 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1635 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1638 case ALIAS_SET_SUBSET:
1639 case ALIAS_SET_SUPERSET:
1641 alias_set_type old_set = get_alias_set (gnu_old_type);
1642 alias_set_type new_set = get_alias_set (gnu_new_type);
1644 /* Do nothing if the alias sets conflict. This ensures that we
1645 never call record_alias_subset several times for the same pair
1646 or at all for alias set 0. */
1647 if (!alias_sets_conflict_p (old_set, new_set))
1649 if (op == ALIAS_SET_SUBSET)
1650 record_alias_subset (old_set, new_set);
1652 record_alias_subset (new_set, old_set);
1661 record_component_aliases (gnu_new_type);
1664 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1665 ARTIFICIAL_P is true if the type was generated by the compiler. */
1668 record_builtin_type (const char *name, tree type, bool artificial_p)
1670 tree type_decl = build_decl (input_location,
1671 TYPE_DECL, get_identifier (name), type);
1672 DECL_ARTIFICIAL (type_decl) = artificial_p;
1673 TYPE_ARTIFICIAL (type) = artificial_p;
1674 gnat_pushdecl (type_decl, Empty);
1676 if (debug_hooks->type_decl)
1677 debug_hooks->type_decl (type_decl, false);
1680 /* Finish constructing the character type CHAR_TYPE.
1682 In Ada character types are enumeration types and, as a consequence, are
1683 represented in the front-end by integral types holding the positions of
1684 the enumeration values as defined by the language, which means that the
1685 integral types are unsigned.
1687 Unfortunately the signedness of 'char' in C is implementation-defined
1688 and GCC even has the option -f[un]signed-char to toggle it at run time.
1689 Since GNAT's philosophy is to be compatible with C by default, to wit
1690 Interfaces.C.char is defined as a mere copy of Character, we may need
1691 to declare character types as signed types in GENERIC and generate the
1692 necessary adjustments to make them behave as unsigned types.
1694 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1695 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1696 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1697 types. The idea is to ensure that the bit pattern contained in the
1698 Esize'd objects is not changed, even though the numerical value will
1699 be interpreted differently depending on the signedness. */
1702 finish_character_type (tree char_type)
1704 if (TYPE_UNSIGNED (char_type))
1707 /* Make a copy of a generic unsigned version since we'll modify it. */
1708 tree unsigned_char_type
1709 = (char_type == char_type_node
1710 ? unsigned_char_type_node
1711 : copy_type (gnat_unsigned_type_for (char_type)));
1713 /* Create an unsigned version of the type and set it as debug type. */
1714 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1715 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1716 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1717 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1719 /* If this is a subtype, make the debug type a subtype of the debug type
1720 of the base type and convert literal RM bounds to unsigned. */
1721 if (TREE_TYPE (char_type))
1723 tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
1724 tree min_value = TYPE_RM_MIN_VALUE (char_type);
1725 tree max_value = TYPE_RM_MAX_VALUE (char_type);
1727 if (TREE_CODE (min_value) == INTEGER_CST)
1728 min_value = fold_convert (base_unsigned_char_type, min_value);
1729 if (TREE_CODE (max_value) == INTEGER_CST)
1730 max_value = fold_convert (base_unsigned_char_type, max_value);
1732 TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1733 SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1734 SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1737 /* Adjust the RM bounds of the original type to unsigned; that's especially
1738 important for types since they are implicit in this case. */
1739 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1740 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1743 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1744 finish constructing the record type as a fat pointer type. */
1747 finish_fat_pointer_type (tree record_type, tree field_list)
1749 /* Make sure we can put it into a register. */
1750 if (STRICT_ALIGNMENT)
1751 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1753 /* Show what it really is. */
1754 TYPE_FAT_POINTER_P (record_type) = 1;
1756 /* Do not emit debug info for it since the types of its fields may still be
1757 incomplete at this point. */
1758 finish_record_type (record_type, field_list, 0, false);
1760 /* Force type_contains_placeholder_p to return true on it. Although the
1761 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1762 type but the representation of the unconstrained array. */
1763 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1766 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1767 finish constructing the record or union type. If REP_LEVEL is zero, this
1768 record has no representation clause and so will be entirely laid out here.
1769 If REP_LEVEL is one, this record has a representation clause and has been
1770 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1771 this record is derived from a parent record and thus inherits its layout;
1772 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1773 additional debug info needs to be output for this type. */
1776 finish_record_type (tree record_type, tree field_list, int rep_level,
1779 enum tree_code code = TREE_CODE (record_type);
1780 tree name = TYPE_IDENTIFIER (record_type);
1781 tree ada_size = bitsize_zero_node;
1782 tree size = bitsize_zero_node;
1783 bool had_size = TYPE_SIZE (record_type) != 0;
1784 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1785 bool had_align = TYPE_ALIGN (record_type) != 0;
1788 TYPE_FIELDS (record_type) = field_list;
1790 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1791 generate debug info and have a parallel type. */
1792 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1794 /* Globally initialize the record first. If this is a rep'ed record,
1795 that just means some initializations; otherwise, layout the record. */
1798 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1799 TYPE_ALIGN (record_type)));
1802 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1805 TYPE_SIZE (record_type) = bitsize_zero_node;
1807 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1808 out just like a UNION_TYPE, since the size will be fixed. */
1809 else if (code == QUAL_UNION_TYPE)
1814 /* Ensure there isn't a size already set. There can be in an error
1815 case where there is a rep clause but all fields have errors and
1816 no longer have a position. */
1817 TYPE_SIZE (record_type) = 0;
1819 /* Ensure we use the traditional GCC layout for bitfields when we need
1820 to pack the record type or have a representation clause. The other
1821 possible layout (Microsoft C compiler), if available, would prevent
1822 efficient packing in almost all cases. */
1823 #ifdef TARGET_MS_BITFIELD_LAYOUT
1824 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1825 decl_attributes (&record_type,
1826 tree_cons (get_identifier ("gcc_struct"),
1827 NULL_TREE, NULL_TREE),
1828 ATTR_FLAG_TYPE_IN_PLACE);
1831 layout_type (record_type);
1834 /* At this point, the position and size of each field is known. It was
1835 either set before entry by a rep clause, or by laying out the type above.
1837 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1838 to compute the Ada size; the GCC size and alignment (for rep'ed records
1839 that are not padding types); and the mode (for rep'ed records). We also
1840 clear the DECL_BIT_FIELD indication for the cases we know have not been
1841 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1843 if (code == QUAL_UNION_TYPE)
1844 field_list = nreverse (field_list);
1846 for (field = field_list; field; field = DECL_CHAIN (field))
1848 tree type = TREE_TYPE (field);
1849 tree pos = bit_position (field);
1850 tree this_size = DECL_SIZE (field);
1853 if (RECORD_OR_UNION_TYPE_P (type)
1854 && !TYPE_FAT_POINTER_P (type)
1855 && !TYPE_CONTAINS_TEMPLATE_P (type)
1856 && TYPE_ADA_SIZE (type))
1857 this_ada_size = TYPE_ADA_SIZE (type);
1859 this_ada_size = this_size;
1861 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1862 if (DECL_BIT_FIELD (field)
1863 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1865 unsigned int align = TYPE_ALIGN (type);
1867 /* In the general case, type alignment is required. */
1868 if (value_factor_p (pos, align))
1870 /* The enclosing record type must be sufficiently aligned.
1871 Otherwise, if no alignment was specified for it and it
1872 has been laid out already, bump its alignment to the
1873 desired one if this is compatible with its size and
1874 maximum alignment, if any. */
1875 if (TYPE_ALIGN (record_type) >= align)
1877 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1878 DECL_BIT_FIELD (field) = 0;
1882 && value_factor_p (TYPE_SIZE (record_type), align)
1883 && (!TYPE_MAX_ALIGN (record_type)
1884 || TYPE_MAX_ALIGN (record_type) >= align))
1886 SET_TYPE_ALIGN (record_type, align);
1887 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1888 DECL_BIT_FIELD (field) = 0;
1892 /* In the non-strict alignment case, only byte alignment is. */
1893 if (!STRICT_ALIGNMENT
1894 && DECL_BIT_FIELD (field)
1895 && value_factor_p (pos, BITS_PER_UNIT))
1896 DECL_BIT_FIELD (field) = 0;
1899 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1900 field is technically not addressable. Except that it can actually
1901 be addressed if it is BLKmode and happens to be properly aligned. */
1902 if (DECL_BIT_FIELD (field)
1903 && !(DECL_MODE (field) == BLKmode
1904 && value_factor_p (pos, BITS_PER_UNIT)))
1905 DECL_NONADDRESSABLE_P (field) = 1;
1907 /* A type must be as aligned as its most aligned field that is not
1908 a bit-field. But this is already enforced by layout_type. */
1909 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1910 SET_TYPE_ALIGN (record_type,
1911 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
1916 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1917 size = size_binop (MAX_EXPR, size, this_size);
1920 case QUAL_UNION_TYPE:
1922 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1923 this_ada_size, ada_size);
1924 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1929 /* Since we know here that all fields are sorted in order of
1930 increasing bit position, the size of the record is one
1931 higher than the ending bit of the last field processed
1932 unless we have a rep clause, since in that case we might
1933 have a field outside a QUAL_UNION_TYPE that has a higher ending
1934 position. So use a MAX in that case. Also, if this field is a
1935 QUAL_UNION_TYPE, we need to take into account the previous size in
1936 the case of empty variants. */
1938 = merge_sizes (ada_size, pos, this_ada_size,
1939 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1941 = merge_sizes (size, pos, this_size,
1942 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1950 if (code == QUAL_UNION_TYPE)
1951 nreverse (field_list);
1953 /* We need to set the regular sizes if REP_LEVEL is one. */
1956 /* If this is a padding record, we never want to make the size smaller
1957 than what was specified in it, if any. */
1958 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1959 size = TYPE_SIZE (record_type);
1961 tree size_unit = had_size_unit
1962 ? TYPE_SIZE_UNIT (record_type)
1963 : convert (sizetype,
1964 size_binop (CEIL_DIV_EXPR, size,
1965 bitsize_unit_node));
1966 const unsigned int align = TYPE_ALIGN (record_type);
1968 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1969 TYPE_SIZE_UNIT (record_type)
1970 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1973 /* We need to set the Ada size if REP_LEVEL is zero or one. */
1976 /* Now set any of the values we've just computed that apply. */
1977 if (!TYPE_FAT_POINTER_P (record_type)
1978 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1979 SET_TYPE_ADA_SIZE (record_type, ada_size);
1982 /* We need to set the mode if REP_LEVEL is one or two. */
1985 compute_record_mode (record_type);
1986 finish_bitfield_layout (record_type);
1989 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
1990 TYPE_MAX_ALIGN (record_type) = 0;
1993 rest_of_record_type_compilation (record_type);
1996 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1997 PARRALEL_TYPE has no context and its computation is not deferred yet, also
1998 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1999 moment TYPE will get a context. */
2002 add_parallel_type (tree type, tree parallel_type)
2004 tree decl = TYPE_STUB_DECL (type);
2006 while (DECL_PARALLEL_TYPE (decl))
2007 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
2009 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
2011 /* If PARALLEL_TYPE already has a context, we are done. */
2012 if (TYPE_CONTEXT (parallel_type))
2015 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
2016 it to PARALLEL_TYPE. */
2017 if (TYPE_CONTEXT (type))
2018 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
2020 /* Otherwise TYPE has not context yet. We know it will have one thanks to
2021 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
2022 so we have nothing to do in this case. */
2025 /* Return true if TYPE has a parallel type. */
2028 has_parallel_type (tree type)
2030 tree decl = TYPE_STUB_DECL (type);
2032 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
2035 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
2036 associated with it. It need not be invoked directly in most cases as
2037 finish_record_type takes care of doing so. */
2040 rest_of_record_type_compilation (tree record_type)
2042 bool var_size = false;
2045 /* If this is a padded type, the bulk of the debug info has already been
2046 generated for the field's type. */
2047 if (TYPE_IS_PADDING_P (record_type))
2050 /* If the type already has a parallel type (XVS type), then we're done. */
2051 if (has_parallel_type (record_type))
2054 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
2056 /* We need to make an XVE/XVU record if any field has variable size,
2057 whether or not the record does. For example, if we have a union,
2058 it may be that all fields, rounded up to the alignment, have the
2059 same size, in which case we'll use that size. But the debug
2060 output routines (except Dwarf2) won't be able to output the fields,
2061 so we need to make the special record. */
2062 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
2063 /* If a field has a non-constant qualifier, the record will have
2064 variable size too. */
2065 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
2066 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
2073 /* If this record type is of variable size, make a parallel record type that
2074 will tell the debugger how the former is laid out (see exp_dbug.ads). */
2075 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
2077 tree new_record_type
2078 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
2079 ? UNION_TYPE : TREE_CODE (record_type));
2080 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
2081 tree last_pos = bitsize_zero_node;
2082 tree old_field, prev_old_field = NULL_TREE;
2085 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
2087 TYPE_NAME (new_record_type) = new_name;
2088 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
2089 TYPE_STUB_DECL (new_record_type)
2090 = create_type_stub_decl (new_name, new_record_type);
2091 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
2092 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
2093 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
2094 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
2095 TYPE_SIZE_UNIT (new_record_type)
2096 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2098 /* Now scan all the fields, replacing each field with a new field
2099 corresponding to the new encoding. */
2100 for (old_field = TYPE_FIELDS (record_type); old_field;
2101 old_field = DECL_CHAIN (old_field))
2103 tree field_type = TREE_TYPE (old_field);
2104 tree field_name = DECL_NAME (old_field);
2105 tree curpos = fold_bit_position (old_field);
2106 tree pos, new_field;
2108 unsigned int align = 0;
2110 /* See how the position was modified from the last position.
2112 There are two basic cases we support: a value was added
2113 to the last position or the last position was rounded to
2114 a boundary and they something was added. Check for the
2115 first case first. If not, see if there is any evidence
2116 of rounding. If so, round the last position and retry.
2118 If this is a union, the position can be taken as zero. */
2119 if (TREE_CODE (new_record_type) == UNION_TYPE)
2120 pos = bitsize_zero_node;
2122 pos = compute_related_constant (curpos, last_pos);
2125 && TREE_CODE (curpos) == MULT_EXPR
2126 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
2128 tree offset = TREE_OPERAND (curpos, 0);
2129 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2130 align = scale_by_factor_of (offset, align);
2131 last_pos = round_up (last_pos, align);
2132 pos = compute_related_constant (curpos, last_pos);
2135 && TREE_CODE (curpos) == PLUS_EXPR
2136 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
2137 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
2139 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
2141 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2142 unsigned HOST_WIDE_INT addend
2143 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2145 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
2146 align = scale_by_factor_of (offset, align);
2147 align = MIN (align, addend & -addend);
2148 last_pos = round_up (last_pos, align);
2149 pos = compute_related_constant (curpos, last_pos);
2151 else if (potential_alignment_gap (prev_old_field, old_field, pos))
2153 align = TYPE_ALIGN (field_type);
2154 last_pos = round_up (last_pos, align);
2155 pos = compute_related_constant (curpos, last_pos);
2158 /* If we can't compute a position, set it to zero.
2160 ??? We really should abort here, but it's too much work
2161 to get this correct for all cases. */
2163 pos = bitsize_zero_node;
2165 /* See if this type is variable-sized and make a pointer type
2166 and indicate the indirection if so. Beware that the debug
2167 back-end may adjust the position computed above according
2168 to the alignment of the field type, i.e. the pointer type
2169 in this case, if we don't preventively counter that. */
2170 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2172 field_type = build_pointer_type (field_type);
2173 if (align != 0 && TYPE_ALIGN (field_type) > align)
2175 field_type = copy_type (field_type);
2176 SET_TYPE_ALIGN (field_type, align);
2181 /* Make a new field name, if necessary. */
2182 if (var || align != 0)
2187 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2188 align / BITS_PER_UNIT);
2190 strcpy (suffix, "XVL");
2192 field_name = concat_name (field_name, suffix);
2196 = create_field_decl (field_name, field_type, new_record_type,
2197 DECL_SIZE (old_field), pos, 0, 0);
2198 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2199 TYPE_FIELDS (new_record_type) = new_field;
2201 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2202 zero. The only time it's not the last field of the record
2203 is when there are other components at fixed positions after
2204 it (meaning there was a rep clause for every field) and we
2205 want to be able to encode them. */
2206 last_pos = size_binop (PLUS_EXPR, curpos,
2207 (TREE_CODE (TREE_TYPE (old_field))
2210 : DECL_SIZE (old_field));
2211 prev_old_field = old_field;
2214 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2216 add_parallel_type (record_type, new_record_type);
2220 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2221 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2222 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2223 replace a value of zero with the old size. If HAS_REP is true, we take the
2224 MAX of the end position of this field with LAST_SIZE. In all other cases,
2225 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2228 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2231 tree type = TREE_TYPE (last_size);
2234 if (!special || TREE_CODE (size) != COND_EXPR)
2236 new_size = size_binop (PLUS_EXPR, first_bit, size);
2238 new_size = size_binop (MAX_EXPR, last_size, new_size);
2242 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2243 integer_zerop (TREE_OPERAND (size, 1))
2244 ? last_size : merge_sizes (last_size, first_bit,
2245 TREE_OPERAND (size, 1),
2247 integer_zerop (TREE_OPERAND (size, 2))
2248 ? last_size : merge_sizes (last_size, first_bit,
2249 TREE_OPERAND (size, 2),
2252 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
2253 when fed through substitute_in_expr) into thinking that a constant
2254 size is not constant. */
2255 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2256 new_size = TREE_OPERAND (new_size, 0);
2261 /* Return the bit position of FIELD, in bits from the start of the record,
2262 and fold it as much as possible. This is a tree of type bitsizetype. */
2265 fold_bit_position (const_tree field)
2267 tree offset = DECL_FIELD_OFFSET (field);
2268 if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
2269 offset = size_binop (TREE_CODE (offset),
2270 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
2271 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
2273 offset = fold_convert (bitsizetype, offset);
2274 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2275 size_binop (MULT_EXPR, offset, bitsize_unit_node));
2278 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2279 related by the addition of a constant. Return that constant if so. */
2282 compute_related_constant (tree op0, tree op1)
2284 tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
2286 if (TREE_CODE (op0) == MULT_EXPR
2287 && TREE_CODE (op1) == MULT_EXPR
2288 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2289 && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2291 factor = TREE_OPERAND (op0, 1);
2292 op0 = TREE_OPERAND (op0, 0);
2293 op1 = TREE_OPERAND (op1, 0);
2298 op0_cst = split_plus (op0, &op0_var);
2299 op1_cst = split_plus (op1, &op1_var);
2300 result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2302 if (operand_equal_p (op0_var, op1_var, 0))
2303 return factor ? size_binop (MULT_EXPR, factor, result) : result;
2308 /* Utility function of above to split a tree OP which may be a sum, into a
2309 constant part, which is returned, and a variable part, which is stored
2310 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2314 split_plus (tree in, tree *pvar)
2316 /* Strip conversions in order to ease the tree traversal and maximize the
2317 potential for constant or plus/minus discovery. We need to be careful
2318 to always return and set *pvar to bitsizetype trees, but it's worth
2320 in = remove_conversions (in, false);
2322 *pvar = convert (bitsizetype, in);
2324 if (TREE_CODE (in) == INTEGER_CST)
2326 *pvar = bitsize_zero_node;
2327 return convert (bitsizetype, in);
2329 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2331 tree lhs_var, rhs_var;
2332 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2333 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2335 if (lhs_var == TREE_OPERAND (in, 0)
2336 && rhs_var == TREE_OPERAND (in, 1))
2337 return bitsize_zero_node;
2339 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2340 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2343 return bitsize_zero_node;
2346 /* Return a copy of TYPE but safe to modify in any way. */
2349 copy_type (tree type)
2351 tree new_type = copy_node (type);
2353 /* Unshare the language-specific data. */
2354 if (TYPE_LANG_SPECIFIC (type))
2356 TYPE_LANG_SPECIFIC (new_type) = NULL;
2357 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2360 /* And the contents of the language-specific slot if needed. */
2361 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2362 && TYPE_RM_VALUES (type))
2364 TYPE_RM_VALUES (new_type) = NULL_TREE;
2365 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2366 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2367 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2370 /* copy_node clears this field instead of copying it, because it is
2371 aliased with TREE_CHAIN. */
2372 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2374 TYPE_POINTER_TO (new_type) = NULL_TREE;
2375 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2376 TYPE_MAIN_VARIANT (new_type) = new_type;
2377 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2378 TYPE_CANONICAL (new_type) = new_type;
2383 /* Return a subtype of sizetype with range MIN to MAX and whose
2384 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2385 of the associated TYPE_DECL. */
2388 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2390 /* First build a type for the desired range. */
2391 tree type = build_nonshared_range_type (sizetype, min, max);
2393 /* Then set the index type. */
2394 SET_TYPE_INDEX_TYPE (type, index);
2395 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2400 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2401 sizetype is used. */
2404 create_range_type (tree type, tree min, tree max)
2411 /* First build a type with the base range. */
2412 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2413 TYPE_MAX_VALUE (type));
2415 /* Then set the actual range. */
2416 SET_TYPE_RM_MIN_VALUE (range_type, min);
2417 SET_TYPE_RM_MAX_VALUE (range_type, max);
2422 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2423 NAME gives the name of the type to be used in the declaration. */
2426 create_type_stub_decl (tree name, tree type)
2428 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2429 DECL_ARTIFICIAL (type_decl) = 1;
2430 TYPE_ARTIFICIAL (type) = 1;
2434 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2435 used in the declaration. ARTIFICIAL_P is true if the declaration was
2436 generated by the compiler. DEBUG_INFO_P is true if we need to write
2437 debug information about this type. GNAT_NODE is used for the position
2441 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2444 enum tree_code code = TREE_CODE (type);
2446 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2449 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2450 gcc_assert (!TYPE_IS_DUMMY_P (type));
2452 /* If the type hasn't been named yet, we're naming it; preserve an existing
2453 TYPE_STUB_DECL that has been attached to it for some purpose. */
2454 if (!is_named && TYPE_STUB_DECL (type))
2456 type_decl = TYPE_STUB_DECL (type);
2457 DECL_NAME (type_decl) = name;
2460 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2462 DECL_ARTIFICIAL (type_decl) = artificial_p;
2463 TYPE_ARTIFICIAL (type) = artificial_p;
2465 /* Add this decl to the current binding level. */
2466 gnat_pushdecl (type_decl, gnat_node);
2468 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2469 causes the name to be also viewed as a "tag" by the debug back-end, with
2470 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2473 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2474 from multiple contexts, and "type_decl" references a copy of it: in such a
2475 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2476 with the mechanism above. */
2477 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2478 TYPE_STUB_DECL (type) = type_decl;
2480 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2481 back-end doesn't support, and for others if we don't need to. */
2482 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2483 DECL_IGNORED_P (type_decl) = 1;
2488 /* Return a VAR_DECL or CONST_DECL node.
2490 NAME gives the name of the variable. ASM_NAME is its assembler name
2491 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2492 the GCC tree for an optional initial expression; NULL_TREE if none.
2494 CONST_FLAG is true if this variable is constant, in which case we might
2495 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2497 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2498 definition to be made visible outside of the current compilation unit, for
2499 instance variable definitions in a package specification.
2501 EXTERN_FLAG is true when processing an external variable declaration (as
2502 opposed to a definition: no storage is to be allocated for the variable).
2504 STATIC_FLAG is only relevant when not at top level and indicates whether
2505 to always allocate storage to the variable.
2507 VOLATILE_FLAG is true if this variable is declared as volatile.
2509 ARTIFICIAL_P is true if the variable was generated by the compiler.
2511 DEBUG_INFO_P is true if we need to write debug information for it.
2513 ATTR_LIST is the list of attributes to be attached to the variable.
2515 GNAT_NODE is used for the position of the decl. */
2518 create_var_decl (tree name, tree asm_name, tree type, tree init,
2519 bool const_flag, bool public_flag, bool extern_flag,
2520 bool static_flag, bool volatile_flag, bool artificial_p,
2521 bool debug_info_p, struct attrib *attr_list,
2522 Node_Id gnat_node, bool const_decl_allowed_p)
2524 /* Whether the object has static storage duration, either explicitly or by
2525 virtue of being declared at the global level. */
2526 const bool static_storage = static_flag || global_bindings_p ();
2528 /* Whether the initializer is constant: for an external object or an object
2529 with static storage duration, we check that the initializer is a valid
2530 constant expression for initializing a static variable; otherwise, we
2531 only check that it is constant. */
2532 const bool init_const
2534 && gnat_types_compatible_p (type, TREE_TYPE (init))
2535 && (extern_flag || static_storage
2536 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2538 : TREE_CONSTANT (init)));
2540 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2541 case the initializer may be used in lieu of the DECL node (as done in
2542 Identifier_to_gnu). This is useful to prevent the need of elaboration
2543 code when an identifier for which such a DECL is made is in turn used
2544 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2545 but extra constraints apply to this choice (see below) and they are not
2546 relevant to the distinction we wish to make. */
2547 const bool constant_p = const_flag && init_const;
2549 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2550 and may be used for scalars in general but not for aggregates. */
2552 = build_decl (input_location,
2554 && const_decl_allowed_p
2555 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2558 /* Detect constants created by the front-end to hold 'reference to function
2559 calls for stabilization purposes. This is needed for renaming. */
2560 if (const_flag && init && POINTER_TYPE_P (type))
2563 if (TREE_CODE (inner) == COMPOUND_EXPR)
2564 inner = TREE_OPERAND (inner, 1);
2565 inner = remove_conversions (inner, true);
2566 if (TREE_CODE (inner) == ADDR_EXPR
2567 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2568 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2569 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2570 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2571 DECL_RETURN_VALUE_P (var_decl) = 1;
2574 /* If this is external, throw away any initializations (they will be done
2575 elsewhere) unless this is a constant for which we would like to remain
2576 able to get the initializer. If we are defining a global here, leave a
2577 constant initialization and save any variable elaborations for the
2578 elaboration routine. If we are just annotating types, throw away the
2579 initialization if it isn't a constant. */
2580 if ((extern_flag && !constant_p)
2581 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2584 /* At the global level, a non-constant initializer generates elaboration
2585 statements. Check that such statements are allowed, that is to say,
2586 not violating a No_Elaboration_Code restriction. */
2587 if (init && !init_const && global_bindings_p ())
2588 Check_Elaboration_Code_Allowed (gnat_node);
2590 /* Attach the initializer, if any. */
2591 DECL_INITIAL (var_decl) = init;
2593 /* Directly set some flags. */
2594 DECL_ARTIFICIAL (var_decl) = artificial_p;
2595 DECL_EXTERNAL (var_decl) = extern_flag;
2597 TREE_CONSTANT (var_decl) = constant_p;
2598 TREE_READONLY (var_decl) = const_flag;
2600 /* The object is public if it is external or if it is declared public
2601 and has static storage duration. */
2602 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2604 /* We need to allocate static storage for an object with static storage
2605 duration if it isn't external. */
2606 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2608 TREE_SIDE_EFFECTS (var_decl)
2609 = TREE_THIS_VOLATILE (var_decl)
2610 = TYPE_VOLATILE (type) | volatile_flag;
2612 if (TREE_SIDE_EFFECTS (var_decl))
2613 TREE_ADDRESSABLE (var_decl) = 1;
2615 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2616 try to fiddle with DECL_COMMON. However, on platforms that don't
2617 support global BSS sections, uninitialized global variables would
2618 go in DATA instead, thus increasing the size of the executable. */
2620 && TREE_CODE (var_decl) == VAR_DECL
2621 && TREE_PUBLIC (var_decl)
2622 && !have_global_bss_p ())
2623 DECL_COMMON (var_decl) = 1;
2625 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2626 since we will create an associated variable. Likewise for an external
2627 constant whose initializer is not absolute, because this would mean a
2628 global relocation in a read-only section which runs afoul of the PE-COFF
2629 run-time relocation mechanism. */
2631 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2635 && initializer_constant_valid_p (init, TREE_TYPE (init))
2636 != null_pointer_node))
2637 DECL_IGNORED_P (var_decl) = 1;
2639 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2640 if (TREE_CODE (var_decl) == VAR_DECL)
2641 process_attributes (&var_decl, &attr_list, true, gnat_node);
2643 /* Add this decl to the current binding level. */
2644 gnat_pushdecl (var_decl, gnat_node);
2646 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2648 /* Let the target mangle the name if this isn't a verbatim asm. */
2649 if (*IDENTIFIER_POINTER (asm_name) != '*')
2650 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2652 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2658 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2661 aggregate_type_contains_array_p (tree type)
2663 switch (TREE_CODE (type))
2667 case QUAL_UNION_TYPE:
2670 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2671 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2672 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2685 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2686 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2687 is the specified size of the field. If POS is nonzero, it is the bit
2688 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2689 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2690 means we are allowed to take the address of the field; if it is negative,
2691 we should not make a bitfield, which is used by make_aligning_type. */
2694 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2695 int packed, int addressable)
2697 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2699 DECL_CONTEXT (field_decl) = record_type;
2700 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2702 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2703 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2704 Likewise for an aggregate without specified position that contains an
2705 array, because in this case slices of variable length of this array
2706 must be handled by GCC and variable-sized objects need to be aligned
2707 to at least a byte boundary. */
2708 if (packed && (TYPE_MODE (type) == BLKmode
2710 && AGGREGATE_TYPE_P (type)
2711 && aggregate_type_contains_array_p (type))))
2712 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
2714 /* If a size is specified, use it. Otherwise, if the record type is packed
2715 compute a size to use, which may differ from the object's natural size.
2716 We always set a size in this case to trigger the checks for bitfield
2717 creation below, which is typically required when no position has been
2720 size = convert (bitsizetype, size);
2721 else if (packed == 1)
2723 size = rm_size (type);
2724 if (TYPE_MODE (type) == BLKmode)
2725 size = round_up (size, BITS_PER_UNIT);
2728 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2729 specified for two reasons: first if the size differs from the natural
2730 size. Second, if the alignment is insufficient. There are a number of
2731 ways the latter can be true.
2733 We never make a bitfield if the type of the field has a nonconstant size,
2734 because no such entity requiring bitfield operations should reach here.
2736 We do *preventively* make a bitfield when there might be the need for it
2737 but we don't have all the necessary information to decide, as is the case
2738 of a field with no specified position in a packed record.
2740 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2741 in layout_decl or finish_record_type to clear the bit_field indication if
2742 it is in fact not needed. */
2743 if (addressable >= 0
2745 && TREE_CODE (size) == INTEGER_CST
2746 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2747 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2748 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2750 || (TYPE_ALIGN (record_type) != 0
2751 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2753 DECL_BIT_FIELD (field_decl) = 1;
2754 DECL_SIZE (field_decl) = size;
2755 if (!packed && !pos)
2757 if (TYPE_ALIGN (record_type) != 0
2758 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2759 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
2761 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2765 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2767 /* Bump the alignment if need be, either for bitfield/packing purposes or
2768 to satisfy the type requirements if no such consideration applies. When
2769 we get the alignment from the type, indicate if this is from an explicit
2770 user request, which prevents stor-layout from lowering it later on. */
2772 unsigned int bit_align
2773 = (DECL_BIT_FIELD (field_decl) ? 1
2774 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
2776 if (bit_align > DECL_ALIGN (field_decl))
2777 SET_DECL_ALIGN (field_decl, bit_align);
2778 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2780 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2781 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2787 /* We need to pass in the alignment the DECL is known to have.
2788 This is the lowest-order bit set in POS, but no more than
2789 the alignment of the record, if one is specified. Note
2790 that an alignment of 0 is taken as infinite. */
2791 unsigned int known_align;
2793 if (tree_fits_uhwi_p (pos))
2794 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2796 known_align = BITS_PER_UNIT;
2798 if (TYPE_ALIGN (record_type)
2799 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2800 known_align = TYPE_ALIGN (record_type);
2802 layout_decl (field_decl, known_align);
2803 SET_DECL_OFFSET_ALIGN (field_decl,
2804 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2806 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2807 &DECL_FIELD_BIT_OFFSET (field_decl),
2808 DECL_OFFSET_ALIGN (field_decl), pos);
2811 /* In addition to what our caller says, claim the field is addressable if we
2812 know that its type is not suitable.
2814 The field may also be "technically" nonaddressable, meaning that even if
2815 we attempt to take the field's address we will actually get the address
2816 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2817 value we have at this point is not accurate enough, so we don't account
2818 for this here and let finish_record_type decide. */
2819 if (!addressable && !type_for_nonaliased_component_p (type))
2822 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2827 /* Return a PARM_DECL node with NAME and TYPE. */
2830 create_param_decl (tree name, tree type)
2832 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2834 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2835 can lead to various ABI violations. */
2836 if (targetm.calls.promote_prototypes (NULL_TREE)
2837 && INTEGRAL_TYPE_P (type)
2838 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2840 /* We have to be careful about biased types here. Make a subtype
2841 of integer_type_node with the proper biasing. */
2842 if (TREE_CODE (type) == INTEGER_TYPE
2843 && TYPE_BIASED_REPRESENTATION_P (type))
2846 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2847 TREE_TYPE (subtype) = integer_type_node;
2848 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2849 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2850 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2854 type = integer_type_node;
2857 DECL_ARG_TYPE (param_decl) = type;
2861 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2862 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2863 changed. GNAT_NODE is used for the position of error messages. */
2866 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2869 struct attrib *attr;
2871 for (attr = *attr_list; attr; attr = attr->next)
2874 case ATTR_MACHINE_ATTRIBUTE:
2875 Sloc_to_locus (Sloc (gnat_node), &input_location);
2876 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2877 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2880 case ATTR_LINK_ALIAS:
2881 if (!DECL_EXTERNAL (*node))
2883 TREE_STATIC (*node) = 1;
2884 assemble_alias (*node, attr->name);
2888 case ATTR_WEAK_EXTERNAL:
2890 declare_weak (*node);
2892 post_error ("?weak declarations not supported on this target",
2896 case ATTR_LINK_SECTION:
2897 if (targetm_common.have_named_sections)
2899 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2900 DECL_COMMON (*node) = 0;
2903 post_error ("?section attributes are not supported for this target",
2907 case ATTR_LINK_CONSTRUCTOR:
2908 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2909 TREE_USED (*node) = 1;
2912 case ATTR_LINK_DESTRUCTOR:
2913 DECL_STATIC_DESTRUCTOR (*node) = 1;
2914 TREE_USED (*node) = 1;
2917 case ATTR_THREAD_LOCAL_STORAGE:
2918 set_decl_tls_model (*node, decl_default_tls_model (*node));
2919 DECL_COMMON (*node) = 0;
2926 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2930 value_factor_p (tree value, HOST_WIDE_INT factor)
2932 if (tree_fits_uhwi_p (value))
2933 return tree_to_uhwi (value) % factor == 0;
2935 if (TREE_CODE (value) == MULT_EXPR)
2936 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2937 || value_factor_p (TREE_OPERAND (value, 1), factor));
2942 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2943 feed it with the elaboration of GNAT_SCOPE. */
2945 static struct deferred_decl_context_node *
2946 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2948 struct deferred_decl_context_node *new_node;
2951 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2952 new_node->decl = decl;
2953 new_node->gnat_scope = gnat_scope;
2954 new_node->force_global = force_global;
2955 new_node->types.create (1);
2956 new_node->next = deferred_decl_context_queue;
2957 deferred_decl_context_queue = new_node;
2961 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2962 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2966 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2968 n->types.safe_push (type);
2971 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2972 NULL_TREE if it is not available. */
2975 compute_deferred_decl_context (Entity_Id gnat_scope)
2979 if (present_gnu_tree (gnat_scope))
2980 context = get_gnu_tree (gnat_scope);
2984 if (TREE_CODE (context) == TYPE_DECL)
2986 const tree context_type = TREE_TYPE (context);
2988 /* Skip dummy types: only the final ones can appear in the context
2990 if (TYPE_DUMMY_P (context_type))
2993 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2996 context = context_type;
3002 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
3003 that cannot be processed yet, remove the other ones. If FORCE is true,
3004 force the processing for all nodes, use the global context when nodes don't
3005 have a GNU translation. */
3008 process_deferred_decl_context (bool force)
3010 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
3011 struct deferred_decl_context_node *node;
3015 bool processed = false;
3016 tree context = NULL_TREE;
3017 Entity_Id gnat_scope;
3021 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
3022 get the first scope. */
3023 gnat_scope = node->gnat_scope;
3024 while (Present (gnat_scope))
3026 context = compute_deferred_decl_context (gnat_scope);
3027 if (!force || context)
3029 gnat_scope = get_debug_scope (gnat_scope, NULL);
3032 /* Imported declarations must not be in a local context (i.e. not inside
3034 if (context && node->force_global > 0)
3040 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
3041 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
3045 /* If FORCE, we want to get rid of all nodes in the queue: in case there
3046 was no elaborated scope, use the global context. */
3047 if (force && !context)
3048 context = get_global_context ();
3055 DECL_CONTEXT (node->decl) = context;
3057 /* Propagate it to the TYPE_CONTEXT attributes of the requested
3059 FOR_EACH_VEC_ELT (node->types, i, t)
3061 gnat_set_type_context (t, context);
3066 /* If this node has been successfuly processed, remove it from the
3067 queue. Then move to the next node. */
3071 node->types.release ();
3079 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3082 scale_by_factor_of (tree expr, unsigned int value)
3084 unsigned HOST_WIDE_INT addend = 0;
3085 unsigned HOST_WIDE_INT factor = 1;
3087 /* Peel conversions around EXPR and try to extract bodies from function
3088 calls: it is possible to get the scale factor from size functions. */
3089 expr = remove_conversions (expr, true);
3090 if (TREE_CODE (expr) == CALL_EXPR)
3091 expr = maybe_inline_call_in_expr (expr);
3093 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3094 multiple of the scale factor we are looking for. */
3095 if (TREE_CODE (expr) == PLUS_EXPR
3096 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3097 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3099 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3100 expr = TREE_OPERAND (expr, 0);
3103 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3104 corresponding to the number of trailing zeros of the mask. */
3105 if (TREE_CODE (expr) == BIT_AND_EXPR
3106 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3108 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3111 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3119 /* If the addend is not a multiple of the factor we found, give up. In
3120 theory we could find a smaller common factor but it's useless for our
3121 needs. This situation arises when dealing with a field F1 with no
3122 alignment requirement but that is following a field F2 with such
3123 requirements. As long as we have F2's offset, we don't need alignment
3124 information to compute F1's. */
3125 if (addend % factor != 0)
3128 return factor * value;
3131 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
3132 unless we can prove these 2 fields are laid out in such a way that no gap
3133 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
3134 is the distance in bits between the end of PREV_FIELD and the starting
3135 position of CURR_FIELD. It is ignored if null. */
3138 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3140 /* If this is the first field of the record, there cannot be any gap */
3144 /* If the previous field is a union type, then return false: The only
3145 time when such a field is not the last field of the record is when
3146 there are other components at fixed positions after it (meaning there
3147 was a rep clause for every field), in which case we don't want the
3148 alignment constraint to override them. */
3149 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3152 /* If the distance between the end of prev_field and the beginning of
3153 curr_field is constant, then there is a gap if the value of this
3154 constant is not null. */
3155 if (offset && tree_fits_uhwi_p (offset))
3156 return !integer_zerop (offset);
3158 /* If the size and position of the previous field are constant,
3159 then check the sum of this size and position. There will be a gap
3160 iff it is not multiple of the current field alignment. */
3161 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3162 && tree_fits_uhwi_p (bit_position (prev_field)))
3163 return ((tree_to_uhwi (bit_position (prev_field))
3164 + tree_to_uhwi (DECL_SIZE (prev_field)))
3165 % DECL_ALIGN (curr_field) != 0);
3167 /* If both the position and size of the previous field are multiples
3168 of the current field alignment, there cannot be any gap. */
3169 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3170 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3173 /* Fallback, return that there may be a potential gap */
3177 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3181 create_label_decl (tree name, Node_Id gnat_node)
3184 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3186 SET_DECL_MODE (label_decl, VOIDmode);
3188 /* Add this decl to the current binding level. */
3189 gnat_pushdecl (label_decl, gnat_node);
3194 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3195 its assembler name, TYPE its type (a FUNCTION_TYPE or METHOD_TYPE node),
3196 PARAM_DECL_LIST the list of its parameters (a list of PARM_DECL nodes
3197 chained through the DECL_CHAIN field).
3199 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3201 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3202 definition to be made visible outside of the current compilation unit.
3204 EXTERN_FLAG is true when processing an external subprogram declaration.
3206 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3208 DEBUG_INFO_P is true if we need to write debug information for it.
3210 DEFINITION is true if the subprogram is to be considered as a definition.
3212 ATTR_LIST is the list of attributes to be attached to the subprogram.
3214 GNAT_NODE is used for the position of the decl. */
3217 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3218 enum inline_status_t inline_status, bool public_flag,
3219 bool extern_flag, bool artificial_p, bool debug_info_p,
3220 bool definition, struct attrib *attr_list,
3223 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3224 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3226 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3227 DECL_EXTERNAL (subprog_decl) = extern_flag;
3228 TREE_PUBLIC (subprog_decl) = public_flag;
3231 DECL_IGNORED_P (subprog_decl) = 1;
3233 DECL_FUNCTION_IS_DEF (subprog_decl) = 1;
3235 switch (inline_status)
3238 DECL_UNINLINABLE (subprog_decl) = 1;
3245 if (Back_End_Inlining)
3247 decl_attributes (&subprog_decl,
3248 tree_cons (get_identifier ("always_inline"),
3249 NULL_TREE, NULL_TREE),
3250 ATTR_FLAG_TYPE_IN_PLACE);
3252 /* Inline_Always guarantees that every direct call is inlined and
3253 that there is no indirect reference to the subprogram, so the
3254 instance in the original package (as well as its clones in the
3255 client packages created for inter-unit inlining) can be made
3256 private, which causes the out-of-line body to be eliminated. */
3257 TREE_PUBLIC (subprog_decl) = 0;
3260 /* ... fall through ... */
3263 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3264 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3271 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3273 /* Once everything is processed, finish the subprogram declaration. */
3274 finish_subprog_decl (subprog_decl, asm_name, type);
3276 /* Add this decl to the current binding level. */
3277 gnat_pushdecl (subprog_decl, gnat_node);
3279 /* Output the assembler code and/or RTL for the declaration. */
3280 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3282 return subprog_decl;
3285 /* Given a subprogram declaration DECL, its assembler name and its type,
3286 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
3289 finish_subprog_decl (tree decl, tree asm_name, tree type)
3292 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3295 DECL_ARTIFICIAL (result_decl) = 1;
3296 DECL_IGNORED_P (result_decl) = 1;
3297 DECL_CONTEXT (result_decl) = decl;
3298 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3299 DECL_RESULT (decl) = result_decl;
3301 /* Propagate the "const" property. */
3302 TREE_READONLY (decl) = TYPE_READONLY (type);
3304 /* Propagate the "pure" property. */
3305 DECL_PURE_P (decl) = TYPE_RESTRICT (type);
3307 /* Propagate the "noreturn" property. */
3308 TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
3312 /* Let the target mangle the name if this isn't a verbatim asm. */
3313 if (*IDENTIFIER_POINTER (asm_name) != '*')
3314 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3316 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3318 /* The expand_main_function circuitry expects "main_identifier_node" to
3319 designate the DECL_NAME of the 'main' entry point, in turn expected
3320 to be declared as the "main" function literally by default. Ada
3321 program entry points are typically declared with a different name
3322 within the binder generated file, exported as 'main' to satisfy the
3323 system expectations. Force main_identifier_node in this case. */
3324 if (asm_name == main_identifier_node)
3325 DECL_NAME (decl) = main_identifier_node;
3329 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3330 body. This routine needs to be invoked before processing the declarations
3331 appearing in the subprogram. */
3334 begin_subprog_body (tree subprog_decl)
3338 announce_function (subprog_decl);
3340 /* This function is being defined. */
3341 TREE_STATIC (subprog_decl) = 1;
3343 /* The failure of this assertion will likely come from a wrong context for
3344 the subprogram body, e.g. another procedure for a procedure declared at
3346 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3348 current_function_decl = subprog_decl;
3350 /* Enter a new binding level and show that all the parameters belong to
3354 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3355 param_decl = DECL_CHAIN (param_decl))
3356 DECL_CONTEXT (param_decl) = subprog_decl;
3358 make_decl_rtl (subprog_decl);
3361 /* Finish translating the current subprogram and set its BODY. */
3364 end_subprog_body (tree body)
3366 tree fndecl = current_function_decl;
3368 /* Attach the BLOCK for this level to the function and pop the level. */
3369 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3370 DECL_INITIAL (fndecl) = current_binding_level->block;
3373 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3374 if (TREE_CODE (body) == BIND_EXPR)
3376 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3377 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3380 DECL_SAVED_TREE (fndecl) = body;
3382 current_function_decl = decl_function_context (fndecl);
3385 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3388 rest_of_subprog_body_compilation (tree subprog_decl)
3390 /* We cannot track the location of errors past this point. */
3391 Current_Error_Node = Empty;
3393 /* If we're only annotating types, don't actually compile this function. */
3394 if (type_annotate_only)
3397 /* Dump functions before gimplification. */
3398 dump_function (TDI_original, subprog_decl);
3400 if (!decl_function_context (subprog_decl))
3401 cgraph_node::finalize_function (subprog_decl, false);
3403 /* Register this function with cgraph just far enough to get it
3404 added to our parent's nested function list. */
3405 (void) cgraph_node::get_create (subprog_decl);
3409 gnat_builtin_function (tree decl)
3411 gnat_pushdecl (decl, Empty);
3415 /* Return an integer type with the number of bits of precision given by
3416 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3417 it is a signed type. */
3420 gnat_type_for_size (unsigned precision, int unsignedp)
3425 if (precision <= 2 * MAX_BITS_PER_WORD
3426 && signed_and_unsigned_types[precision][unsignedp])
3427 return signed_and_unsigned_types[precision][unsignedp];
3430 t = make_unsigned_type (precision);
3432 t = make_signed_type (precision);
3433 TYPE_ARTIFICIAL (t) = 1;
3435 if (precision <= 2 * MAX_BITS_PER_WORD)
3436 signed_and_unsigned_types[precision][unsignedp] = t;
3440 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3441 TYPE_NAME (t) = get_identifier (type_name);
3447 /* Likewise for floating-point types. */
3450 float_type_for_precision (int precision, machine_mode mode)
3455 if (float_types[(int) mode])
3456 return float_types[(int) mode];
3458 float_types[(int) mode] = t = make_node (REAL_TYPE);
3459 TYPE_PRECISION (t) = precision;
3462 gcc_assert (TYPE_MODE (t) == mode);
3465 sprintf (type_name, "FLOAT_%d", precision);
3466 TYPE_NAME (t) = get_identifier (type_name);
3472 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3473 an unsigned type; otherwise a signed type is returned. */
3476 gnat_type_for_mode (machine_mode mode, int unsignedp)
3478 if (mode == BLKmode)
3481 if (mode == VOIDmode)
3482 return void_type_node;
3484 if (COMPLEX_MODE_P (mode))
3487 scalar_float_mode float_mode;
3488 if (is_a <scalar_float_mode> (mode, &float_mode))
3489 return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3492 scalar_int_mode int_mode;
3493 if (is_a <scalar_int_mode> (mode, &int_mode))
3494 return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
3496 if (VECTOR_MODE_P (mode))
3498 machine_mode inner_mode = GET_MODE_INNER (mode);
3499 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3501 return build_vector_type_for_mode (inner_type, mode);
3507 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3508 signedness being specified by UNSIGNEDP. */
3511 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3513 if (type_node == char_type_node)
3514 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3516 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3518 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3520 type = copy_type (type);
3521 TREE_TYPE (type) = type_node;
3523 else if (TREE_TYPE (type_node)
3524 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3525 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3527 type = copy_type (type);
3528 TREE_TYPE (type) = TREE_TYPE (type_node);
3534 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3535 transparently converted to each other. */
3538 gnat_types_compatible_p (tree t1, tree t2)
3540 enum tree_code code;
3542 /* This is the default criterion. */
3543 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3546 /* We only check structural equivalence here. */
3547 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3550 /* Vector types are also compatible if they have the same number of subparts
3551 and the same form of (scalar) element type. */
3552 if (code == VECTOR_TYPE
3553 && known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
3554 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3555 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3558 /* Array types are also compatible if they are constrained and have the same
3559 domain(s), the same component type and the same scalar storage order. */
3560 if (code == ARRAY_TYPE
3561 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3562 || (TYPE_DOMAIN (t1)
3564 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3565 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3566 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3567 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3568 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3569 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3570 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3571 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3577 /* Return true if EXPR is a useless type conversion. */
3580 gnat_useless_type_conversion (tree expr)
3582 if (CONVERT_EXPR_P (expr)
3583 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3584 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3585 return gnat_types_compatible_p (TREE_TYPE (expr),
3586 TREE_TYPE (TREE_OPERAND (expr, 0)));
3591 /* Return true if T, a {FUNCTION,METHOD}_TYPE, has the specified flags. */
3594 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3595 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3597 return TYPE_CI_CO_LIST (t) == cico_list
3598 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3599 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3600 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3603 /* EXP is an expression for the size of an object. If this size contains
3604 discriminant references, replace them with the maximum (if MAX_P) or
3605 minimum (if !MAX_P) possible value of the discriminant. */
3608 max_size (tree exp, bool max_p)
3610 enum tree_code code = TREE_CODE (exp);
3611 tree type = TREE_TYPE (exp);
3614 switch (TREE_CODE_CLASS (code))
3616 case tcc_declaration:
3620 case tcc_exceptional:
3621 gcc_assert (code == SSA_NAME);
3625 if (code == CALL_EXPR)
3630 t = maybe_inline_call_in_expr (exp);
3632 return max_size (t, max_p);
3634 n = call_expr_nargs (exp);
3636 argarray = XALLOCAVEC (tree, n);
3637 for (i = 0; i < n; i++)
3638 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3639 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3644 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3645 modify. Otherwise, we treat it like a variable. */
3646 if (CONTAINS_PLACEHOLDER_P (exp))
3648 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3649 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3652 max_size (convert (get_base_type (val_type), val), true));
3657 case tcc_comparison:
3658 return build_int_cst (type, max_p ? 1 : 0);
3661 if (code == NON_LVALUE_EXPR)
3662 return max_size (TREE_OPERAND (exp, 0), max_p);
3664 op0 = max_size (TREE_OPERAND (exp, 0),
3665 code == NEGATE_EXPR ? !max_p : max_p);
3667 if (op0 == TREE_OPERAND (exp, 0))
3670 return fold_build1 (code, type, op0);
3674 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3675 tree rhs = max_size (TREE_OPERAND (exp, 1),
3676 code == MINUS_EXPR ? !max_p : max_p);
3678 /* Special-case wanting the maximum value of a MIN_EXPR.
3679 In that case, if one side overflows, return the other. */
3680 if (max_p && code == MIN_EXPR)
3682 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3685 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3689 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3690 overflowing and the RHS a variable. */
3691 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3692 && TREE_CODE (lhs) == INTEGER_CST
3693 && TREE_OVERFLOW (lhs)
3694 && TREE_CODE (rhs) != INTEGER_CST)
3697 /* If we are going to subtract a "negative" value in an unsigned type,
3698 do the operation as an addition of the negated value, in order to
3699 avoid creating a spurious overflow below. */
3700 if (code == MINUS_EXPR
3701 && TYPE_UNSIGNED (type)
3702 && TREE_CODE (rhs) == INTEGER_CST
3703 && !TREE_OVERFLOW (rhs)
3704 && tree_int_cst_sign_bit (rhs) != 0)
3706 rhs = fold_build1 (NEGATE_EXPR, type, rhs);
3710 if (lhs == TREE_OPERAND (exp, 0) && rhs == TREE_OPERAND (exp, 1))
3713 /* We need to detect overflows so we call size_binop here. */
3714 return size_binop (code, lhs, rhs);
3717 case tcc_expression:
3718 switch (TREE_CODE_LENGTH (code))
3721 if (code == SAVE_EXPR)
3724 op0 = max_size (TREE_OPERAND (exp, 0),
3725 code == TRUTH_NOT_EXPR ? !max_p : max_p);
3727 if (op0 == TREE_OPERAND (exp, 0))
3730 return fold_build1 (code, type, op0);
3733 if (code == COMPOUND_EXPR)
3734 return max_size (TREE_OPERAND (exp, 1), max_p);
3736 op0 = max_size (TREE_OPERAND (exp, 0), max_p);
3737 op1 = max_size (TREE_OPERAND (exp, 1), max_p);
3739 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3742 return fold_build2 (code, type, op0, op1);
3745 if (code == COND_EXPR)
3747 op1 = TREE_OPERAND (exp, 1);
3748 op2 = TREE_OPERAND (exp, 2);
3754 fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3755 max_size (op1, max_p), max_size (op2, max_p));
3763 /* Other tree classes cannot happen. */
3771 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3772 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3773 Return a constructor for the template. */
3776 build_template (tree template_type, tree array_type, tree expr)
3778 vec<constructor_elt, va_gc> *template_elts = NULL;
3779 tree bound_list = NULL_TREE;
3782 while (TREE_CODE (array_type) == RECORD_TYPE
3783 && (TYPE_PADDING_P (array_type)
3784 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3785 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3787 if (TREE_CODE (array_type) == ARRAY_TYPE
3788 || (TREE_CODE (array_type) == INTEGER_TYPE
3789 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3790 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3792 /* First make the list for a CONSTRUCTOR for the template. Go down the
3793 field list of the template instead of the type chain because this
3794 array might be an Ada array of arrays and we can't tell where the
3795 nested arrays stop being the underlying object. */
3797 for (field = TYPE_FIELDS (template_type); field;
3799 ? (bound_list = TREE_CHAIN (bound_list))
3800 : (array_type = TREE_TYPE (array_type))),
3801 field = DECL_CHAIN (DECL_CHAIN (field)))
3803 tree bounds, min, max;
3805 /* If we have a bound list, get the bounds from there. Likewise
3806 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3807 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3808 This will give us a maximum range. */
3810 bounds = TREE_VALUE (bound_list);
3811 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3812 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3813 else if (expr && TREE_CODE (expr) == PARM_DECL
3814 && DECL_BY_COMPONENT_PTR_P (expr))
3815 bounds = TREE_TYPE (field);
3819 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3820 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3822 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3823 substitute it from OBJECT. */
3824 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3825 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3827 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3828 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3831 return gnat_build_constructor (template_type, template_elts);
3834 /* Return true if TYPE is suitable for the element type of a vector. */
3837 type_for_vector_element_p (tree type)
3841 if (!INTEGRAL_TYPE_P (type)
3842 && !SCALAR_FLOAT_TYPE_P (type)
3843 && !FIXED_POINT_TYPE_P (type))
3846 mode = TYPE_MODE (type);
3847 if (GET_MODE_CLASS (mode) != MODE_INT
3848 && !SCALAR_FLOAT_MODE_P (mode)
3849 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3855 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3856 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3857 attribute declaration and want to issue error messages on failure. */
3860 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3862 unsigned HOST_WIDE_INT size_int, inner_size_int;
3865 /* Silently punt on variable sizes. We can't make vector types for them,
3866 need to ignore them on front-end generated subtypes of unconstrained
3867 base types, and this attribute is for binding implementors, not end
3868 users, so we should never get there from legitimate explicit uses. */
3869 if (!tree_fits_uhwi_p (size))
3871 size_int = tree_to_uhwi (size);
3873 if (!type_for_vector_element_p (inner_type))
3876 error ("invalid element type for attribute %qs",
3877 IDENTIFIER_POINTER (attribute));
3880 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3882 if (size_int % inner_size_int)
3885 error ("vector size not an integral multiple of component size");
3892 error ("zero vector size");
3896 nunits = size_int / inner_size_int;
3897 if (nunits & (nunits - 1))
3900 error ("number of components of vector not a power of two");
3904 return build_vector_type (inner_type, nunits);
3907 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3908 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3909 processing the attribute and want to issue error messages on failure. */
3912 build_vector_type_for_array (tree array_type, tree attribute)
3914 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3915 TYPE_SIZE_UNIT (array_type),
3920 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3924 /* Build a type to be used to represent an aliased object whose nominal type
3925 is an unconstrained array. This consists of a RECORD_TYPE containing a
3926 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3927 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3928 an arbitrary unconstrained object. Use NAME as the name of the record.
3929 DEBUG_INFO_P is true if we need to write debug information for the type. */
3932 build_unc_object_type (tree template_type, tree object_type, tree name,
3936 tree type = make_node (RECORD_TYPE);
3938 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3939 NULL_TREE, NULL_TREE, 0, 1);
3941 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3942 NULL_TREE, NULL_TREE, 0, 1);
3944 TYPE_NAME (type) = name;
3945 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3946 DECL_CHAIN (template_field) = array_field;
3947 finish_record_type (type, template_field, 0, true);
3949 /* Declare it now since it will never be declared otherwise. This is
3950 necessary to ensure that its subtrees are properly marked. */
3951 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3953 /* template_type will not be used elsewhere than here, so to keep the debug
3954 info clean and in order to avoid scoping issues, make decl its
3956 gnat_set_type_context (template_type, decl);
3961 /* Same, taking a thin or fat pointer type instead of a template type. */
3964 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3965 tree name, bool debug_info_p)
3969 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3972 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3973 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3974 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3977 build_unc_object_type (template_type, object_type, name, debug_info_p);
3980 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3981 In the normal case this is just two adjustments, but we have more to
3982 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3985 update_pointer_to (tree old_type, tree new_type)
3987 tree ptr = TYPE_POINTER_TO (old_type);
3988 tree ref = TYPE_REFERENCE_TO (old_type);
3991 /* If this is the main variant, process all the other variants first. */
3992 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3993 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3994 update_pointer_to (t, new_type);
3996 /* If no pointers and no references, we are done. */
4000 /* Merge the old type qualifiers in the new type.
4002 Each old variant has qualifiers for specific reasons, and the new
4003 designated type as well. Each set of qualifiers represents useful
4004 information grabbed at some point, and merging the two simply unifies
4005 these inputs into the final type description.
4007 Consider for instance a volatile type frozen after an access to constant
4008 type designating it; after the designated type's freeze, we get here with
4009 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4010 when the access type was processed. We will make a volatile and readonly
4011 designated type, because that's what it really is.
4013 We might also get here for a non-dummy OLD_TYPE variant with different
4014 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4015 to private record type elaboration (see the comments around the call to
4016 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4017 the qualifiers in those cases too, to avoid accidentally discarding the
4018 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4020 = build_qualified_type (new_type,
4021 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4023 /* If old type and new type are identical, there is nothing to do. */
4024 if (old_type == new_type)
4027 /* Otherwise, first handle the simple case. */
4028 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4030 tree new_ptr, new_ref;
4032 /* If pointer or reference already points to new type, nothing to do.
4033 This can happen as update_pointer_to can be invoked multiple times
4034 on the same couple of types because of the type variants. */
4035 if ((ptr && TREE_TYPE (ptr) == new_type)
4036 || (ref && TREE_TYPE (ref) == new_type))
4039 /* Chain PTR and its variants at the end. */
4040 new_ptr = TYPE_POINTER_TO (new_type);
4043 while (TYPE_NEXT_PTR_TO (new_ptr))
4044 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4045 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4048 TYPE_POINTER_TO (new_type) = ptr;
4050 /* Now adjust them. */
4051 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4052 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4054 TREE_TYPE (t) = new_type;
4055 if (TYPE_NULL_BOUNDS (t))
4056 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4059 /* Chain REF and its variants at the end. */
4060 new_ref = TYPE_REFERENCE_TO (new_type);
4063 while (TYPE_NEXT_REF_TO (new_ref))
4064 new_ref = TYPE_NEXT_REF_TO (new_ref);
4065 TYPE_NEXT_REF_TO (new_ref) = ref;
4068 TYPE_REFERENCE_TO (new_type) = ref;
4070 /* Now adjust them. */
4071 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4072 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4073 TREE_TYPE (t) = new_type;
4075 TYPE_POINTER_TO (old_type) = NULL_TREE;
4076 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4079 /* Now deal with the unconstrained array case. In this case the pointer
4080 is actually a record where both fields are pointers to dummy nodes.
4081 Turn them into pointers to the correct types using update_pointer_to.
4082 Likewise for the pointer to the object record (thin pointer). */
4085 tree new_ptr = TYPE_POINTER_TO (new_type);
4087 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4089 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4090 since update_pointer_to can be invoked multiple times on the same
4091 couple of types because of the type variants. */
4092 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4096 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4097 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4100 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4101 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4103 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4104 TYPE_OBJECT_RECORD_TYPE (new_type));
4106 TYPE_POINTER_TO (old_type) = NULL_TREE;
4107 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4111 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4112 unconstrained one. This involves making or finding a template. */
4115 convert_to_fat_pointer (tree type, tree expr)
4117 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4118 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4119 tree etype = TREE_TYPE (expr);
4121 vec<constructor_elt, va_gc> *v;
4124 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4125 array (compare_fat_pointers ensures that this is the full discriminant)
4126 and a valid pointer to the bounds. This latter property is necessary
4127 since the compiler can hoist the load of the bounds done through it. */
4128 if (integer_zerop (expr))
4130 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4131 tree null_bounds, t;
4133 if (TYPE_NULL_BOUNDS (ptr_template_type))
4134 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4137 /* The template type can still be dummy at this point so we build an
4138 empty constructor. The middle-end will fill it in with zeros. */
4139 t = build_constructor (template_type, NULL);
4140 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4141 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4142 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4145 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4146 fold_convert (p_array_type, null_pointer_node));
4147 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4148 t = build_constructor (type, v);
4149 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4150 TREE_CONSTANT (t) = 0;
4151 TREE_STATIC (t) = 1;
4156 /* If EXPR is a thin pointer, make template and data from the record. */
4157 if (TYPE_IS_THIN_POINTER_P (etype))
4159 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4161 expr = gnat_protect_expr (expr);
4163 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4164 the thin pointer value has been shifted so we shift it back to get
4165 the template address. */
4166 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4169 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4170 fold_build1 (NEGATE_EXPR, sizetype,
4172 (DECL_CHAIN (field))));
4174 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4178 /* Otherwise we explicitly take the address of the fields. */
4181 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4183 = build_unary_op (ADDR_EXPR, NULL_TREE,
4184 build_component_ref (expr, field, false));
4185 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4186 build_component_ref (expr, DECL_CHAIN (field),
4191 /* Otherwise, build the constructor for the template. */
4194 = build_unary_op (ADDR_EXPR, NULL_TREE,
4195 build_template (template_type, TREE_TYPE (etype),
4198 /* The final result is a constructor for the fat pointer.
4200 If EXPR is an argument of a foreign convention subprogram, the type it
4201 points to is directly the component type. In this case, the expression
4202 type may not match the corresponding FIELD_DECL type at this point, so we
4203 call "convert" here to fix that up if necessary. This type consistency is
4204 required, for instance because it ensures that possible later folding of
4205 COMPONENT_REFs against this constructor always yields something of the
4206 same type as the initial reference.
4208 Note that the call to "build_template" above is still fine because it
4209 will only refer to the provided TEMPLATE_TYPE in this case. */
4210 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4211 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4212 return gnat_build_constructor (type, v);
4215 /* Create an expression whose value is that of EXPR,
4216 converted to type TYPE. The TREE_TYPE of the value
4217 is always TYPE. This function implements all reasonable
4218 conversions; callers should filter out those that are
4219 not permitted by the language being compiled. */
4222 convert (tree type, tree expr)
4224 tree etype = TREE_TYPE (expr);
4225 enum tree_code ecode = TREE_CODE (etype);
4226 enum tree_code code = TREE_CODE (type);
4228 /* If the expression is already of the right type, we are done. */
4232 /* If both input and output have padding and are of variable size, do this
4233 as an unchecked conversion. Likewise if one is a mere variant of the
4234 other, so we avoid a pointless unpad/repad sequence. */
4235 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4236 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4237 && (!TREE_CONSTANT (TYPE_SIZE (type))
4238 || !TREE_CONSTANT (TYPE_SIZE (etype))
4239 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4240 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4241 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4244 /* If the output type has padding, convert to the inner type and make a
4245 constructor to build the record, unless a variable size is involved. */
4246 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4248 /* If we previously converted from another type and our type is
4249 of variable size, remove the conversion to avoid the need for
4250 variable-sized temporaries. Likewise for a conversion between
4251 original and packable version. */
4252 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4253 && (!TREE_CONSTANT (TYPE_SIZE (type))
4254 || (ecode == RECORD_TYPE
4255 && TYPE_NAME (etype)
4256 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4257 expr = TREE_OPERAND (expr, 0);
4259 /* If we are just removing the padding from expr, convert the original
4260 object if we have variable size in order to avoid the need for some
4261 variable-sized temporaries. Likewise if the padding is a variant
4262 of the other, so we avoid a pointless unpad/repad sequence. */
4263 if (TREE_CODE (expr) == COMPONENT_REF
4264 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4265 && (!TREE_CONSTANT (TYPE_SIZE (type))
4266 || TYPE_MAIN_VARIANT (type)
4267 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4268 || (ecode == RECORD_TYPE
4269 && TYPE_NAME (etype)
4270 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4271 return convert (type, TREE_OPERAND (expr, 0));
4273 /* If the inner type is of self-referential size and the expression type
4274 is a record, do this as an unchecked conversion unless both types are
4275 essentially the same. But first pad the expression if possible to
4276 have the same size on both sides. */
4277 if (ecode == RECORD_TYPE
4278 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
4279 && TYPE_MAIN_VARIANT (etype)
4280 != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
4282 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4283 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4284 false, false, false, true),
4286 return unchecked_convert (type, expr, false);
4289 /* If we are converting between array types with variable size, do the
4290 final conversion as an unchecked conversion, again to avoid the need
4291 for some variable-sized temporaries. If valid, this conversion is
4292 very likely purely technical and without real effects. */
4293 if (ecode == ARRAY_TYPE
4294 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4295 && !TREE_CONSTANT (TYPE_SIZE (etype))
4296 && !TREE_CONSTANT (TYPE_SIZE (type)))
4297 return unchecked_convert (type,
4298 convert (TREE_TYPE (TYPE_FIELDS (type)),
4302 tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);
4304 /* If converting to the inner type has already created a CONSTRUCTOR with
4305 the right size, then reuse it instead of creating another one. This
4306 can happen for the padding type built to overalign local variables. */
4307 if (TREE_CODE (t) == VIEW_CONVERT_EXPR
4308 && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
4309 && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
4310 && tree_int_cst_equal (TYPE_SIZE (type),
4311 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
4312 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));
4314 vec<constructor_elt, va_gc> *v;
4316 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
4317 return gnat_build_constructor (type, v);
4320 /* If the input type has padding, remove it and convert to the output type.
4321 The conditions ordering is arranged to ensure that the output type is not
4322 a padding type here, as it is not clear whether the conversion would
4323 always be correct if this was to happen. */
4324 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4328 /* If we have just converted to this padded type, just get the
4329 inner expression. */
4330 if (TREE_CODE (expr) == CONSTRUCTOR)
4331 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4333 /* Otherwise, build an explicit component reference. */
4335 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4337 return convert (type, unpadded);
4340 /* If the input is a biased type, convert first to the base type and add
4341 the bias. Note that the bias must go through a full conversion to the
4342 base type, lest it is itself a biased value; this happens for subtypes
4344 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4345 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4346 fold_convert (TREE_TYPE (etype), expr),
4347 convert (TREE_TYPE (etype),
4348 TYPE_MIN_VALUE (etype))));
4350 /* If the input is a justified modular type, we need to extract the actual
4351 object before converting it to any other type with the exceptions of an
4352 unconstrained array or of a mere type variant. It is useful to avoid the
4353 extraction and conversion in the type variant case because it could end
4354 up replacing a VAR_DECL expr by a constructor and we might be about the
4355 take the address of the result. */
4356 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4357 && code != UNCONSTRAINED_ARRAY_TYPE
4358 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4360 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4362 /* If converting to a type that contains a template, convert to the data
4363 type and then build the template. */
4364 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4366 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4367 vec<constructor_elt, va_gc> *v;
4370 /* If the source already has a template, get a reference to the
4371 associated array only, as we are going to rebuild a template
4372 for the target type anyway. */
4373 expr = maybe_unconstrained_array (expr);
4375 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4376 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4377 obj_type, NULL_TREE));
4379 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4380 convert (obj_type, expr));
4381 return gnat_build_constructor (type, v);
4384 /* There are some cases of expressions that we process specially. */
4385 switch (TREE_CODE (expr))
4391 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4392 conversion in gnat_expand_expr. NULL_EXPR does not represent
4393 and actual value, so no conversion is needed. */
4394 expr = copy_node (expr);
4395 TREE_TYPE (expr) = type;
4399 /* If we are converting a STRING_CST to another constrained array type,
4400 just make a new one in the proper type. */
4401 if (code == ecode && AGGREGATE_TYPE_P (etype)
4402 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4403 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4405 expr = copy_node (expr);
4406 TREE_TYPE (expr) = type;
4412 /* If we are converting a VECTOR_CST to a mere type variant, just make
4413 a new one in the proper type. */
4414 if (code == ecode && gnat_types_compatible_p (type, etype))
4416 expr = copy_node (expr);
4417 TREE_TYPE (expr) = type;
4423 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4424 another padding type around the same type, just make a new one in
4427 && (gnat_types_compatible_p (type, etype)
4428 || (code == RECORD_TYPE
4429 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4430 && TREE_TYPE (TYPE_FIELDS (type))
4431 == TREE_TYPE (TYPE_FIELDS (etype)))))
4433 expr = copy_node (expr);
4434 TREE_TYPE (expr) = type;
4435 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4439 /* Likewise for a conversion between original and packable version, or
4440 conversion between types of the same size and with the same list of
4441 fields, but we have to work harder to preserve type consistency. */
4443 && code == RECORD_TYPE
4444 && (TYPE_NAME (type) == TYPE_NAME (etype)
4445 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4448 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4449 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4450 vec<constructor_elt, va_gc> *v;
4452 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4453 unsigned HOST_WIDE_INT idx;
4456 /* Whether we need to clear TREE_CONSTANT et al. on the output
4457 constructor when we convert in place. */
4458 bool clear_constant = false;
4460 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4462 /* Skip the missing fields in the CONSTRUCTOR. */
4463 while (efield && field && !SAME_FIELD_P (efield, index))
4465 efield = DECL_CHAIN (efield);
4466 field = DECL_CHAIN (field);
4468 /* The field must be the same. */
4469 if (!(efield && field && SAME_FIELD_P (efield, field)))
4472 = {field, convert (TREE_TYPE (field), value)};
4473 v->quick_push (elt);
4475 /* If packing has made this field a bitfield and the input
4476 value couldn't be emitted statically any more, we need to
4477 clear TREE_CONSTANT on our output. */
4479 && TREE_CONSTANT (expr)
4480 && !CONSTRUCTOR_BITFIELD_P (efield)
4481 && CONSTRUCTOR_BITFIELD_P (field)
4482 && !initializer_constant_valid_for_bitfield_p (value))
4483 clear_constant = true;
4485 efield = DECL_CHAIN (efield);
4486 field = DECL_CHAIN (field);
4489 /* If we have been able to match and convert all the input fields
4490 to their output type, convert in place now. We'll fallback to a
4491 view conversion downstream otherwise. */
4494 expr = copy_node (expr);
4495 TREE_TYPE (expr) = type;
4496 CONSTRUCTOR_ELTS (expr) = v;
4498 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4503 /* Likewise for a conversion between array type and vector type with a
4504 compatible representative array. */
4505 else if (code == VECTOR_TYPE
4506 && ecode == ARRAY_TYPE
4507 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4510 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4511 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4512 vec<constructor_elt, va_gc> *v;
4513 unsigned HOST_WIDE_INT ix;
4516 /* Build a VECTOR_CST from a *constant* array constructor. */
4517 if (TREE_CONSTANT (expr))
4519 bool constant_p = true;
4521 /* Iterate through elements and check if all constructor
4522 elements are *_CSTs. */
4523 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4524 if (!CONSTANT_CLASS_P (value))
4531 return build_vector_from_ctor (type,
4532 CONSTRUCTOR_ELTS (expr));
4535 /* Otherwise, build a regular vector constructor. */
4537 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4539 constructor_elt elt = {NULL_TREE, value};
4540 v->quick_push (elt);
4542 expr = copy_node (expr);
4543 TREE_TYPE (expr) = type;
4544 CONSTRUCTOR_ELTS (expr) = v;
4549 case UNCONSTRAINED_ARRAY_REF:
4550 /* First retrieve the underlying array. */
4551 expr = maybe_unconstrained_array (expr);
4552 etype = TREE_TYPE (expr);
4553 ecode = TREE_CODE (etype);
4556 case VIEW_CONVERT_EXPR:
4558 /* GCC 4.x is very sensitive to type consistency overall, and view
4559 conversions thus are very frequent. Even though just "convert"ing
4560 the inner operand to the output type is fine in most cases, it
4561 might expose unexpected input/output type mismatches in special
4562 circumstances so we avoid such recursive calls when we can. */
4563 tree op0 = TREE_OPERAND (expr, 0);
4565 /* If we are converting back to the original type, we can just
4566 lift the input conversion. This is a common occurrence with
4567 switches back-and-forth amongst type variants. */
4568 if (type == TREE_TYPE (op0))
4571 /* Otherwise, if we're converting between two aggregate or vector
4572 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4573 target type in place or to just convert the inner expression. */
4574 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4575 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4577 /* If we are converting between mere variants, we can just
4578 substitute the VIEW_CONVERT_EXPR in place. */
4579 if (gnat_types_compatible_p (type, etype))
4580 return build1 (VIEW_CONVERT_EXPR, type, op0);
4582 /* Otherwise, we may just bypass the input view conversion unless
4583 one of the types is a fat pointer, which is handled by
4584 specialized code below which relies on exact type matching. */
4585 else if (!TYPE_IS_FAT_POINTER_P (type)
4586 && !TYPE_IS_FAT_POINTER_P (etype))
4587 return convert (type, op0);
4597 /* Check for converting to a pointer to an unconstrained array. */
4598 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4599 return convert_to_fat_pointer (type, expr);
4601 /* If we are converting between two aggregate or vector types that are mere
4602 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4603 to a vector type from its representative array type. */
4604 else if ((code == ecode
4605 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4606 && gnat_types_compatible_p (type, etype))
4607 || (code == VECTOR_TYPE
4608 && ecode == ARRAY_TYPE
4609 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4611 return build1 (VIEW_CONVERT_EXPR, type, expr);
4613 /* If we are converting between tagged types, try to upcast properly.
4614 But don't do it if we are just annotating types since tagged types
4615 aren't fully laid out in this mode. */
4616 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4617 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type)
4618 && !type_annotate_only)
4620 tree child_etype = etype;
4622 tree field = TYPE_FIELDS (child_etype);
4623 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4624 return build_component_ref (expr, field, false);
4625 child_etype = TREE_TYPE (field);
4626 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4629 /* If we are converting from a smaller form of record type back to it, just
4630 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4631 size on both sides. */
4632 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4633 && smaller_form_type_p (etype, type))
4635 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4636 false, false, false, true),
4638 return build1 (VIEW_CONVERT_EXPR, type, expr);
4641 /* In all other cases of related types, make a NOP_EXPR. */
4642 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4643 return fold_convert (type, expr);
4648 return fold_build1 (CONVERT_EXPR, type, expr);
4651 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4652 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4653 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4654 return unchecked_convert (type, expr, false);
4656 /* If the output is a biased type, convert first to the base type and
4657 subtract the bias. Note that the bias itself must go through a full
4658 conversion to the base type, lest it is a biased value; this happens
4659 for subtypes of biased types. */
4660 if (TYPE_BIASED_REPRESENTATION_P (type))
4661 return fold_convert (type,
4662 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4663 convert (TREE_TYPE (type), expr),
4664 convert (TREE_TYPE (type),
4665 TYPE_MIN_VALUE (type))));
4667 /* ... fall through ... */
4671 /* If we are converting an additive expression to an integer type
4672 with lower precision, be wary of the optimization that can be
4673 applied by convert_to_integer. There are 2 problematic cases:
4674 - if the first operand was originally of a biased type,
4675 because we could be recursively called to convert it
4676 to an intermediate type and thus rematerialize the
4677 additive operator endlessly,
4678 - if the expression contains a placeholder, because an
4679 intermediate conversion that changes the sign could
4680 be inserted and thus introduce an artificial overflow
4681 at compile time when the placeholder is substituted. */
4682 if (code == INTEGER_TYPE
4683 && ecode == INTEGER_TYPE
4684 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4685 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4687 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4689 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4690 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4691 || CONTAINS_PLACEHOLDER_P (expr))
4692 return build1 (NOP_EXPR, type, expr);
4695 return fold (convert_to_integer (type, expr));
4698 case REFERENCE_TYPE:
4699 /* If converting between two thin pointers, adjust if needed to account
4700 for differing offsets from the base pointer, depending on whether
4701 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4702 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4705 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4706 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4709 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4710 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4712 tree byte_diff = size_diffop (type_pos, etype_pos);
4714 expr = build1 (NOP_EXPR, type, expr);
4715 if (integer_zerop (byte_diff))
4718 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4719 fold_convert (sizetype, byte_diff));
4722 /* If converting fat pointer to normal or thin pointer, get the pointer
4723 to the array and then convert it. */
4724 if (TYPE_IS_FAT_POINTER_P (etype))
4725 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4727 return fold (convert_to_pointer (type, expr));
4730 return fold (convert_to_real (type, expr));
4733 /* Do a normal conversion between scalar and justified modular type. */
4734 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4736 vec<constructor_elt, va_gc> *v;
4739 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4740 convert (TREE_TYPE (TYPE_FIELDS (type)),
4742 return gnat_build_constructor (type, v);
4745 /* In these cases, assume the front-end has validated the conversion.
4746 If the conversion is valid, it will be a bit-wise conversion, so
4747 it can be viewed as an unchecked conversion. */
4748 return unchecked_convert (type, expr, false);
4751 /* Do a normal conversion between unconstrained and constrained array
4752 type, assuming the latter is a constrained version of the former. */
4753 if (TREE_CODE (expr) == INDIRECT_REF
4754 && ecode == ARRAY_TYPE
4755 && TREE_TYPE (etype) == TREE_TYPE (type))
4757 tree ptr_type = build_pointer_type (type);
4758 tree t = build_unary_op (INDIRECT_REF, NULL_TREE,
4759 fold_convert (ptr_type,
4760 TREE_OPERAND (expr, 0)));
4761 TREE_READONLY (t) = TREE_READONLY (expr);
4762 TREE_THIS_NOTRAP (t) = TREE_THIS_NOTRAP (expr);
4766 /* In these cases, assume the front-end has validated the conversion.
4767 If the conversion is valid, it will be a bit-wise conversion, so
4768 it can be viewed as an unchecked conversion. */
4769 return unchecked_convert (type, expr, false);
4772 /* This is a either a conversion between a tagged type and some
4773 subtype, which we have to mark as a UNION_TYPE because of
4774 overlapping fields or a conversion of an Unchecked_Union. */
4775 return unchecked_convert (type, expr, false);
4777 case UNCONSTRAINED_ARRAY_TYPE:
4778 /* If the input is a VECTOR_TYPE, convert to the representative
4779 array type first. */
4780 if (ecode == VECTOR_TYPE)
4782 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4783 etype = TREE_TYPE (expr);
4784 ecode = TREE_CODE (etype);
4787 /* If EXPR is a constrained array, take its address, convert it to a
4788 fat pointer, and then dereference it. Likewise if EXPR is a
4789 record containing both a template and a constrained array.
4790 Note that a record representing a justified modular type
4791 always represents a packed constrained array. */
4792 if (ecode == ARRAY_TYPE
4793 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4794 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4795 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4798 (INDIRECT_REF, NULL_TREE,
4799 convert_to_fat_pointer (TREE_TYPE (type),
4800 build_unary_op (ADDR_EXPR,
4803 /* Do something very similar for converting one unconstrained
4804 array to another. */
4805 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4807 build_unary_op (INDIRECT_REF, NULL_TREE,
4808 convert (TREE_TYPE (type),
4809 build_unary_op (ADDR_EXPR,
4815 return fold (convert_to_complex (type, expr));
4822 /* Create an expression whose value is that of EXPR converted to the common
4823 index type, which is sizetype. EXPR is supposed to be in the base type
4824 of the GNAT index type. Calling it is equivalent to doing
4826 convert (sizetype, expr)
4828 but we try to distribute the type conversion with the knowledge that EXPR
4829 cannot overflow in its type. This is a best-effort approach and we fall
4830 back to the above expression as soon as difficulties are encountered.
4832 This is necessary to overcome issues that arise when the GNAT base index
4833 type and the GCC common index type (sizetype) don't have the same size,
4834 which is quite frequent on 64-bit architectures. In this case, and if
4835 the GNAT base index type is signed but the iteration type of the loop has
4836 been forced to unsigned, the loop scalar evolution engine cannot compute
4837 a simple evolution for the general induction variables associated with the
4838 array indices, because it will preserve the wrap-around semantics in the
4839 unsigned type of their "inner" part. As a result, many loop optimizations
4842 The solution is to use a special (basic) induction variable that is at
4843 least as large as sizetype, and to express the aforementioned general
4844 induction variables in terms of this induction variable, eliminating
4845 the problematic intermediate truncation to the GNAT base index type.
4846 This is possible as long as the original expression doesn't overflow
4847 and if the middle-end hasn't introduced artificial overflows in the
4848 course of the various simplification it can make to the expression. */
4851 convert_to_index_type (tree expr)
4853 enum tree_code code = TREE_CODE (expr);
4854 tree type = TREE_TYPE (expr);
4856 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4857 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4858 if (TYPE_UNSIGNED (type) || !optimize)
4859 return convert (sizetype, expr);
4864 /* The main effect of the function: replace a loop parameter with its
4865 associated special induction variable. */
4866 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4867 expr = DECL_INDUCTION_VAR (expr);
4872 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4873 /* Bail out as soon as we suspect some sort of type frobbing. */
4874 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4875 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4879 /* ... fall through ... */
4881 case NON_LVALUE_EXPR:
4882 return fold_build1 (code, sizetype,
4883 convert_to_index_type (TREE_OPERAND (expr, 0)));
4888 return fold_build2 (code, sizetype,
4889 convert_to_index_type (TREE_OPERAND (expr, 0)),
4890 convert_to_index_type (TREE_OPERAND (expr, 1)));
4893 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4894 convert_to_index_type (TREE_OPERAND (expr, 1)));
4897 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4898 convert_to_index_type (TREE_OPERAND (expr, 1)),
4899 convert_to_index_type (TREE_OPERAND (expr, 2)));
4905 return convert (sizetype, expr);
4908 /* Remove all conversions that are done in EXP. This includes converting
4909 from a padded type or to a justified modular type. If TRUE_ADDRESS
4910 is true, always return the address of the containing object even if
4911 the address is not bit-aligned. */
4914 remove_conversions (tree exp, bool true_address)
4916 switch (TREE_CODE (exp))
4920 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4921 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4923 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
4927 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4928 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4932 case VIEW_CONVERT_EXPR:
4933 case NON_LVALUE_EXPR:
4934 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4943 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4944 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4945 likewise return an expression pointing to the underlying array. */
4948 maybe_unconstrained_array (tree exp)
4950 enum tree_code code = TREE_CODE (exp);
4951 tree type = TREE_TYPE (exp);
4953 switch (TREE_CODE (type))
4955 case UNCONSTRAINED_ARRAY_TYPE:
4956 if (code == UNCONSTRAINED_ARRAY_REF)
4958 const bool read_only = TREE_READONLY (exp);
4959 const bool no_trap = TREE_THIS_NOTRAP (exp);
4961 exp = TREE_OPERAND (exp, 0);
4962 type = TREE_TYPE (exp);
4964 if (TREE_CODE (exp) == COND_EXPR)
4967 = build_unary_op (INDIRECT_REF, NULL_TREE,
4968 build_component_ref (TREE_OPERAND (exp, 1),
4972 = build_unary_op (INDIRECT_REF, NULL_TREE,
4973 build_component_ref (TREE_OPERAND (exp, 2),
4977 exp = build3 (COND_EXPR,
4978 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4979 TREE_OPERAND (exp, 0), op1, op2);
4983 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4984 build_component_ref (exp,
4987 TREE_READONLY (exp) = read_only;
4988 TREE_THIS_NOTRAP (exp) = no_trap;
4992 else if (code == NULL_EXPR)
4993 exp = build1 (NULL_EXPR,
4994 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4995 TREE_OPERAND (exp, 0));
4999 /* If this is a padded type and it contains a template, convert to the
5000 unpadded type first. */
5001 if (TYPE_PADDING_P (type)
5002 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5003 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
5005 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5006 code = TREE_CODE (exp);
5007 type = TREE_TYPE (exp);
5010 if (TYPE_CONTAINS_TEMPLATE_P (type))
5012 /* If the array initializer is a box, return NULL_TREE. */
5013 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
5016 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
5018 type = TREE_TYPE (exp);
5020 /* If the array type is padded, convert to the unpadded type. */
5021 if (TYPE_IS_PADDING_P (type))
5022 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5033 /* Return true if EXPR is an expression that can be folded as an operand
5034 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5037 can_fold_for_view_convert_p (tree expr)
5041 /* The folder will fold NOP_EXPRs between integral types with the same
5042 precision (in the middle-end's sense). We cannot allow it if the
5043 types don't have the same precision in the Ada sense as well. */
5044 if (TREE_CODE (expr) != NOP_EXPR)
5047 t1 = TREE_TYPE (expr);
5048 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5050 /* Defer to the folder for non-integral conversions. */
5051 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5054 /* Only fold conversions that preserve both precisions. */
5055 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5056 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5062 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5063 If NOTRUNC_P is true, truncation operations should be suppressed.
5065 Special care is required with (source or target) integral types whose
5066 precision is not equal to their size, to make sure we fetch or assign
5067 the value bits whose location might depend on the endianness, e.g.
5069 Rmsize : constant := 8;
5070 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5072 type Bit_Array is array (1 .. Rmsize) of Boolean;
5073 pragma Pack (Bit_Array);
5075 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5077 Value : Int := 2#1000_0001#;
5078 Vbits : Bit_Array := To_Bit_Array (Value);
5080 we expect the 8 bits at Vbits'Address to always contain Value, while
5081 their original location depends on the endianness, at Value'Address
5082 on a little-endian architecture but not on a big-endian one.
5084 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
5085 the bits between the precision and the size are filled, because of the
5086 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
5087 So we use the special predicate type_unsigned_for_rm above. */
5090 unchecked_convert (tree type, tree expr, bool notrunc_p)
5092 tree etype = TREE_TYPE (expr);
5093 enum tree_code ecode = TREE_CODE (etype);
5094 enum tree_code code = TREE_CODE (type);
5096 = (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype));
5098 = (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type));
5100 = (AGGREGATE_TYPE_P (etype) && TYPE_REVERSE_STORAGE_ORDER (etype));
5102 = (AGGREGATE_TYPE_P (type) && TYPE_REVERSE_STORAGE_ORDER (type));
5106 /* If the expression is already of the right type, we are done. */
5110 /* If both types are integral just do a normal conversion.
5111 Likewise for a conversion to an unconstrained array. */
5112 if (((INTEGRAL_TYPE_P (type)
5113 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5114 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5115 && (INTEGRAL_TYPE_P (etype)
5116 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5117 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5118 || code == UNCONSTRAINED_ARRAY_TYPE)
5122 tree ntype = copy_type (etype);
5123 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5124 TYPE_MAIN_VARIANT (ntype) = ntype;
5125 expr = build1 (NOP_EXPR, ntype, expr);
5130 tree rtype = copy_type (type);
5131 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5132 TYPE_MAIN_VARIANT (rtype) = rtype;
5133 expr = convert (rtype, expr);
5134 expr = build1 (NOP_EXPR, type, expr);
5137 expr = convert (type, expr);
5140 /* If we are converting to an integral type whose precision is not equal
5141 to its size, first unchecked convert to a record type that contains a
5142 field of the given precision. Then extract the result from the field.
5144 There is a subtlety if the source type is an aggregate type with reverse
5145 storage order because its representation is not contiguous in the native
5146 storage order, i.e. a direct unchecked conversion to an integral type
5147 with N bits of precision cannot read the first N bits of the aggregate
5148 type. To overcome it, we do an unchecked conversion to an integral type
5149 with reverse storage order and return the resulting value. This also
5150 ensures that the result of the unchecked conversion doesn't depend on
5151 the endianness of the target machine, but only on the storage order of
5154 Finally, for the sake of consistency, we do the unchecked conversion
5155 to an integral type with reverse storage order as soon as the source
5156 type is an aggregate type with reverse storage order, even if there
5157 are no considerations of precision or size involved. Ultimately, we
5158 further extend this processing to any scalar type. */
5159 else if ((INTEGRAL_TYPE_P (type)
5160 && TYPE_RM_SIZE (type)
5161 && ((c = tree_int_cst_compare (TYPE_RM_SIZE (type),
5162 TYPE_SIZE (type))) < 0
5164 || (SCALAR_FLOAT_TYPE_P (type) && ereverse))
5166 tree rec_type = make_node (RECORD_TYPE);
5167 tree field_type, field;
5169 TYPE_REVERSE_STORAGE_ORDER (rec_type) = ereverse;
5173 const unsigned HOST_WIDE_INT prec
5174 = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5175 if (type_unsigned_for_rm (type))
5176 field_type = make_unsigned_type (prec);
5178 field_type = make_signed_type (prec);
5179 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5184 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5185 NULL_TREE, bitsize_zero_node, c < 0, 0);
5187 finish_record_type (rec_type, field, 1, false);
5189 expr = unchecked_convert (rec_type, expr, notrunc_p);
5190 expr = build_component_ref (expr, field, false);
5191 expr = fold_build1 (NOP_EXPR, type, expr);
5194 /* Similarly if we are converting from an integral type whose precision is
5195 not equal to its size, first copy into a field of the given precision
5196 and unchecked convert the record type.
5198 The same considerations as above apply if the target type is an aggregate
5199 type with reverse storage order and we also proceed similarly. */
5200 else if ((INTEGRAL_TYPE_P (etype)
5201 && TYPE_RM_SIZE (etype)
5202 && ((c = tree_int_cst_compare (TYPE_RM_SIZE (etype),
5203 TYPE_SIZE (etype))) < 0
5205 || (SCALAR_FLOAT_TYPE_P (etype) && reverse))
5207 tree rec_type = make_node (RECORD_TYPE);
5208 vec<constructor_elt, va_gc> *v;
5210 tree field_type, field;
5212 TYPE_REVERSE_STORAGE_ORDER (rec_type) = reverse;
5216 const unsigned HOST_WIDE_INT prec
5217 = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5218 if (type_unsigned_for_rm (etype))
5219 field_type = make_unsigned_type (prec);
5221 field_type = make_signed_type (prec);
5222 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5227 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5228 NULL_TREE, bitsize_zero_node, c < 0, 0);
5230 finish_record_type (rec_type, field, 1, false);
5232 expr = fold_build1 (NOP_EXPR, field_type, expr);
5233 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5234 expr = gnat_build_constructor (rec_type, v);
5235 expr = unchecked_convert (type, expr, notrunc_p);
5238 /* If we are converting from a scalar type to a type with a different size,
5239 we need to pad to have the same size on both sides.
5241 ??? We cannot do it unconditionally because unchecked conversions are
5242 used liberally by the front-end to implement polymorphism, e.g. in:
5244 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5245 return p___size__4 (p__object!(S191s.all));
5247 so we skip all expressions that are references. */
5248 else if (!REFERENCE_CLASS_P (expr)
5249 && !AGGREGATE_TYPE_P (etype)
5250 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5251 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5255 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5256 false, false, false, true),
5258 expr = unchecked_convert (type, expr, notrunc_p);
5262 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5263 false, false, false, true);
5264 expr = unchecked_convert (rec_type, expr, notrunc_p);
5265 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5269 /* We have a special case when we are converting between two unconstrained
5270 array types. In that case, take the address, convert the fat pointer
5271 types, and dereference. */
5272 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5273 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5274 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5275 build_unary_op (ADDR_EXPR, NULL_TREE,
5278 /* Another special case is when we are converting to a vector type from its
5279 representative array type; this a regular conversion. */
5280 else if (code == VECTOR_TYPE
5281 && ecode == ARRAY_TYPE
5282 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5284 expr = convert (type, expr);
5286 /* And, if the array type is not the representative, we try to build an
5287 intermediate vector type of which the array type is the representative
5288 and to do the unchecked conversion between the vector types, in order
5289 to enable further simplifications in the middle-end. */
5290 else if (code == VECTOR_TYPE
5291 && ecode == ARRAY_TYPE
5292 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5294 expr = convert (tem, expr);
5295 return unchecked_convert (type, expr, notrunc_p);
5298 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5299 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5300 else if (TREE_CODE (expr) == CONSTRUCTOR
5301 && code == RECORD_TYPE
5302 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5304 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5305 Empty, false, false, false, true),
5307 return unchecked_convert (type, expr, notrunc_p);
5310 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5313 expr = maybe_unconstrained_array (expr);
5314 etype = TREE_TYPE (expr);
5315 ecode = TREE_CODE (etype);
5316 if (can_fold_for_view_convert_p (expr))
5317 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5319 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5322 /* If the result is a non-biased integral type whose precision is not equal
5323 to its size, sign- or zero-extend the result. But we need not do this
5324 if the input is also an integral type and both are unsigned or both are
5325 signed and have the same precision. */
5329 && INTEGRAL_TYPE_P (type)
5330 && (type_rm_size = TYPE_RM_SIZE (type))
5331 && tree_int_cst_compare (type_rm_size, TYPE_SIZE (type)) < 0
5332 && !(INTEGRAL_TYPE_P (etype)
5333 && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
5334 && (type_unsigned_for_rm (type)
5335 || tree_int_cst_compare (type_rm_size,
5336 TYPE_RM_SIZE (etype)
5337 ? TYPE_RM_SIZE (etype)
5338 : TYPE_SIZE (etype)) == 0)))
5340 if (integer_zerop (type_rm_size))
5341 expr = build_int_cst (type, 0);
5345 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5346 type_unsigned_for_rm (type));
5348 = convert (base_type,
5349 size_binop (MINUS_EXPR,
5350 TYPE_SIZE (type), type_rm_size));
5353 build_binary_op (RSHIFT_EXPR, base_type,
5354 build_binary_op (LSHIFT_EXPR, base_type,
5362 /* An unchecked conversion should never raise Constraint_Error. The code
5363 below assumes that GCC's conversion routines overflow the same way that
5364 the underlying hardware does. This is probably true. In the rare case
5365 when it is false, we can rely on the fact that such conversions are
5366 erroneous anyway. */
5367 if (TREE_CODE (expr) == INTEGER_CST)
5368 TREE_OVERFLOW (expr) = 0;
5370 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5371 show no longer constant. */
5372 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5373 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5375 TREE_CONSTANT (expr) = 0;
5380 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5381 the latter being a record type as predicated by Is_Record_Type. */
5384 tree_code_for_record_type (Entity_Id gnat_type)
5386 Node_Id component_list, component;
5388 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5389 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5390 if (!Is_Unchecked_Union (gnat_type))
5393 gnat_type = Implementation_Base_Type (gnat_type);
5395 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5397 for (component = First_Non_Pragma (Component_Items (component_list));
5398 Present (component);
5399 component = Next_Non_Pragma (component))
5400 if (Ekind (Defining_Entity (component)) == E_Component)
5406 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5407 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5408 according to the presence of an alignment clause on the type or, if it
5409 is an array, on the component type. */
5412 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5414 gnat_type = Underlying_Type (gnat_type);
5416 *align_clause = Present (Alignment_Clause (gnat_type));
5418 if (Is_Array_Type (gnat_type))
5420 gnat_type = Underlying_Type (Component_Type (gnat_type));
5421 if (Present (Alignment_Clause (gnat_type)))
5422 *align_clause = true;
5425 if (!Is_Floating_Point_Type (gnat_type))
5428 if (UI_To_Int (Esize (gnat_type)) != 64)
5434 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5435 size is greater or equal to 64 bits, or an array of such a type. Set
5436 ALIGN_CLAUSE according to the presence of an alignment clause on the
5437 type or, if it is an array, on the component type. */
5440 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5442 gnat_type = Underlying_Type (gnat_type);
5444 *align_clause = Present (Alignment_Clause (gnat_type));
5446 if (Is_Array_Type (gnat_type))
5448 gnat_type = Underlying_Type (Component_Type (gnat_type));
5449 if (Present (Alignment_Clause (gnat_type)))
5450 *align_clause = true;
5453 if (!Is_Scalar_Type (gnat_type))
5456 if (UI_To_Int (Esize (gnat_type)) < 64)
5462 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5463 component of an aggregate type. */
5466 type_for_nonaliased_component_p (tree gnu_type)
5468 /* If the type is passed by reference, we may have pointers to the
5469 component so it cannot be made non-aliased. */
5470 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5473 /* We used to say that any component of aggregate type is aliased
5474 because the front-end may take 'Reference of it. The front-end
5475 has been enhanced in the meantime so as to use a renaming instead
5476 in most cases, but the back-end can probably take the address of
5477 such a component too so we go for the conservative stance.
5479 For instance, we might need the address of any array type, even
5480 if normally passed by copy, to construct a fat pointer if the
5481 component is used as an actual for an unconstrained formal.
5483 Likewise for record types: even if a specific record subtype is
5484 passed by copy, the parent type might be passed by ref (e.g. if
5485 it's of variable size) and we might take the address of a child
5486 component to pass to a parent formal. We have no way to check
5487 for such conditions here. */
5488 if (AGGREGATE_TYPE_P (gnu_type))
5494 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5497 smaller_form_type_p (tree type, tree orig_type)
5501 /* We're not interested in variants here. */
5502 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5505 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5506 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5509 size = TYPE_SIZE (type);
5510 osize = TYPE_SIZE (orig_type);
5512 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5515 return tree_int_cst_lt (size, osize) != 0;
5518 /* Return whether EXPR, which is the renamed object in an object renaming
5519 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5520 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
5523 can_materialize_object_renaming_p (Node_Id expr)
5527 expr = Original_Node (expr);
5532 case N_Expanded_Name:
5533 if (!Present (Renamed_Object (Entity (expr))))
5535 expr = Renamed_Object (Entity (expr));
5538 case N_Selected_Component:
5540 if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5544 = Normalized_First_Bit (Entity (Selector_Name (expr)));
5545 if (!UI_Is_In_Int_Range (bitpos)
5546 || (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5549 expr = Prefix (expr);
5553 case N_Indexed_Component:
5556 const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5558 if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5561 expr = Prefix (expr);
5565 case N_Explicit_Dereference:
5566 expr = Prefix (expr);
5575 /* Perform final processing on global declarations. */
5577 static GTY (()) tree dummy_global;
5580 gnat_write_global_declarations (void)
5585 /* If we have declared types as used at the global level, insert them in
5586 the global hash table. We use a dummy variable for this purpose, but
5587 we need to build it unconditionally to avoid -fcompare-debug issues. */
5588 if (first_global_object_name)
5590 struct varpool_node *node;
5593 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5595 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5597 DECL_HARD_REGISTER (dummy_global) = 1;
5598 TREE_STATIC (dummy_global) = 1;
5599 node = varpool_node::get_create (dummy_global);
5600 node->definition = 1;
5601 node->force_output = 1;
5603 if (types_used_by_cur_var_decl)
5604 while (!types_used_by_cur_var_decl->is_empty ())
5606 tree t = types_used_by_cur_var_decl->pop ();
5607 types_used_by_var_decl_insert (t, dummy_global);
5611 /* Output debug information for all global type declarations first. This
5612 ensures that global types whose compilation hasn't been finalized yet,
5613 for example pointers to Taft amendment types, have their compilation
5614 finalized in the right context. */
5615 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5616 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5617 debug_hooks->type_decl (iter, false);
5619 /* Output imported functions. */
5620 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5621 if (TREE_CODE (iter) == FUNCTION_DECL
5622 && DECL_EXTERNAL (iter)
5623 && DECL_INITIAL (iter) == NULL
5624 && !DECL_IGNORED_P (iter)
5625 && DECL_FUNCTION_IS_DEF (iter))
5626 debug_hooks->early_global_decl (iter);
5628 /* Then output the global variables. We need to do that after the debug
5629 information for global types is emitted so that they are finalized. Skip
5630 external global variables, unless we need to emit debug info for them:
5631 this is useful for imported variables, for instance. */
5632 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5633 if (TREE_CODE (iter) == VAR_DECL
5634 && (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5635 rest_of_decl_compilation (iter, true, 0);
5637 /* Output the imported modules/declarations. In GNAT, these are only
5638 materializing subprogram. */
5639 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5640 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5641 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5642 DECL_CONTEXT (iter), false, false);
5645 /* ************************************************************************
5646 * * GCC builtins support *
5647 * ************************************************************************ */
5649 /* The general scheme is fairly simple:
5651 For each builtin function/type to be declared, gnat_install_builtins calls
5652 internal facilities which eventually get to gnat_pushdecl, which in turn
5653 tracks the so declared builtin function decls in the 'builtin_decls' global
5654 datastructure. When an Intrinsic subprogram declaration is processed, we
5655 search this global datastructure to retrieve the associated BUILT_IN DECL
5658 /* Search the chain of currently available builtin declarations for a node
5659 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5660 found, if any, or NULL_TREE otherwise. */
5662 builtin_decl_for (tree name)
5667 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5668 if (DECL_NAME (decl) == name)
5674 /* The code below eventually exposes gnat_install_builtins, which declares
5675 the builtin types and functions we might need, either internally or as
5676 user accessible facilities.
5678 ??? This is a first implementation shot, still in rough shape. It is
5679 heavily inspired from the "C" family implementation, with chunks copied
5680 verbatim from there.
5682 Two obvious improvement candidates are:
5683 o Use a more efficient name/decl mapping scheme
5684 o Devise a middle-end infrastructure to avoid having to copy
5685 pieces between front-ends. */
5687 /* ----------------------------------------------------------------------- *
5688 * BUILTIN ELEMENTARY TYPES *
5689 * ----------------------------------------------------------------------- */
5691 /* Standard data types to be used in builtin argument declarations. */
5695 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5697 CTI_CONST_STRING_TYPE,
5702 static tree c_global_trees[CTI_MAX];
5704 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5705 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5706 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5708 /* ??? In addition some attribute handlers, we currently don't support a
5709 (small) number of builtin-types, which in turns inhibits support for a
5710 number of builtin functions. */
5711 #define wint_type_node void_type_node
5712 #define intmax_type_node void_type_node
5713 #define uintmax_type_node void_type_node
5715 /* Used to help initialize the builtin-types.def table. When a type of
5716 the correct size doesn't exist, use error_mark_node instead of NULL.
5717 The later results in segfaults even when a decl using the type doesn't
5721 builtin_type_for_size (int size, bool unsignedp)
5723 tree type = gnat_type_for_size (size, unsignedp);
5724 return type ? type : error_mark_node;
5727 /* Build/push the elementary type decls that builtin functions/types
5731 install_builtin_elementary_types (void)
5733 signed_size_type_node = gnat_signed_type_for (size_type_node);
5734 pid_type_node = integer_type_node;
5736 string_type_node = build_pointer_type (char_type_node);
5737 const_string_type_node
5738 = build_pointer_type (build_qualified_type
5739 (char_type_node, TYPE_QUAL_CONST));
5742 /* ----------------------------------------------------------------------- *
5743 * BUILTIN FUNCTION TYPES *
5744 * ----------------------------------------------------------------------- */
5746 /* Now, builtin function types per se. */
5750 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5751 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5752 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5753 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5754 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5755 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5756 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5757 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5759 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5761 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5762 ARG6, ARG7, ARG8) NAME,
5763 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5764 ARG6, ARG7, ARG8, ARG9) NAME,
5765 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5766 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5767 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5768 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5769 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5770 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5771 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5772 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5773 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5774 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5776 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5778 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5780 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5781 #include "builtin-types.def"
5782 #undef DEF_PRIMITIVE_TYPE
5783 #undef DEF_FUNCTION_TYPE_0
5784 #undef DEF_FUNCTION_TYPE_1
5785 #undef DEF_FUNCTION_TYPE_2
5786 #undef DEF_FUNCTION_TYPE_3
5787 #undef DEF_FUNCTION_TYPE_4
5788 #undef DEF_FUNCTION_TYPE_5
5789 #undef DEF_FUNCTION_TYPE_6
5790 #undef DEF_FUNCTION_TYPE_7
5791 #undef DEF_FUNCTION_TYPE_8
5792 #undef DEF_FUNCTION_TYPE_9
5793 #undef DEF_FUNCTION_TYPE_10
5794 #undef DEF_FUNCTION_TYPE_11
5795 #undef DEF_FUNCTION_TYPE_VAR_0
5796 #undef DEF_FUNCTION_TYPE_VAR_1
5797 #undef DEF_FUNCTION_TYPE_VAR_2
5798 #undef DEF_FUNCTION_TYPE_VAR_3
5799 #undef DEF_FUNCTION_TYPE_VAR_4
5800 #undef DEF_FUNCTION_TYPE_VAR_5
5801 #undef DEF_FUNCTION_TYPE_VAR_6
5802 #undef DEF_FUNCTION_TYPE_VAR_7
5803 #undef DEF_POINTER_TYPE
5807 typedef enum c_builtin_type builtin_type;
5809 /* A temporary array used in communication with def_fn_type. */
5810 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5812 /* A helper function for install_builtin_types. Build function type
5813 for DEF with return type RET and N arguments. If VAR is true, then the
5814 function should be variadic after those N arguments.
5816 Takes special care not to ICE if any of the types involved are
5817 error_mark_node, which indicates that said type is not in fact available
5818 (see builtin_type_for_size). In which case the function type as a whole
5819 should be error_mark_node. */
5822 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5825 tree *args = XALLOCAVEC (tree, n);
5830 for (i = 0; i < n; ++i)
5832 builtin_type a = (builtin_type) va_arg (list, int);
5833 t = builtin_types[a];
5834 if (t == error_mark_node)
5839 t = builtin_types[ret];
5840 if (t == error_mark_node)
5843 t = build_varargs_function_type_array (t, n, args);
5845 t = build_function_type_array (t, n, args);
5848 builtin_types[def] = t;
5852 /* Build the builtin function types and install them in the builtin_types
5853 array for later use in builtin function decls. */
5856 install_builtin_function_types (void)
5858 tree va_list_ref_type_node;
5859 tree va_list_arg_type_node;
5861 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5863 va_list_arg_type_node = va_list_ref_type_node =
5864 build_pointer_type (TREE_TYPE (va_list_type_node));
5868 va_list_arg_type_node = va_list_type_node;
5869 va_list_ref_type_node = build_reference_type (va_list_type_node);
5872 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5873 builtin_types[ENUM] = VALUE;
5874 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5875 def_fn_type (ENUM, RETURN, 0, 0);
5876 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5877 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5878 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5879 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5880 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5881 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5882 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5883 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5884 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5885 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5886 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5888 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5889 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5891 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5892 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5894 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5896 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5897 ARG6, ARG7, ARG8, ARG9) \
5898 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5900 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5901 ARG6, ARG7, ARG8, ARG9, ARG10) \
5902 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5903 ARG7, ARG8, ARG9, ARG10);
5904 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5905 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5906 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5907 ARG7, ARG8, ARG9, ARG10, ARG11);
5908 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5909 def_fn_type (ENUM, RETURN, 1, 0);
5910 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5911 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5912 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5913 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5914 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5915 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5916 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5917 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5918 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5919 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5920 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5922 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5923 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5925 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5926 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5927 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5929 #include "builtin-types.def"
5931 #undef DEF_PRIMITIVE_TYPE
5932 #undef DEF_FUNCTION_TYPE_0
5933 #undef DEF_FUNCTION_TYPE_1
5934 #undef DEF_FUNCTION_TYPE_2
5935 #undef DEF_FUNCTION_TYPE_3
5936 #undef DEF_FUNCTION_TYPE_4
5937 #undef DEF_FUNCTION_TYPE_5
5938 #undef DEF_FUNCTION_TYPE_6
5939 #undef DEF_FUNCTION_TYPE_7
5940 #undef DEF_FUNCTION_TYPE_8
5941 #undef DEF_FUNCTION_TYPE_9
5942 #undef DEF_FUNCTION_TYPE_10
5943 #undef DEF_FUNCTION_TYPE_11
5944 #undef DEF_FUNCTION_TYPE_VAR_0
5945 #undef DEF_FUNCTION_TYPE_VAR_1
5946 #undef DEF_FUNCTION_TYPE_VAR_2
5947 #undef DEF_FUNCTION_TYPE_VAR_3
5948 #undef DEF_FUNCTION_TYPE_VAR_4
5949 #undef DEF_FUNCTION_TYPE_VAR_5
5950 #undef DEF_FUNCTION_TYPE_VAR_6
5951 #undef DEF_FUNCTION_TYPE_VAR_7
5952 #undef DEF_POINTER_TYPE
5953 builtin_types[(int) BT_LAST] = NULL_TREE;
5956 /* ----------------------------------------------------------------------- *
5957 * BUILTIN ATTRIBUTES *
5958 * ----------------------------------------------------------------------- */
5960 enum built_in_attribute
5962 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5963 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5964 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5965 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5966 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5967 #include "builtin-attrs.def"
5968 #undef DEF_ATTR_NULL_TREE
5970 #undef DEF_ATTR_STRING
5971 #undef DEF_ATTR_IDENT
5972 #undef DEF_ATTR_TREE_LIST
5976 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5979 install_builtin_attributes (void)
5981 /* Fill in the built_in_attributes array. */
5982 #define DEF_ATTR_NULL_TREE(ENUM) \
5983 built_in_attributes[(int) ENUM] = NULL_TREE;
5984 #define DEF_ATTR_INT(ENUM, VALUE) \
5985 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5986 #define DEF_ATTR_STRING(ENUM, VALUE) \
5987 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5988 #define DEF_ATTR_IDENT(ENUM, STRING) \
5989 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5990 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5991 built_in_attributes[(int) ENUM] \
5992 = tree_cons (built_in_attributes[(int) PURPOSE], \
5993 built_in_attributes[(int) VALUE], \
5994 built_in_attributes[(int) CHAIN]);
5995 #include "builtin-attrs.def"
5996 #undef DEF_ATTR_NULL_TREE
5998 #undef DEF_ATTR_STRING
5999 #undef DEF_ATTR_IDENT
6000 #undef DEF_ATTR_TREE_LIST
6003 /* Handle a "const" attribute; arguments as in
6004 struct attribute_spec.handler. */
6007 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6008 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6011 if (TREE_CODE (*node) == FUNCTION_DECL)
6012 TREE_READONLY (*node) = 1;
6014 *no_add_attrs = true;
6019 /* Handle a "nothrow" attribute; arguments as in
6020 struct attribute_spec.handler. */
6023 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6024 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6027 if (TREE_CODE (*node) == FUNCTION_DECL)
6028 TREE_NOTHROW (*node) = 1;
6030 *no_add_attrs = true;
6035 /* Handle a "pure" attribute; arguments as in
6036 struct attribute_spec.handler. */
6039 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6040 int ARG_UNUSED (flags), bool *no_add_attrs)
6042 if (TREE_CODE (*node) == FUNCTION_DECL)
6043 DECL_PURE_P (*node) = 1;
6044 /* TODO: support types. */
6047 warning (OPT_Wattributes, "%qs attribute ignored",
6048 IDENTIFIER_POINTER (name));
6049 *no_add_attrs = true;
6055 /* Handle a "no vops" attribute; arguments as in
6056 struct attribute_spec.handler. */
6059 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6060 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6061 bool *ARG_UNUSED (no_add_attrs))
6063 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6064 DECL_IS_NOVOPS (*node) = 1;
6068 /* Helper for nonnull attribute handling; fetch the operand number
6069 from the attribute argument list. */
6072 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6074 /* Verify the arg number is a constant. */
6075 if (!tree_fits_uhwi_p (arg_num_expr))
6078 *valp = TREE_INT_CST_LOW (arg_num_expr);
6082 /* Handle the "nonnull" attribute. */
6084 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6085 tree args, int ARG_UNUSED (flags),
6089 unsigned HOST_WIDE_INT attr_arg_num;
6091 /* If no arguments are specified, all pointer arguments should be
6092 non-null. Verify a full prototype is given so that the arguments
6093 will have the correct types when we actually check them later.
6094 Avoid diagnosing type-generic built-ins since those have no
6098 if (!prototype_p (type)
6099 && (!TYPE_ATTRIBUTES (type)
6100 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
6102 error ("nonnull attribute without arguments on a non-prototype");
6103 *no_add_attrs = true;
6108 /* Argument list specified. Verify that each argument number references
6109 a pointer argument. */
6110 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6112 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6114 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6116 error ("nonnull argument has invalid operand number (argument %lu)",
6117 (unsigned long) attr_arg_num);
6118 *no_add_attrs = true;
6122 if (prototype_p (type))
6124 function_args_iterator iter;
6127 function_args_iter_init (&iter, type);
6128 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6130 argument = function_args_iter_cond (&iter);
6131 if (!argument || ck_num == arg_num)
6136 || TREE_CODE (argument) == VOID_TYPE)
6138 error ("nonnull argument with out-of-range operand number "
6139 "(argument %lu, operand %lu)",
6140 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6141 *no_add_attrs = true;
6145 if (TREE_CODE (argument) != POINTER_TYPE)
6147 error ("nonnull argument references non-pointer operand "
6148 "(argument %lu, operand %lu)",
6149 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6150 *no_add_attrs = true;
6159 /* Handle a "sentinel" attribute. */
6162 handle_sentinel_attribute (tree *node, tree name, tree args,
6163 int ARG_UNUSED (flags), bool *no_add_attrs)
6165 if (!prototype_p (*node))
6167 warning (OPT_Wattributes,
6168 "%qs attribute requires prototypes with named arguments",
6169 IDENTIFIER_POINTER (name));
6170 *no_add_attrs = true;
6174 if (!stdarg_p (*node))
6176 warning (OPT_Wattributes,
6177 "%qs attribute only applies to variadic functions",
6178 IDENTIFIER_POINTER (name));
6179 *no_add_attrs = true;
6185 tree position = TREE_VALUE (args);
6187 if (TREE_CODE (position) != INTEGER_CST)
6189 warning (0, "requested position is not an integer constant");
6190 *no_add_attrs = true;
6194 if (tree_int_cst_lt (position, integer_zero_node))
6196 warning (0, "requested position is less than zero");
6197 *no_add_attrs = true;
6205 /* Handle a "noreturn" attribute; arguments as in
6206 struct attribute_spec.handler. */
6209 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6210 int ARG_UNUSED (flags), bool *no_add_attrs)
6212 tree type = TREE_TYPE (*node);
6214 /* See FIXME comment in c_common_attribute_table. */
6215 if (TREE_CODE (*node) == FUNCTION_DECL)
6216 TREE_THIS_VOLATILE (*node) = 1;
6217 else if (TREE_CODE (type) == POINTER_TYPE
6218 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6220 = build_pointer_type
6221 (change_qualified_type (TREE_TYPE (type), TYPE_QUAL_VOLATILE));
6224 warning (OPT_Wattributes, "%qs attribute ignored",
6225 IDENTIFIER_POINTER (name));
6226 *no_add_attrs = true;
6232 /* Handle a "noinline" attribute; arguments as in
6233 struct attribute_spec.handler. */
6236 handle_noinline_attribute (tree *node, tree name,
6237 tree ARG_UNUSED (args),
6238 int ARG_UNUSED (flags), bool *no_add_attrs)
6240 if (TREE_CODE (*node) == FUNCTION_DECL)
6242 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6244 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6245 "with attribute %qs", name, "always_inline");
6246 *no_add_attrs = true;
6249 DECL_UNINLINABLE (*node) = 1;
6253 warning (OPT_Wattributes, "%qE attribute ignored", name);
6254 *no_add_attrs = true;
6260 /* Handle a "noclone" attribute; arguments as in
6261 struct attribute_spec.handler. */
6264 handle_noclone_attribute (tree *node, tree name,
6265 tree ARG_UNUSED (args),
6266 int ARG_UNUSED (flags), bool *no_add_attrs)
6268 if (TREE_CODE (*node) != FUNCTION_DECL)
6270 warning (OPT_Wattributes, "%qE attribute ignored", name);
6271 *no_add_attrs = true;
6277 /* Handle a "leaf" attribute; arguments as in
6278 struct attribute_spec.handler. */
6281 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6282 int ARG_UNUSED (flags), bool *no_add_attrs)
6284 if (TREE_CODE (*node) != FUNCTION_DECL)
6286 warning (OPT_Wattributes, "%qE attribute ignored", name);
6287 *no_add_attrs = true;
6289 if (!TREE_PUBLIC (*node))
6291 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6292 *no_add_attrs = true;
6298 /* Handle a "always_inline" attribute; arguments as in
6299 struct attribute_spec.handler. */
6302 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6303 int ARG_UNUSED (flags), bool *no_add_attrs)
6305 if (TREE_CODE (*node) == FUNCTION_DECL)
6307 /* Set the attribute and mark it for disregarding inline limits. */
6308 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6312 warning (OPT_Wattributes, "%qE attribute ignored", name);
6313 *no_add_attrs = true;
6319 /* Handle a "malloc" attribute; arguments as in
6320 struct attribute_spec.handler. */
6323 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6324 int ARG_UNUSED (flags), bool *no_add_attrs)
6326 if (TREE_CODE (*node) == FUNCTION_DECL
6327 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6328 DECL_IS_MALLOC (*node) = 1;
6331 warning (OPT_Wattributes, "%qs attribute ignored",
6332 IDENTIFIER_POINTER (name));
6333 *no_add_attrs = true;
6339 /* Fake handler for attributes we don't properly support. */
6342 fake_attribute_handler (tree * ARG_UNUSED (node),
6343 tree ARG_UNUSED (name),
6344 tree ARG_UNUSED (args),
6345 int ARG_UNUSED (flags),
6346 bool * ARG_UNUSED (no_add_attrs))
6351 /* Handle a "type_generic" attribute. */
6354 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6355 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6356 bool * ARG_UNUSED (no_add_attrs))
6358 /* Ensure we have a function type. */
6359 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6361 /* Ensure we have a variadic function. */
6362 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6367 /* Handle a "vector_size" attribute; arguments as in
6368 struct attribute_spec.handler. */
6371 handle_vector_size_attribute (tree *node, tree name, tree args,
6372 int ARG_UNUSED (flags), bool *no_add_attrs)
6377 *no_add_attrs = true;
6379 /* We need to provide for vector pointers, vector arrays, and
6380 functions returning vectors. For example:
6382 __attribute__((vector_size(16))) short *foo;
6384 In this case, the mode is SI, but the type being modified is
6385 HI, so we need to look further. */
6386 while (POINTER_TYPE_P (type)
6387 || TREE_CODE (type) == FUNCTION_TYPE
6388 || TREE_CODE (type) == ARRAY_TYPE)
6389 type = TREE_TYPE (type);
6391 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6395 /* Build back pointers if needed. */
6396 *node = reconstruct_complex_type (*node, vector_type);
6401 /* Handle a "vector_type" attribute; arguments as in
6402 struct attribute_spec.handler. */
6405 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6406 int ARG_UNUSED (flags), bool *no_add_attrs)
6411 *no_add_attrs = true;
6413 if (TREE_CODE (type) != ARRAY_TYPE)
6415 error ("attribute %qs applies to array types only",
6416 IDENTIFIER_POINTER (name));
6420 vector_type = build_vector_type_for_array (type, name);
6424 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6425 *node = vector_type;
6430 /* ----------------------------------------------------------------------- *
6431 * BUILTIN FUNCTIONS *
6432 * ----------------------------------------------------------------------- */
6434 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6435 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6436 if nonansi_p and flag_no_nonansi_builtin. */
6439 def_builtin_1 (enum built_in_function fncode,
6441 enum built_in_class fnclass,
6442 tree fntype, tree libtype,
6443 bool both_p, bool fallback_p,
6444 bool nonansi_p ATTRIBUTE_UNUSED,
6445 tree fnattrs, bool implicit_p)
6448 const char *libname;
6450 /* Preserve an already installed decl. It most likely was setup in advance
6451 (e.g. as part of the internal builtins) for specific reasons. */
6452 if (builtin_decl_explicit (fncode))
6455 if (fntype == error_mark_node)
6458 gcc_assert ((!both_p && !fallback_p)
6459 || !strncmp (name, "__builtin_",
6460 strlen ("__builtin_")));
6462 libname = name + strlen ("__builtin_");
6463 decl = add_builtin_function (name, fntype, fncode, fnclass,
6464 (fallback_p ? libname : NULL),
6467 /* ??? This is normally further controlled by command-line options
6468 like -fno-builtin, but we don't have them for Ada. */
6469 add_builtin_function (libname, libtype, fncode, fnclass,
6472 set_builtin_decl (fncode, decl, implicit_p);
6475 static int flag_isoc94 = 0;
6476 static int flag_isoc99 = 0;
6477 static int flag_isoc11 = 0;
6479 /* Install what the common builtins.def offers. */
6482 install_builtin_functions (void)
6484 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6485 NONANSI_P, ATTRS, IMPLICIT, COND) \
6487 def_builtin_1 (ENUM, NAME, CLASS, \
6488 builtin_types[(int) TYPE], \
6489 builtin_types[(int) LIBTYPE], \
6490 BOTH_P, FALLBACK_P, NONANSI_P, \
6491 built_in_attributes[(int) ATTRS], IMPLICIT);
6492 #include "builtins.def"
6495 /* ----------------------------------------------------------------------- *
6496 * BUILTIN FUNCTIONS *
6497 * ----------------------------------------------------------------------- */
6499 /* Install the builtin functions we might need. */
6502 gnat_install_builtins (void)
6504 install_builtin_elementary_types ();
6505 install_builtin_function_types ();
6506 install_builtin_attributes ();
6508 /* Install builtins used by generic middle-end pieces first. Some of these
6509 know about internal specificities and control attributes accordingly, for
6510 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6511 the generic definition from builtins.def. */
6512 build_common_builtin_nodes ();
6514 /* Now, install the target specific builtins, such as the AltiVec family on
6515 ppc, and the common set as exposed by builtins.def. */
6516 targetm.init_builtins ();
6517 install_builtin_functions ();
6520 #include "gt-ada-utils.h"
6521 #include "gtype-ada.h"