1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2019, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
32 #include "stringpool.h"
34 #include "diagnostic.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
44 #include "common/common-target.h"
45 #include "langhooks.h"
46 #include "tree-dump.h"
47 #include "tree-inline.h"
60 /* If nonzero, pretend we are allocating at global level. */
63 /* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66 int double_float_alignment;
68 /* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71 int double_scalar_alignment;
73 /* True if floating-point arithmetics may use wider intermediate results. */
74 bool fp_arith_may_widen = true;
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82 /* Likewise, but with extra info for each of the possible raise reasons. */
83 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_stack_protect_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
101 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
103 /* Fake handler for attributes we don't properly support, typically because
104 they'd require dragging a lot of the common-c front-end circuitry. */
105 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
107 /* Table of machine-independent internal attributes for Ada. We support
108 this minimal set of attributes to accommodate the needs of builtins. */
109 const struct attribute_spec gnat_internal_attribute_table[] =
111 /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
112 affects_type_identity, handler, exclude } */
113 { "const", 0, 0, true, false, false, false,
114 handle_const_attribute, NULL },
115 { "nothrow", 0, 0, true, false, false, false,
116 handle_nothrow_attribute, NULL },
117 { "pure", 0, 0, true, false, false, false,
118 handle_pure_attribute, NULL },
119 { "no vops", 0, 0, true, false, false, false,
120 handle_novops_attribute, NULL },
121 { "nonnull", 0, -1, false, true, true, false,
122 handle_nonnull_attribute, NULL },
123 { "sentinel", 0, 1, false, true, true, false,
124 handle_sentinel_attribute, NULL },
125 { "noreturn", 0, 0, true, false, false, false,
126 handle_noreturn_attribute, NULL },
127 { "stack_protect",0, 0, true, false, false, false,
128 handle_stack_protect_attribute, NULL },
129 { "noinline", 0, 0, true, false, false, false,
130 handle_noinline_attribute, NULL },
131 { "noclone", 0, 0, true, false, false, false,
132 handle_noclone_attribute, NULL },
133 { "leaf", 0, 0, true, false, false, false,
134 handle_leaf_attribute, NULL },
135 { "always_inline",0, 0, true, false, false, false,
136 handle_always_inline_attribute, NULL },
137 { "malloc", 0, 0, true, false, false, false,
138 handle_malloc_attribute, NULL },
139 { "type generic", 0, 0, false, true, true, false,
140 handle_type_generic_attribute, NULL },
142 { "vector_size", 1, 1, false, true, false, false,
143 handle_vector_size_attribute, NULL },
144 { "vector_type", 0, 0, false, true, false, false,
145 handle_vector_type_attribute, NULL },
146 { "may_alias", 0, 0, false, true, false, false, NULL, NULL },
148 /* ??? format and format_arg are heavy and not supported, which actually
149 prevents support for stdio builtins, which we however declare as part
150 of the common builtins.def contents. */
151 { "format", 3, 3, false, true, true, false, fake_attribute_handler,
153 { "format_arg", 1, 1, false, true, true, false, fake_attribute_handler,
156 { NULL, 0, 0, false, false, false, false, NULL, NULL }
159 /* Associates a GNAT tree node to a GCC tree node. It is used in
160 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
161 of `save_gnu_tree' for more info. */
162 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
164 #define GET_GNU_TREE(GNAT_ENTITY) \
165 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
167 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
168 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
170 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
171 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
173 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
174 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
176 #define GET_DUMMY_NODE(GNAT_ENTITY) \
177 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
179 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
180 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
182 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
183 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
185 /* This variable keeps a table for types for each precision so that we only
186 allocate each of them once. Signed and unsigned types are kept separate.
188 Note that these types are only used when fold-const requests something
189 special. Perhaps we should NOT share these types; we'll see how it
191 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
193 /* Likewise for float types, but record these by mode. */
194 static GTY(()) tree float_types[NUM_MACHINE_MODES];
196 /* For each binding contour we allocate a binding_level structure to indicate
197 the binding depth. */
199 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
200 /* The binding level containing this one (the enclosing binding level). */
201 struct gnat_binding_level *chain;
202 /* The BLOCK node for this level. */
204 /* If nonzero, the setjmp buffer that needs to be updated for any
205 variable-sized definition within this context. */
209 /* The binding level currently in effect. */
210 static GTY(()) struct gnat_binding_level *current_binding_level;
212 /* A chain of gnat_binding_level structures awaiting reuse. */
213 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
215 /* The context to be used for global declarations. */
216 static GTY(()) tree global_context;
218 /* An array of global declarations. */
219 static GTY(()) vec<tree, va_gc> *global_decls;
221 /* An array of builtin function declarations. */
222 static GTY(()) vec<tree, va_gc> *builtin_decls;
224 /* A chain of unused BLOCK nodes. */
225 static GTY((deletable)) tree free_block_chain;
227 /* A hash table of padded types. It is modelled on the generic type
228 hash table in tree.c, which must thus be used as a reference. */
230 struct GTY((for_user)) pad_type_hash
236 struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
238 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
239 static bool equal (pad_type_hash *a, pad_type_hash *b);
242 keep_cache_entry (pad_type_hash *&t)
244 return ggc_marked_p (t->type);
248 static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;
250 static tree merge_sizes (tree, tree, tree, bool, bool);
251 static tree fold_bit_position (const_tree);
252 static tree compute_related_constant (tree, tree);
253 static tree split_plus (tree, tree *);
254 static tree float_type_for_precision (int, machine_mode);
255 static tree convert_to_fat_pointer (tree, tree);
256 static unsigned int scale_by_factor_of (tree, unsigned int);
257 static bool potential_alignment_gap (tree, tree, tree);
259 /* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
260 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
261 struct deferred_decl_context_node
263 /* The ..._DECL node to work on. */
266 /* The corresponding entity's Scope. */
267 Entity_Id gnat_scope;
269 /* The value of force_global when DECL was pushed. */
272 /* The list of ..._TYPE nodes to propagate the context to. */
275 /* The next queue item. */
276 struct deferred_decl_context_node *next;
279 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
281 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
282 feed it with the elaboration of GNAT_SCOPE. */
283 static struct deferred_decl_context_node *
284 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
286 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
287 feed it with the DECL_CONTEXT computed as part of N as soon as it is
289 static void add_deferred_type_context (struct deferred_decl_context_node *n,
292 /* Initialize data structures of the utils.c module. */
295 init_gnat_utils (void)
297 /* Initialize the association of GNAT nodes to GCC trees. */
298 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
300 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
301 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
303 /* Initialize the hash table of padded types. */
304 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
307 /* Destroy data structures of the utils.c module. */
310 destroy_gnat_utils (void)
312 /* Destroy the association of GNAT nodes to GCC trees. */
313 ggc_free (associate_gnat_to_gnu);
314 associate_gnat_to_gnu = NULL;
316 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
317 ggc_free (dummy_node_table);
318 dummy_node_table = NULL;
320 /* Destroy the hash table of padded types. */
321 pad_type_hash_table->empty ();
322 pad_type_hash_table = NULL;
325 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
326 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
327 If NO_CHECK is true, the latter check is suppressed.
329 If GNU_DECL is zero, reset a previous association. */
332 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
334 /* Check that GNAT_ENTITY is not already defined and that it is being set
335 to something which is a decl. If that is not the case, this usually
336 means GNAT_ENTITY is defined twice, but occasionally is due to some
338 gcc_assert (!(gnu_decl
339 && (PRESENT_GNU_TREE (gnat_entity)
340 || (!no_check && !DECL_P (gnu_decl)))));
342 SET_GNU_TREE (gnat_entity, gnu_decl);
345 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
346 that was associated with it. If there is no such tree node, abort.
348 In some cases, such as delayed elaboration or expressions that need to
349 be elaborated only once, GNAT_ENTITY is really not an entity. */
352 get_gnu_tree (Entity_Id gnat_entity)
354 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
355 return GET_GNU_TREE (gnat_entity);
358 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
361 present_gnu_tree (Entity_Id gnat_entity)
363 return PRESENT_GNU_TREE (gnat_entity);
366 /* Make a dummy type corresponding to GNAT_TYPE. */
369 make_dummy_type (Entity_Id gnat_type)
371 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
372 tree gnu_type, debug_type;
374 /* If there was no equivalent type (can only happen when just annotating
375 types) or underlying type, go back to the original type. */
377 gnat_equiv = gnat_type;
379 /* If it there already a dummy type, use that one. Else make one. */
380 if (PRESENT_DUMMY_NODE (gnat_equiv))
381 return GET_DUMMY_NODE (gnat_equiv);
383 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
385 gnu_type = make_node (Is_Record_Type (gnat_equiv)
386 ? tree_code_for_record_type (gnat_equiv)
388 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
389 TYPE_DUMMY_P (gnu_type) = 1;
390 TYPE_STUB_DECL (gnu_type)
391 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
392 if (Is_By_Reference_Type (gnat_equiv))
393 TYPE_BY_REFERENCE_P (gnu_type) = 1;
395 SET_DUMMY_NODE (gnat_equiv, gnu_type);
397 /* Create a debug type so that debuggers only see an unspecified type. */
398 if (Needs_Debug_Info (gnat_type))
400 debug_type = make_node (LANG_TYPE);
401 TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
402 TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
403 SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);
409 /* Return the dummy type that was made for GNAT_TYPE, if any. */
412 get_dummy_type (Entity_Id gnat_type)
414 return GET_DUMMY_NODE (gnat_type);
417 /* Build dummy fat and thin pointer types whose designated type is specified
418 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
421 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
423 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
424 tree gnu_fat_type, fields, gnu_object_type;
426 gnu_template_type = make_node (RECORD_TYPE);
427 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
428 TYPE_DUMMY_P (gnu_template_type) = 1;
429 gnu_ptr_template = build_pointer_type (gnu_template_type);
431 gnu_array_type = make_node (ENUMERAL_TYPE);
432 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
433 TYPE_DUMMY_P (gnu_array_type) = 1;
434 gnu_ptr_array = build_pointer_type (gnu_array_type);
436 gnu_fat_type = make_node (RECORD_TYPE);
437 /* Build a stub DECL to trigger the special processing for fat pointer types
439 TYPE_NAME (gnu_fat_type)
440 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
442 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
443 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
445 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
446 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
447 finish_fat_pointer_type (gnu_fat_type, fields);
448 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
449 /* Suppress debug info until after the type is completed. */
450 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
452 gnu_object_type = make_node (RECORD_TYPE);
453 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
454 TYPE_DUMMY_P (gnu_object_type) = 1;
456 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
457 TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
458 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
461 /* Return true if we are in the global binding level. */
464 global_bindings_p (void)
466 return force_global || !current_function_decl;
469 /* Enter a new binding level. */
472 gnat_pushlevel (void)
474 struct gnat_binding_level *newlevel = NULL;
476 /* Reuse a struct for this binding level, if there is one. */
477 if (free_binding_level)
479 newlevel = free_binding_level;
480 free_binding_level = free_binding_level->chain;
483 newlevel = ggc_alloc<gnat_binding_level> ();
485 /* Use a free BLOCK, if any; otherwise, allocate one. */
486 if (free_block_chain)
488 newlevel->block = free_block_chain;
489 free_block_chain = BLOCK_CHAIN (free_block_chain);
490 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
493 newlevel->block = make_node (BLOCK);
495 /* Point the BLOCK we just made to its parent. */
496 if (current_binding_level)
497 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
499 BLOCK_VARS (newlevel->block) = NULL_TREE;
500 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
501 TREE_USED (newlevel->block) = 1;
503 /* Add this level to the front of the chain (stack) of active levels. */
504 newlevel->chain = current_binding_level;
505 newlevel->jmpbuf_decl = NULL_TREE;
506 current_binding_level = newlevel;
509 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
510 and point FNDECL to this BLOCK. */
513 set_current_block_context (tree fndecl)
515 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
516 DECL_INITIAL (fndecl) = current_binding_level->block;
517 set_block_for_group (current_binding_level->block);
520 /* Set the jmpbuf_decl for the current binding level to DECL. */
523 set_block_jmpbuf_decl (tree decl)
525 current_binding_level->jmpbuf_decl = decl;
528 /* Get the jmpbuf_decl, if any, for the current binding level. */
531 get_block_jmpbuf_decl (void)
533 return current_binding_level->jmpbuf_decl;
536 /* Exit a binding level. Set any BLOCK into the current code group. */
541 struct gnat_binding_level *level = current_binding_level;
542 tree block = level->block;
544 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
545 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
547 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
548 are no variables free the block and merge its subblocks into those of its
549 parent block. Otherwise, add it to the list of its parent. */
550 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
552 else if (!BLOCK_VARS (block))
554 BLOCK_SUBBLOCKS (level->chain->block)
555 = block_chainon (BLOCK_SUBBLOCKS (block),
556 BLOCK_SUBBLOCKS (level->chain->block));
557 BLOCK_CHAIN (block) = free_block_chain;
558 free_block_chain = block;
562 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
563 BLOCK_SUBBLOCKS (level->chain->block) = block;
564 TREE_USED (block) = 1;
565 set_block_for_group (block);
568 /* Free this binding structure. */
569 current_binding_level = level->chain;
570 level->chain = free_binding_level;
571 free_binding_level = level;
574 /* Exit a binding level and discard the associated BLOCK. */
579 struct gnat_binding_level *level = current_binding_level;
580 tree block = level->block;
582 BLOCK_CHAIN (block) = free_block_chain;
583 free_block_chain = block;
585 /* Free this binding structure. */
586 current_binding_level = level->chain;
587 level->chain = free_binding_level;
588 free_binding_level = level;
591 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
594 gnat_set_type_context (tree type, tree context)
596 tree decl = TYPE_STUB_DECL (type);
598 TYPE_CONTEXT (type) = context;
600 while (decl && DECL_PARALLEL_TYPE (decl))
602 tree parallel_type = DECL_PARALLEL_TYPE (decl);
604 /* Give a context to the parallel types and their stub decl, if any.
605 Some parallel types seems to be present in multiple parallel type
606 chains, so don't mess with their context if they already have one. */
607 if (!TYPE_CONTEXT (parallel_type))
609 if (TYPE_STUB_DECL (parallel_type))
610 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
611 TYPE_CONTEXT (parallel_type) = context;
614 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
618 /* Return the innermost scope, starting at GNAT_NODE, we are be interested in
619 the debug info, or Empty if there is no such scope. If not NULL, set
620 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
623 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
625 Entity_Id gnat_entity;
628 *is_subprogram = false;
630 if (Nkind (gnat_node) == N_Defining_Identifier
631 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
632 gnat_entity = Scope (gnat_node);
636 while (Present (gnat_entity))
638 switch (Ekind (gnat_entity))
642 if (Present (Protected_Body_Subprogram (gnat_entity)))
643 gnat_entity = Protected_Body_Subprogram (gnat_entity);
645 /* If the scope is a subprogram, then just rely on
646 current_function_decl, so that we don't have to defer
647 anything. This is needed because other places rely on the
648 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
650 *is_subprogram = true;
654 case E_Record_Subtype:
658 /* By default, we are not interested in this particular scope: go to
663 gnat_entity = Scope (gnat_entity);
669 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
673 defer_or_set_type_context (tree type, tree context,
674 struct deferred_decl_context_node *n)
677 add_deferred_type_context (n, type);
679 gnat_set_type_context (type, context);
682 /* Return global_context, but create it first if need be. */
685 get_global_context (void)
690 = build_translation_unit_decl (get_identifier (main_input_filename));
691 debug_hooks->register_main_translation_unit (global_context);
694 return global_context;
697 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
698 for location information and flag propagation. */
701 gnat_pushdecl (tree decl, Node_Id gnat_node)
703 tree context = NULL_TREE;
704 struct deferred_decl_context_node *deferred_decl_context = NULL;
706 /* If explicitely asked to make DECL global or if it's an imported nested
707 object, short-circuit the regular Scope-based context computation. */
708 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
710 /* Rely on the GNAT scope, or fallback to the current_function_decl if
711 the GNAT scope reached the global scope, if it reached a subprogram
712 or the declaration is a subprogram or a variable (for them we skip
713 intermediate context types because the subprogram body elaboration
714 machinery and the inliner both expect a subprogram context).
716 Falling back to current_function_decl is necessary for implicit
717 subprograms created by gigi, such as the elaboration subprograms. */
718 bool context_is_subprogram = false;
719 const Entity_Id gnat_scope
720 = get_debug_scope (gnat_node, &context_is_subprogram);
722 if (Present (gnat_scope)
723 && !context_is_subprogram
724 && TREE_CODE (decl) != FUNCTION_DECL
725 && TREE_CODE (decl) != VAR_DECL)
726 /* Always assume the scope has not been elaborated, thus defer the
727 context propagation to the time its elaboration will be
729 deferred_decl_context
730 = add_deferred_decl_context (decl, gnat_scope, force_global);
732 /* External declarations (when force_global > 0) may not be in a
734 else if (current_function_decl && force_global == 0)
735 context = current_function_decl;
738 /* If either we are forced to be in global mode or if both the GNAT scope and
739 the current_function_decl did not help in determining the context, use the
741 if (!deferred_decl_context && !context)
742 context = get_global_context ();
744 /* Functions imported in another function are not really nested.
745 For really nested functions mark them initially as needing
746 a static chain for uses of that flag before unnesting;
747 lower_nested_functions will then recompute it. */
748 if (TREE_CODE (decl) == FUNCTION_DECL
749 && !TREE_PUBLIC (decl)
751 && (TREE_CODE (context) == FUNCTION_DECL
752 || decl_function_context (context)))
753 DECL_STATIC_CHAIN (decl) = 1;
755 if (!deferred_decl_context)
756 DECL_CONTEXT (decl) = context;
758 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
760 /* Set the location of DECL and emit a declaration for it. */
761 if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
762 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
764 add_decl_expr (decl, gnat_node);
766 /* Put the declaration on the list. The list of declarations is in reverse
767 order. The list will be reversed later. Put global declarations in the
768 globals list and local ones in the current block. But skip TYPE_DECLs
769 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
770 with the debugger and aren't needed anyway. */
771 if (!(TREE_CODE (decl) == TYPE_DECL
772 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
774 /* External declarations must go to the binding level they belong to.
775 This will make corresponding imported entities are available in the
776 debugger at the proper time. */
777 if (DECL_EXTERNAL (decl)
778 && TREE_CODE (decl) == FUNCTION_DECL
779 && fndecl_built_in_p (decl))
780 vec_safe_push (builtin_decls, decl);
781 else if (global_bindings_p ())
782 vec_safe_push (global_decls, decl);
785 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
786 BLOCK_VARS (current_binding_level->block) = decl;
790 /* For the declaration of a type, set its name either if it isn't already
791 set or if the previous type name was not derived from a source name.
792 We'd rather have the type named with a real name and all the pointer
793 types to the same object have the same node, except when the names are
794 both derived from source names. */
795 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
797 tree t = TREE_TYPE (decl);
799 /* Array and pointer types aren't tagged types in the C sense so we need
800 to generate a typedef in DWARF for them and make sure it is preserved,
801 unless the type is artificial. */
802 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
803 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
804 || DECL_ARTIFICIAL (decl)))
806 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
807 generate the typedef in DWARF. Also do that for fat pointer types
808 because, even though they are tagged types in the C sense, they are
809 still XUP types attached to the base array type at this point. */
810 else if (!DECL_ARTIFICIAL (decl)
811 && (TREE_CODE (t) == ARRAY_TYPE
812 || TREE_CODE (t) == POINTER_TYPE
813 || TYPE_IS_FAT_POINTER_P (t)))
815 tree tt = build_variant_type_copy (t);
816 TYPE_NAME (tt) = decl;
817 defer_or_set_type_context (tt,
819 deferred_decl_context);
820 TREE_TYPE (decl) = tt;
822 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
823 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
824 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
826 DECL_ORIGINAL_TYPE (decl) = t;
827 /* Array types need to have a name so that they can be related to
828 their GNAT encodings. */
829 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
830 TYPE_NAME (t) = DECL_NAME (decl);
833 else if (TYPE_NAME (t)
834 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
835 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
840 /* Propagate the name to all the variants, this is needed for the type
841 qualifiers machinery to work properly (see check_qualified_type).
842 Also propagate the context to them. Note that it will be propagated
843 to all parallel types too thanks to gnat_set_type_context. */
845 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
846 /* ??? Because of the previous kludge, we can have variants of fat
847 pointer types with different names. */
848 if (!(TYPE_IS_FAT_POINTER_P (t)
850 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
852 TYPE_NAME (t) = decl;
853 defer_or_set_type_context (t,
855 deferred_decl_context);
860 /* Create a record type that contains a SIZE bytes long field of TYPE with a
861 starting bit position so that it is aligned to ALIGN bits, and leaving at
862 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
863 record is guaranteed to get. GNAT_NODE is used for the position of the
864 associated TYPE_DECL. */
867 make_aligning_type (tree type, unsigned int align, tree size,
868 unsigned int base_align, int room, Node_Id gnat_node)
870 /* We will be crafting a record type with one field at a position set to be
871 the next multiple of ALIGN past record'address + room bytes. We use a
872 record placeholder to express record'address. */
873 tree record_type = make_node (RECORD_TYPE);
874 tree record = build0 (PLACEHOLDER_EXPR, record_type);
877 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
879 /* The diagram below summarizes the shape of what we manipulate:
881 <--------- pos ---------->
882 { +------------+-------------+-----------------+
883 record =>{ |############| ... | field (type) |
884 { +------------+-------------+-----------------+
885 |<-- room -->|<- voffset ->|<---- size ----->|
888 record_addr vblock_addr
890 Every length is in sizetype bytes there, except "pos" which has to be
891 set as a bit position in the GCC tree for the record. */
892 tree room_st = size_int (room);
893 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
894 tree voffset_st, pos, field;
896 tree name = TYPE_IDENTIFIER (type);
898 name = concat_name (name, "ALIGN");
899 TYPE_NAME (record_type) = name;
901 /* Compute VOFFSET and then POS. The next byte position multiple of some
902 alignment after some address is obtained by "and"ing the alignment minus
903 1 with the two's complement of the address. */
904 voffset_st = size_binop (BIT_AND_EXPR,
905 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
906 size_int ((align / BITS_PER_UNIT) - 1));
908 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
909 pos = size_binop (MULT_EXPR,
910 convert (bitsizetype,
911 size_binop (PLUS_EXPR, room_st, voffset_st)),
914 /* Craft the GCC record representation. We exceptionally do everything
915 manually here because 1) our generic circuitry is not quite ready to
916 handle the complex position/size expressions we are setting up, 2) we
917 have a strong simplifying factor at hand: we know the maximum possible
918 value of voffset, and 3) we have to set/reset at least the sizes in
919 accordance with this maximum value anyway, as we need them to convey
920 what should be "alloc"ated for this type.
922 Use -1 as the 'addressable' indication for the field to prevent the
923 creation of a bitfield. We don't need one, it would have damaging
924 consequences on the alignment computation, and create_field_decl would
925 make one without this special argument, for instance because of the
926 complex position expression. */
927 field = create_field_decl (get_identifier ("F"), type, record_type, size,
929 TYPE_FIELDS (record_type) = field;
931 SET_TYPE_ALIGN (record_type, base_align);
932 TYPE_USER_ALIGN (record_type) = 1;
934 TYPE_SIZE (record_type)
935 = size_binop (PLUS_EXPR,
936 size_binop (MULT_EXPR, convert (bitsizetype, size),
938 bitsize_int (align + room * BITS_PER_UNIT));
939 TYPE_SIZE_UNIT (record_type)
940 = size_binop (PLUS_EXPR, size,
941 size_int (room + align / BITS_PER_UNIT));
943 SET_TYPE_MODE (record_type, BLKmode);
944 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
946 /* Declare it now since it will never be declared otherwise. This is
947 necessary to ensure that its subtrees are properly marked. */
948 create_type_decl (name, record_type, true, false, gnat_node);
953 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
954 as the field type of a packed record if IN_RECORD is true, or as the
955 component type of a packed array if IN_RECORD is false. See if we can
956 rewrite it either as a type that has non-BLKmode, which we can pack
957 tighter in the packed record case, or as a smaller type with at most
958 MAX_ALIGN alignment if the value is non-zero. If so, return the new
959 type; if not, return the original type. */
962 make_packable_type (tree type, bool in_record, unsigned int max_align)
964 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
965 unsigned HOST_WIDE_INT new_size;
966 unsigned int align = TYPE_ALIGN (type);
967 unsigned int new_align;
969 /* No point in doing anything if the size is zero. */
973 tree new_type = make_node (TREE_CODE (type));
975 /* Copy the name and flags from the old type to that of the new.
976 Note that we rely on the pointer equality created here for
977 TYPE_NAME to look through conversions in various places. */
978 TYPE_NAME (new_type) = TYPE_NAME (type);
979 TYPE_PACKED (new_type) = 1;
980 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
981 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
982 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
983 if (TREE_CODE (type) == RECORD_TYPE)
984 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
986 /* If we are in a record and have a small size, set the alignment to
987 try for an integral mode. Otherwise set it to try for a smaller
988 type with BLKmode. */
989 if (in_record && size <= MAX_FIXED_MODE_SIZE)
991 new_size = ceil_pow2 (size);
992 new_align = MIN (new_size, BIGGEST_ALIGNMENT);
993 SET_TYPE_ALIGN (new_type, new_align);
997 tree type_size = TYPE_ADA_SIZE (type);
998 /* Do not try to shrink the size if the RM size is not constant. */
999 if (TYPE_CONTAINS_TEMPLATE_P (type)
1000 || !tree_fits_uhwi_p (type_size))
1003 /* Round the RM size up to a unit boundary to get the minimal size
1004 for a BLKmode record. Give up if it's already the size and we
1005 don't need to lower the alignment. */
1006 new_size = tree_to_uhwi (type_size);
1007 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
1008 if (new_size == size && (max_align == 0 || align <= max_align))
1011 new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
1012 if (max_align > 0 && new_align > max_align)
1013 new_align = max_align;
1014 SET_TYPE_ALIGN (new_type, MIN (align, new_align));
1017 TYPE_USER_ALIGN (new_type) = 1;
1019 /* Now copy the fields, keeping the position and size as we don't want
1020 to change the layout by propagating the packedness downwards. */
1021 tree new_field_list = NULL_TREE;
1022 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1024 tree new_field_type = TREE_TYPE (field);
1025 tree new_field, new_field_size;
1027 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1028 && !TYPE_FAT_POINTER_P (new_field_type)
1029 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
1030 new_field_type = make_packable_type (new_field_type, true, max_align);
1032 /* However, for the last field in a not already packed record type
1033 that is of an aggregate type, we need to use the RM size in the
1034 packable version of the record type, see finish_record_type. */
1035 if (!DECL_CHAIN (field)
1036 && !TYPE_PACKED (type)
1037 && RECORD_OR_UNION_TYPE_P (new_field_type)
1038 && !TYPE_FAT_POINTER_P (new_field_type)
1039 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1040 && TYPE_ADA_SIZE (new_field_type))
1041 new_field_size = TYPE_ADA_SIZE (new_field_type);
1043 new_field_size = DECL_SIZE (field);
1045 /* This is a layout with full representation, alignment and size clauses
1046 so we simply pass 0 as PACKED like gnat_to_gnu_field in this case. */
1048 = create_field_decl (DECL_NAME (field), new_field_type, new_type,
1049 new_field_size, bit_position (field), 0,
1050 !DECL_NONADDRESSABLE_P (field));
1052 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
1053 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
1054 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1055 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);
1057 DECL_CHAIN (new_field) = new_field_list;
1058 new_field_list = new_field;
1061 /* If this is a padding record, we never want to make the size smaller
1062 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1063 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1065 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1066 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1071 TYPE_SIZE (new_type) = bitsize_int (new_size);
1072 TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
1075 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1076 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1078 finish_record_type (new_type, nreverse (new_field_list), 2, false);
1079 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
1080 if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1081 SET_TYPE_DEBUG_TYPE (new_type, TYPE_DEBUG_TYPE (type));
1082 else if (TYPE_STUB_DECL (type))
1083 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1084 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
1086 /* Try harder to get a packable type if necessary, for example
1087 in case the record itself contains a BLKmode field. */
1088 if (in_record && TYPE_MODE (new_type) == BLKmode)
1089 SET_TYPE_MODE (new_type,
1090 mode_for_size_tree (TYPE_SIZE (new_type),
1091 MODE_INT, 1).else_blk ());
1093 /* If neither mode nor size nor alignment shrunk, return the old type. */
1094 if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
1100 /* Return true if TYPE has an unsigned representation. This needs to be used
1101 when the representation of types whose precision is not equal to their size
1102 is manipulated based on the RM size. */
1105 type_unsigned_for_rm (tree type)
1107 /* This is the common case. */
1108 if (TYPE_UNSIGNED (type))
1111 /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1112 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1113 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1119 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1120 If TYPE is the best type, return it. Otherwise, make a new type. We
1121 only support new integral and pointer types. FOR_BIASED is true if
1122 we are making a biased type. */
1125 make_type_from_size (tree type, tree size_tree, bool for_biased)
1127 unsigned HOST_WIDE_INT size;
1131 /* If size indicates an error, just return TYPE to avoid propagating
1132 the error. Likewise if it's too large to represent. */
1133 if (!size_tree || !tree_fits_uhwi_p (size_tree))
1136 size = tree_to_uhwi (size_tree);
1138 switch (TREE_CODE (type))
1141 /* Do not mess with boolean types that have foreign convention. */
1142 if (TYPE_PRECISION (type) == 1 && TYPE_SIZE (type) == size_tree)
1145 /* ... fall through ... */
1149 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1150 && TYPE_BIASED_REPRESENTATION_P (type));
1152 /* Integer types with precision 0 are forbidden. */
1156 /* Only do something if the type isn't a packed array type and doesn't
1157 already have the proper size and the size isn't too large. */
1158 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1159 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1160 || size > LONG_LONG_TYPE_SIZE)
1163 biased_p |= for_biased;
1165 /* The type should be an unsigned type if the original type is unsigned
1166 or if the lower bound is constant and non-negative or if the type is
1167 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1168 if (type_unsigned_for_rm (type) || biased_p)
1169 new_type = make_unsigned_type (size);
1171 new_type = make_signed_type (size);
1172 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1173 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1174 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
1175 /* Copy the name to show that it's essentially the same type and
1176 not a subrange type. */
1177 TYPE_NAME (new_type) = TYPE_NAME (type);
1178 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1179 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1183 /* Do something if this is a fat pointer, in which case we
1184 may need to return the thin pointer. */
1185 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1187 scalar_int_mode p_mode;
1188 if (!int_mode_for_size (size, 0).exists (&p_mode)
1189 || !targetm.valid_pointer_mode (p_mode))
1192 build_pointer_type_for_mode
1193 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1199 /* Only do something if this is a thin pointer, in which case we
1200 may need to return the fat pointer. */
1201 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1203 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1213 /* Return true iff the padded types are equivalent. */
1216 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1220 if (t1->hash != t2->hash)
1226 /* We consider that the padded types are equivalent if they pad the same type
1227 and have the same size, alignment, RM size and storage order. Taking the
1228 mode into account is redundant since it is determined by the others. */
1230 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1231 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1232 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1233 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1234 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1237 /* Compute the hash value for the padded TYPE. */
1240 hash_pad_type (tree type)
1245 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1246 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1247 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1248 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1253 /* Look up the padded TYPE in the hash table and return its canonical version
1254 if it exists; otherwise, insert it into the hash table. */
1257 canonicalize_pad_type (tree type)
1259 const hashval_t hashcode = hash_pad_type (type);
1260 struct pad_type_hash in, *h, **slot;
1264 slot = pad_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
1268 h = ggc_alloc<pad_type_hash> ();
1277 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1278 if needed. We have already verified that SIZE and ALIGN are large enough.
1279 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1280 IS_COMPONENT_TYPE is true if this is being done for the component type of
1281 an array. IS_USER_TYPE is true if the original type needs to be completed.
1282 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1283 the RM size of the resulting type is to be set to SIZE too; in this case,
1284 the padded type is canonicalized before being returned. */
1287 maybe_pad_type (tree type, tree size, unsigned int align,
1288 Entity_Id gnat_entity, bool is_component_type,
1289 bool is_user_type, bool definition, bool set_rm_size)
1291 tree orig_size = TYPE_SIZE (type);
1292 unsigned int orig_align = TYPE_ALIGN (type);
1295 /* If TYPE is a padded type, see if it agrees with any size and alignment
1296 we were given. If so, return the original type. Otherwise, strip
1297 off the padding, since we will either be returning the inner type
1298 or repadding it. If no size or alignment is specified, use that of
1299 the original padded type. */
1300 if (TYPE_IS_PADDING_P (type))
1303 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1304 && (align == 0 || align == orig_align))
1312 type = TREE_TYPE (TYPE_FIELDS (type));
1313 orig_size = TYPE_SIZE (type);
1314 orig_align = TYPE_ALIGN (type);
1317 /* If the size is either not being changed or is being made smaller (which
1318 is not done here and is only valid for bitfields anyway), show the size
1319 isn't changing. Likewise, clear the alignment if it isn't being
1320 changed. Then return if we aren't doing anything. */
1322 && (operand_equal_p (size, orig_size, 0)
1323 || (TREE_CODE (orig_size) == INTEGER_CST
1324 && tree_int_cst_lt (size, orig_size))))
1327 if (align == orig_align)
1330 if (align == 0 && !size)
1333 /* If requested, complete the original type and give it a name. */
1335 create_type_decl (get_entity_name (gnat_entity), type,
1336 !Comes_From_Source (gnat_entity),
1338 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1339 && DECL_IGNORED_P (TYPE_NAME (type))),
1342 /* We used to modify the record in place in some cases, but that could
1343 generate incorrect debugging information. So make a new record
1345 record = make_node (RECORD_TYPE);
1346 TYPE_PADDING_P (record) = 1;
1348 /* ??? Padding types around packed array implementation types will be
1349 considered as root types in the array descriptor language hook (see
1350 gnat_get_array_descr_info). Give them the original packed array type
1351 name so that the one coming from sources appears in the debugging
1353 if (TYPE_IMPL_PACKED_ARRAY_P (type)
1354 && TYPE_ORIGINAL_PACKED_ARRAY (type)
1355 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1356 TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
1357 else if (Present (gnat_entity))
1358 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1360 SET_TYPE_ALIGN (record, align ? align : orig_align);
1361 TYPE_SIZE (record) = size ? size : orig_size;
1362 TYPE_SIZE_UNIT (record)
1363 = convert (sizetype,
1364 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1365 bitsize_unit_node));
1367 /* If we are changing the alignment and the input type is a record with
1368 BLKmode and a small constant size, try to make a form that has an
1369 integral mode. This might allow the padding record to also have an
1370 integral mode, which will be much more efficient. There is no point
1371 in doing so if a size is specified unless it is also a small constant
1372 size and it is incorrect to do so if we cannot guarantee that the mode
1373 will be naturally aligned since the field must always be addressable.
1375 ??? This might not always be a win when done for a stand-alone object:
1376 since the nominal and the effective type of the object will now have
1377 different modes, a VIEW_CONVERT_EXPR will be required for converting
1378 between them and it might be hard to overcome afterwards, including
1379 at the RTL level when the stand-alone object is accessed as a whole. */
1381 && RECORD_OR_UNION_TYPE_P (type)
1382 && TYPE_MODE (type) == BLKmode
1383 && !TYPE_BY_REFERENCE_P (type)
1384 && TREE_CODE (orig_size) == INTEGER_CST
1385 && !TREE_OVERFLOW (orig_size)
1386 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1388 || (TREE_CODE (size) == INTEGER_CST
1389 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1391 tree packable_type = make_packable_type (type, true);
1392 if (TYPE_MODE (packable_type) != BLKmode
1393 && align >= TYPE_ALIGN (packable_type))
1394 type = packable_type;
1397 /* Now create the field with the original size. */
1398 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1399 bitsize_zero_node, 0, 1);
1400 DECL_INTERNAL_P (field) = 1;
1402 /* We will output additional debug info manually below. */
1403 finish_record_type (record, field, 1, false);
1405 /* Set the RM size if requested. */
1408 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1410 /* If the padded type is complete and has constant size, we canonicalize
1411 it by means of the hash table. This is consistent with the language
1412 semantics and ensures that gigi and the middle-end have a common view
1413 of these padded types. */
1414 if (TREE_CONSTANT (TYPE_SIZE (record)))
1416 tree canonical = canonicalize_pad_type (record);
1417 if (canonical != record)
1425 if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1426 SET_TYPE_DEBUG_TYPE (record, maybe_debug_type (type));
1428 /* Unless debugging information isn't being written for the input type,
1429 write a record that shows what we are a subtype of and also make a
1430 variable that indicates our size, if still variable. */
1431 if (TREE_CODE (orig_size) != INTEGER_CST
1432 && TYPE_NAME (record)
1434 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1435 && DECL_IGNORED_P (TYPE_NAME (type))))
1437 tree name = TYPE_IDENTIFIER (record);
1438 tree size_unit = TYPE_SIZE_UNIT (record);
1440 /* A variable that holds the size is required even with no encoding since
1441 it will be referenced by debugging information attributes. At global
1442 level, we need a single variable across all translation units. */
1444 && TREE_CODE (size) != INTEGER_CST
1445 && (definition || global_bindings_p ()))
1447 /* Whether or not gnat_entity comes from source, this XVZ variable is
1448 is a compilation artifact. */
1450 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1451 size_unit, true, global_bindings_p (),
1452 !definition && global_bindings_p (), false,
1453 false, true, true, NULL, gnat_entity);
1454 TYPE_SIZE_UNIT (record) = size_unit;
1457 /* There is no need to show what we are a subtype of when outputting as
1458 few encodings as possible: regular debugging infomation makes this
1460 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1462 tree marker = make_node (RECORD_TYPE);
1463 tree orig_name = TYPE_IDENTIFIER (type);
1465 TYPE_NAME (marker) = concat_name (name, "XVS");
1466 finish_record_type (marker,
1467 create_field_decl (orig_name,
1468 build_reference_type (type),
1469 marker, NULL_TREE, NULL_TREE,
1472 TYPE_SIZE_UNIT (marker) = size_unit;
1474 add_parallel_type (record, marker);
1479 /* If a simple size was explicitly given, maybe issue a warning. */
1481 || TREE_CODE (size) == COND_EXPR
1482 || TREE_CODE (size) == MAX_EXPR
1483 || No (gnat_entity))
1486 /* But don't do it if we are just annotating types and the type is tagged or
1487 concurrent, since these types aren't fully laid out in this mode. */
1488 if (type_annotate_only)
1492 ? Component_Type (gnat_entity) : Etype (gnat_entity);
1494 if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1498 /* Take the original size as the maximum size of the input if there was an
1499 unconstrained record involved and round it up to the specified alignment,
1500 if one was specified, but only for aggregate types. */
1501 if (CONTAINS_PLACEHOLDER_P (orig_size))
1502 orig_size = max_size (orig_size, true);
1504 if (align && AGGREGATE_TYPE_P (type))
1505 orig_size = round_up (orig_size, align);
1507 if (!operand_equal_p (size, orig_size, 0)
1508 && !(TREE_CODE (size) == INTEGER_CST
1509 && TREE_CODE (orig_size) == INTEGER_CST
1510 && (TREE_OVERFLOW (size)
1511 || TREE_OVERFLOW (orig_size)
1512 || tree_int_cst_lt (size, orig_size))))
1514 Node_Id gnat_error_node;
1516 /* For a packed array, post the message on the original array type. */
1517 if (Is_Packed_Array_Impl_Type (gnat_entity))
1518 gnat_entity = Original_Array_Type (gnat_entity);
1520 if ((Ekind (gnat_entity) == E_Component
1521 || Ekind (gnat_entity) == E_Discriminant)
1522 && Present (Component_Clause (gnat_entity)))
1523 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1524 else if (Has_Size_Clause (gnat_entity))
1525 gnat_error_node = Expression (Size_Clause (gnat_entity));
1526 else if (Has_Object_Size_Clause (gnat_entity))
1527 gnat_error_node = Expression (Object_Size_Clause (gnat_entity));
1529 gnat_error_node = Empty;
1531 /* Generate message only for entities that come from source, since
1532 if we have an entity created by expansion, the message will be
1533 generated for some other corresponding source entity. */
1534 if (Comes_From_Source (gnat_entity))
1536 if (is_component_type)
1537 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1538 gnat_entity, gnat_entity,
1539 size_diffop (size, orig_size));
1540 else if (Present (gnat_error_node))
1541 post_error_ne_tree ("{^ }bits of & unused?",
1542 gnat_error_node, gnat_entity,
1543 size_diffop (size, orig_size));
1550 /* Return true if padded TYPE was built with an RM size. */
1553 pad_type_has_rm_size (tree type)
1555 /* This is required for the lookup. */
1556 if (!TREE_CONSTANT (TYPE_SIZE (type)))
1559 const hashval_t hashcode = hash_pad_type (type);
1560 struct pad_type_hash in, *h;
1564 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1566 /* The types built with an RM size are the canonicalized ones. */
1567 return h && h->type == type;
1570 /* Return a copy of the padded TYPE but with reverse storage order. */
1573 set_reverse_storage_order_on_pad_type (tree type)
1577 /* If the inner type is not scalar then the function does nothing. */
1578 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1579 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1580 && !VECTOR_TYPE_P (inner_type));
1583 /* This is required for the canonicalization. */
1584 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1586 tree field = copy_node (TYPE_FIELDS (type));
1587 type = copy_type (type);
1588 DECL_CONTEXT (field) = type;
1589 TYPE_FIELDS (type) = field;
1590 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1591 return canonicalize_pad_type (type);
1594 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1595 If this is a multi-dimensional array type, do this recursively.
1598 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1599 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1600 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1603 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1605 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1606 of a one-dimensional array, since the padding has the same alias set
1607 as the field type, but if it's a multi-dimensional array, we need to
1608 see the inner types. */
1609 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1610 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1611 || TYPE_PADDING_P (gnu_old_type)))
1612 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1614 /* Unconstrained array types are deemed incomplete and would thus be given
1615 alias set 0. Retrieve the underlying array type. */
1616 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1618 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1619 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1621 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1623 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1624 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1625 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1626 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1630 case ALIAS_SET_COPY:
1631 /* The alias set shouldn't be copied between array types with different
1632 aliasing settings because this can break the aliasing relationship
1633 between the array type and its element type. */
1634 if (flag_checking || flag_strict_aliasing)
1635 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1636 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1637 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1638 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1640 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1643 case ALIAS_SET_SUBSET:
1644 case ALIAS_SET_SUPERSET:
1646 alias_set_type old_set = get_alias_set (gnu_old_type);
1647 alias_set_type new_set = get_alias_set (gnu_new_type);
1649 /* Do nothing if the alias sets conflict. This ensures that we
1650 never call record_alias_subset several times for the same pair
1651 or at all for alias set 0. */
1652 if (!alias_sets_conflict_p (old_set, new_set))
1654 if (op == ALIAS_SET_SUBSET)
1655 record_alias_subset (old_set, new_set);
1657 record_alias_subset (new_set, old_set);
1666 record_component_aliases (gnu_new_type);
1669 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1670 ARTIFICIAL_P is true if the type was generated by the compiler. */
1673 record_builtin_type (const char *name, tree type, bool artificial_p)
1675 tree type_decl = build_decl (input_location,
1676 TYPE_DECL, get_identifier (name), type);
1677 DECL_ARTIFICIAL (type_decl) = artificial_p;
1678 TYPE_ARTIFICIAL (type) = artificial_p;
1679 gnat_pushdecl (type_decl, Empty);
1681 if (debug_hooks->type_decl)
1682 debug_hooks->type_decl (type_decl, false);
1685 /* Finish constructing the character type CHAR_TYPE.
1687 In Ada character types are enumeration types and, as a consequence, are
1688 represented in the front-end by integral types holding the positions of
1689 the enumeration values as defined by the language, which means that the
1690 integral types are unsigned.
1692 Unfortunately the signedness of 'char' in C is implementation-defined
1693 and GCC even has the option -f[un]signed-char to toggle it at run time.
1694 Since GNAT's philosophy is to be compatible with C by default, to wit
1695 Interfaces.C.char is defined as a mere copy of Character, we may need
1696 to declare character types as signed types in GENERIC and generate the
1697 necessary adjustments to make them behave as unsigned types.
1699 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1700 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1701 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1702 types. The idea is to ensure that the bit pattern contained in the
1703 Esize'd objects is not changed, even though the numerical value will
1704 be interpreted differently depending on the signedness. */
1707 finish_character_type (tree char_type)
1709 if (TYPE_UNSIGNED (char_type))
1712 /* Make a copy of a generic unsigned version since we'll modify it. */
1713 tree unsigned_char_type
1714 = (char_type == char_type_node
1715 ? unsigned_char_type_node
1716 : copy_type (gnat_unsigned_type_for (char_type)));
1718 /* Create an unsigned version of the type and set it as debug type. */
1719 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1720 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1721 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1722 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1724 /* If this is a subtype, make the debug type a subtype of the debug type
1725 of the base type and convert literal RM bounds to unsigned. */
1726 if (TREE_TYPE (char_type))
1728 tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
1729 tree min_value = TYPE_RM_MIN_VALUE (char_type);
1730 tree max_value = TYPE_RM_MAX_VALUE (char_type);
1732 if (TREE_CODE (min_value) == INTEGER_CST)
1733 min_value = fold_convert (base_unsigned_char_type, min_value);
1734 if (TREE_CODE (max_value) == INTEGER_CST)
1735 max_value = fold_convert (base_unsigned_char_type, max_value);
1737 TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1738 SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1739 SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1742 /* Adjust the RM bounds of the original type to unsigned; that's especially
1743 important for types since they are implicit in this case. */
1744 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1745 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1748 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1749 finish constructing the record type as a fat pointer type. */
1752 finish_fat_pointer_type (tree record_type, tree field_list)
1754 /* Make sure we can put it into a register. */
1755 if (STRICT_ALIGNMENT)
1756 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1758 /* Show what it really is. */
1759 TYPE_FAT_POINTER_P (record_type) = 1;
1761 /* Do not emit debug info for it since the types of its fields may still be
1762 incomplete at this point. */
1763 finish_record_type (record_type, field_list, 0, false);
1765 /* Force type_contains_placeholder_p to return true on it. Although the
1766 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1767 type but the representation of the unconstrained array. */
1768 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1771 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1772 finish constructing the record or union type. If REP_LEVEL is zero, this
1773 record has no representation clause and so will be entirely laid out here.
1774 If REP_LEVEL is one, this record has a representation clause and has been
1775 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1776 this record is derived from a parent record and thus inherits its layout;
1777 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1778 additional debug info needs to be output for this type. */
1781 finish_record_type (tree record_type, tree field_list, int rep_level,
1784 enum tree_code code = TREE_CODE (record_type);
1785 tree name = TYPE_IDENTIFIER (record_type);
1786 tree ada_size = bitsize_zero_node;
1787 tree size = bitsize_zero_node;
1788 bool had_size = TYPE_SIZE (record_type) != 0;
1789 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1790 bool had_align = TYPE_ALIGN (record_type) != 0;
1793 TYPE_FIELDS (record_type) = field_list;
1795 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1796 generate debug info and have a parallel type. */
1797 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1799 /* Globally initialize the record first. If this is a rep'ed record,
1800 that just means some initializations; otherwise, layout the record. */
1803 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1804 TYPE_ALIGN (record_type)));
1807 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1810 TYPE_SIZE (record_type) = bitsize_zero_node;
1812 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1813 out just like a UNION_TYPE, since the size will be fixed. */
1814 else if (code == QUAL_UNION_TYPE)
1819 /* Ensure there isn't a size already set. There can be in an error
1820 case where there is a rep clause but all fields have errors and
1821 no longer have a position. */
1822 TYPE_SIZE (record_type) = 0;
1824 /* Ensure we use the traditional GCC layout for bitfields when we need
1825 to pack the record type or have a representation clause. The other
1826 possible layout (Microsoft C compiler), if available, would prevent
1827 efficient packing in almost all cases. */
1828 #ifdef TARGET_MS_BITFIELD_LAYOUT
1829 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1830 decl_attributes (&record_type,
1831 tree_cons (get_identifier ("gcc_struct"),
1832 NULL_TREE, NULL_TREE),
1833 ATTR_FLAG_TYPE_IN_PLACE);
1836 layout_type (record_type);
1839 /* At this point, the position and size of each field is known. It was
1840 either set before entry by a rep clause, or by laying out the type above.
1842 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1843 to compute the Ada size; the GCC size and alignment (for rep'ed records
1844 that are not padding types); and the mode (for rep'ed records). We also
1845 clear the DECL_BIT_FIELD indication for the cases we know have not been
1846 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1848 if (code == QUAL_UNION_TYPE)
1849 field_list = nreverse (field_list);
1851 for (field = field_list; field; field = DECL_CHAIN (field))
1853 tree type = TREE_TYPE (field);
1854 tree pos = bit_position (field);
1855 tree this_size = DECL_SIZE (field);
1858 if (RECORD_OR_UNION_TYPE_P (type)
1859 && !TYPE_FAT_POINTER_P (type)
1860 && !TYPE_CONTAINS_TEMPLATE_P (type)
1861 && TYPE_ADA_SIZE (type))
1862 this_ada_size = TYPE_ADA_SIZE (type);
1864 this_ada_size = this_size;
1866 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1867 if (DECL_BIT_FIELD (field)
1868 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1870 unsigned int align = TYPE_ALIGN (type);
1872 /* In the general case, type alignment is required. */
1873 if (value_factor_p (pos, align))
1875 /* The enclosing record type must be sufficiently aligned.
1876 Otherwise, if no alignment was specified for it and it
1877 has been laid out already, bump its alignment to the
1878 desired one if this is compatible with its size and
1879 maximum alignment, if any. */
1880 if (TYPE_ALIGN (record_type) >= align)
1882 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1883 DECL_BIT_FIELD (field) = 0;
1887 && value_factor_p (TYPE_SIZE (record_type), align)
1888 && (!TYPE_MAX_ALIGN (record_type)
1889 || TYPE_MAX_ALIGN (record_type) >= align))
1891 SET_TYPE_ALIGN (record_type, align);
1892 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1893 DECL_BIT_FIELD (field) = 0;
1897 /* In the non-strict alignment case, only byte alignment is. */
1898 if (!STRICT_ALIGNMENT
1899 && DECL_BIT_FIELD (field)
1900 && value_factor_p (pos, BITS_PER_UNIT))
1901 DECL_BIT_FIELD (field) = 0;
1904 /* Clear DECL_BIT_FIELD_TYPE for a variant part at offset 0, it's simply
1905 not supported by the DECL_BIT_FIELD_REPRESENTATIVE machinery because
1906 the variant part is always the last field in the list. */
1907 if (DECL_INTERNAL_P (field)
1908 && TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
1909 && integer_zerop (pos))
1910 DECL_BIT_FIELD_TYPE (field) = NULL_TREE;
1912 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1913 field is technically not addressable. Except that it can actually
1914 be addressed if it is BLKmode and happens to be properly aligned. */
1915 if (DECL_BIT_FIELD (field)
1916 && !(DECL_MODE (field) == BLKmode
1917 && value_factor_p (pos, BITS_PER_UNIT)))
1918 DECL_NONADDRESSABLE_P (field) = 1;
1920 /* A type must be as aligned as its most aligned field that is not
1921 a bit-field. But this is already enforced by layout_type. */
1922 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1923 SET_TYPE_ALIGN (record_type,
1924 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
1929 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1930 size = size_binop (MAX_EXPR, size, this_size);
1933 case QUAL_UNION_TYPE:
1935 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1936 this_ada_size, ada_size);
1937 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1942 /* Since we know here that all fields are sorted in order of
1943 increasing bit position, the size of the record is one
1944 higher than the ending bit of the last field processed
1945 unless we have a rep clause, since in that case we might
1946 have a field outside a QUAL_UNION_TYPE that has a higher ending
1947 position. So use a MAX in that case. Also, if this field is a
1948 QUAL_UNION_TYPE, we need to take into account the previous size in
1949 the case of empty variants. */
1951 = merge_sizes (ada_size, pos, this_ada_size,
1952 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1954 = merge_sizes (size, pos, this_size,
1955 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1963 if (code == QUAL_UNION_TYPE)
1964 nreverse (field_list);
1966 /* We need to set the regular sizes if REP_LEVEL is one. */
1969 /* If this is a padding record, we never want to make the size smaller
1970 than what was specified in it, if any. */
1971 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1972 size = TYPE_SIZE (record_type);
1974 tree size_unit = had_size_unit
1975 ? TYPE_SIZE_UNIT (record_type)
1976 : convert (sizetype,
1977 size_binop (CEIL_DIV_EXPR, size,
1978 bitsize_unit_node));
1979 const unsigned int align = TYPE_ALIGN (record_type);
1981 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1982 TYPE_SIZE_UNIT (record_type)
1983 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1986 /* We need to set the Ada size if REP_LEVEL is zero or one. */
1989 /* Now set any of the values we've just computed that apply. */
1990 if (!TYPE_FAT_POINTER_P (record_type)
1991 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1992 SET_TYPE_ADA_SIZE (record_type, ada_size);
1995 /* We need to set the mode if REP_LEVEL is one or two. */
1998 compute_record_mode (record_type);
1999 finish_bitfield_layout (record_type);
2002 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
2003 TYPE_MAX_ALIGN (record_type) = 0;
2006 rest_of_record_type_compilation (record_type);
2009 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
2010 PARRALEL_TYPE has no context and its computation is not deferred yet, also
2011 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
2012 moment TYPE will get a context. */
2015 add_parallel_type (tree type, tree parallel_type)
2017 tree decl = TYPE_STUB_DECL (type);
2019 while (DECL_PARALLEL_TYPE (decl))
2020 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
2022 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
2024 /* If PARALLEL_TYPE already has a context, we are done. */
2025 if (TYPE_CONTEXT (parallel_type))
2028 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
2029 it to PARALLEL_TYPE. */
2030 if (TYPE_CONTEXT (type))
2031 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
2033 /* Otherwise TYPE has not context yet. We know it will have one thanks to
2034 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
2035 so we have nothing to do in this case. */
2038 /* Return true if TYPE has a parallel type. */
2041 has_parallel_type (tree type)
2043 tree decl = TYPE_STUB_DECL (type);
2045 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
2048 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
2049 associated with it. It need not be invoked directly in most cases as
2050 finish_record_type takes care of doing so. */
2053 rest_of_record_type_compilation (tree record_type)
2055 bool var_size = false;
2058 /* If this is a padded type, the bulk of the debug info has already been
2059 generated for the field's type. */
2060 if (TYPE_IS_PADDING_P (record_type))
2063 /* If the type already has a parallel type (XVS type), then we're done. */
2064 if (has_parallel_type (record_type))
2067 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
2069 /* We need to make an XVE/XVU record if any field has variable size,
2070 whether or not the record does. For example, if we have a union,
2071 it may be that all fields, rounded up to the alignment, have the
2072 same size, in which case we'll use that size. But the debug
2073 output routines (except Dwarf2) won't be able to output the fields,
2074 so we need to make the special record. */
2075 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
2076 /* If a field has a non-constant qualifier, the record will have
2077 variable size too. */
2078 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
2079 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
2086 /* If this record type is of variable size, make a parallel record type that
2087 will tell the debugger how the former is laid out (see exp_dbug.ads). */
2088 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
2090 tree new_record_type
2091 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
2092 ? UNION_TYPE : TREE_CODE (record_type));
2093 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
2094 tree last_pos = bitsize_zero_node;
2095 tree old_field, prev_old_field = NULL_TREE;
2098 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
2100 TYPE_NAME (new_record_type) = new_name;
2101 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
2102 TYPE_STUB_DECL (new_record_type)
2103 = create_type_stub_decl (new_name, new_record_type);
2104 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
2105 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
2106 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
2107 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
2108 TYPE_SIZE_UNIT (new_record_type)
2109 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2111 /* Now scan all the fields, replacing each field with a new field
2112 corresponding to the new encoding. */
2113 for (old_field = TYPE_FIELDS (record_type); old_field;
2114 old_field = DECL_CHAIN (old_field))
2116 tree field_type = TREE_TYPE (old_field);
2117 tree field_name = DECL_NAME (old_field);
2118 tree curpos = fold_bit_position (old_field);
2119 tree pos, new_field;
2121 unsigned int align = 0;
2123 /* See how the position was modified from the last position.
2125 There are two basic cases we support: a value was added
2126 to the last position or the last position was rounded to
2127 a boundary and they something was added. Check for the
2128 first case first. If not, see if there is any evidence
2129 of rounding. If so, round the last position and retry.
2131 If this is a union, the position can be taken as zero. */
2132 if (TREE_CODE (new_record_type) == UNION_TYPE)
2133 pos = bitsize_zero_node;
2135 pos = compute_related_constant (curpos, last_pos);
2138 && TREE_CODE (curpos) == MULT_EXPR
2139 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
2141 tree offset = TREE_OPERAND (curpos, 0);
2142 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2143 align = scale_by_factor_of (offset, align);
2144 last_pos = round_up (last_pos, align);
2145 pos = compute_related_constant (curpos, last_pos);
2148 && TREE_CODE (curpos) == PLUS_EXPR
2149 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
2150 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
2152 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
2154 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2155 unsigned HOST_WIDE_INT addend
2156 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2158 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
2159 align = scale_by_factor_of (offset, align);
2160 align = MIN (align, addend & -addend);
2161 last_pos = round_up (last_pos, align);
2162 pos = compute_related_constant (curpos, last_pos);
2164 else if (potential_alignment_gap (prev_old_field, old_field, pos))
2166 align = TYPE_ALIGN (field_type);
2167 last_pos = round_up (last_pos, align);
2168 pos = compute_related_constant (curpos, last_pos);
2171 /* If we can't compute a position, set it to zero.
2173 ??? We really should abort here, but it's too much work
2174 to get this correct for all cases. */
2176 pos = bitsize_zero_node;
2178 /* See if this type is variable-sized and make a pointer type
2179 and indicate the indirection if so. Beware that the debug
2180 back-end may adjust the position computed above according
2181 to the alignment of the field type, i.e. the pointer type
2182 in this case, if we don't preventively counter that. */
2183 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2185 field_type = build_pointer_type (field_type);
2186 if (align != 0 && TYPE_ALIGN (field_type) > align)
2188 field_type = copy_type (field_type);
2189 SET_TYPE_ALIGN (field_type, align);
2194 /* Make a new field name, if necessary. */
2195 if (var || align != 0)
2200 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2201 align / BITS_PER_UNIT);
2203 strcpy (suffix, "XVL");
2205 field_name = concat_name (field_name, suffix);
2209 = create_field_decl (field_name, field_type, new_record_type,
2210 DECL_SIZE (old_field), pos, 0, 0);
2211 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2212 TYPE_FIELDS (new_record_type) = new_field;
2214 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2215 zero. The only time it's not the last field of the record
2216 is when there are other components at fixed positions after
2217 it (meaning there was a rep clause for every field) and we
2218 want to be able to encode them. */
2219 last_pos = size_binop (PLUS_EXPR, curpos,
2220 (TREE_CODE (TREE_TYPE (old_field))
2223 : DECL_SIZE (old_field));
2224 prev_old_field = old_field;
2227 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2229 add_parallel_type (record_type, new_record_type);
2233 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2234 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2235 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2236 replace a value of zero with the old size. If HAS_REP is true, we take the
2237 MAX of the end position of this field with LAST_SIZE. In all other cases,
2238 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2241 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2244 tree type = TREE_TYPE (last_size);
2247 if (!special || TREE_CODE (size) != COND_EXPR)
2249 new_size = size_binop (PLUS_EXPR, first_bit, size);
2251 new_size = size_binop (MAX_EXPR, last_size, new_size);
2255 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2256 integer_zerop (TREE_OPERAND (size, 1))
2257 ? last_size : merge_sizes (last_size, first_bit,
2258 TREE_OPERAND (size, 1),
2260 integer_zerop (TREE_OPERAND (size, 2))
2261 ? last_size : merge_sizes (last_size, first_bit,
2262 TREE_OPERAND (size, 2),
2265 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
2266 when fed through SUBSTITUTE_IN_EXPR) into thinking that a constant
2267 size is not constant. */
2268 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2269 new_size = TREE_OPERAND (new_size, 0);
2274 /* Return the bit position of FIELD, in bits from the start of the record,
2275 and fold it as much as possible. This is a tree of type bitsizetype. */
2278 fold_bit_position (const_tree field)
2280 tree offset = DECL_FIELD_OFFSET (field);
2281 if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
2282 offset = size_binop (TREE_CODE (offset),
2283 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
2284 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
2286 offset = fold_convert (bitsizetype, offset);
2287 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2288 size_binop (MULT_EXPR, offset, bitsize_unit_node));
2291 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2292 related by the addition of a constant. Return that constant if so. */
2295 compute_related_constant (tree op0, tree op1)
2297 tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
2299 if (TREE_CODE (op0) == MULT_EXPR
2300 && TREE_CODE (op1) == MULT_EXPR
2301 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2302 && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2304 factor = TREE_OPERAND (op0, 1);
2305 op0 = TREE_OPERAND (op0, 0);
2306 op1 = TREE_OPERAND (op1, 0);
2311 op0_cst = split_plus (op0, &op0_var);
2312 op1_cst = split_plus (op1, &op1_var);
2313 result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2315 if (operand_equal_p (op0_var, op1_var, 0))
2316 return factor ? size_binop (MULT_EXPR, factor, result) : result;
2321 /* Utility function of above to split a tree OP which may be a sum, into a
2322 constant part, which is returned, and a variable part, which is stored
2323 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2327 split_plus (tree in, tree *pvar)
2329 /* Strip conversions in order to ease the tree traversal and maximize the
2330 potential for constant or plus/minus discovery. We need to be careful
2331 to always return and set *pvar to bitsizetype trees, but it's worth
2333 in = remove_conversions (in, false);
2335 *pvar = convert (bitsizetype, in);
2337 if (TREE_CODE (in) == INTEGER_CST)
2339 *pvar = bitsize_zero_node;
2340 return convert (bitsizetype, in);
2342 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2344 tree lhs_var, rhs_var;
2345 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2346 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2348 if (lhs_var == TREE_OPERAND (in, 0)
2349 && rhs_var == TREE_OPERAND (in, 1))
2350 return bitsize_zero_node;
2352 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2353 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2356 return bitsize_zero_node;
2359 /* Return a copy of TYPE but safe to modify in any way. */
2362 copy_type (tree type)
2364 tree new_type = copy_node (type);
2366 /* Unshare the language-specific data. */
2367 if (TYPE_LANG_SPECIFIC (type))
2369 TYPE_LANG_SPECIFIC (new_type) = NULL;
2370 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2373 /* And the contents of the language-specific slot if needed. */
2374 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2375 && TYPE_RM_VALUES (type))
2377 TYPE_RM_VALUES (new_type) = NULL_TREE;
2378 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2379 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2380 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2383 /* copy_node clears this field instead of copying it, because it is
2384 aliased with TREE_CHAIN. */
2385 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2387 TYPE_POINTER_TO (new_type) = NULL_TREE;
2388 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2389 TYPE_MAIN_VARIANT (new_type) = new_type;
2390 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2391 TYPE_CANONICAL (new_type) = new_type;
2396 /* Return a subtype of sizetype with range MIN to MAX and whose
2397 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2398 of the associated TYPE_DECL. */
2401 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2403 /* First build a type for the desired range. */
2404 tree type = build_nonshared_range_type (sizetype, min, max);
2406 /* Then set the index type. */
2407 SET_TYPE_INDEX_TYPE (type, index);
2408 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2413 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2414 sizetype is used. */
2417 create_range_type (tree type, tree min, tree max)
2424 /* First build a type with the base range. */
2425 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2426 TYPE_MAX_VALUE (type));
2428 /* Then set the actual range. */
2429 SET_TYPE_RM_MIN_VALUE (range_type, min);
2430 SET_TYPE_RM_MAX_VALUE (range_type, max);
2435 \f/* Return an extra subtype of TYPE with range MIN to MAX. */
2438 create_extra_subtype (tree type, tree min, tree max)
2440 const bool uns = TYPE_UNSIGNED (type);
2441 const unsigned prec = TYPE_PRECISION (type);
2442 tree subtype = uns ? make_unsigned_type (prec) : make_signed_type (prec);
2444 TREE_TYPE (subtype) = type;
2445 TYPE_EXTRA_SUBTYPE_P (subtype) = 1;
2447 SET_TYPE_RM_MIN_VALUE (subtype, min);
2448 SET_TYPE_RM_MAX_VALUE (subtype, max);
2453 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2454 NAME gives the name of the type to be used in the declaration. */
2457 create_type_stub_decl (tree name, tree type)
2459 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2460 DECL_ARTIFICIAL (type_decl) = 1;
2461 TYPE_ARTIFICIAL (type) = 1;
2465 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2466 used in the declaration. ARTIFICIAL_P is true if the declaration was
2467 generated by the compiler. DEBUG_INFO_P is true if we need to write
2468 debug information about this type. GNAT_NODE is used for the position
2472 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2475 enum tree_code code = TREE_CODE (type);
2477 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2480 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2481 gcc_assert (!TYPE_IS_DUMMY_P (type));
2483 /* If the type hasn't been named yet, we're naming it; preserve an existing
2484 TYPE_STUB_DECL that has been attached to it for some purpose. */
2485 if (!is_named && TYPE_STUB_DECL (type))
2487 type_decl = TYPE_STUB_DECL (type);
2488 DECL_NAME (type_decl) = name;
2491 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2493 DECL_ARTIFICIAL (type_decl) = artificial_p;
2494 TYPE_ARTIFICIAL (type) = artificial_p;
2496 /* Add this decl to the current binding level. */
2497 gnat_pushdecl (type_decl, gnat_node);
2499 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2500 causes the name to be also viewed as a "tag" by the debug back-end, with
2501 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2504 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2505 from multiple contexts, and "type_decl" references a copy of it: in such a
2506 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2507 with the mechanism above. */
2508 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2509 TYPE_STUB_DECL (type) = type_decl;
2511 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2512 back-end doesn't support, and for others if we don't need to. */
2513 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2514 DECL_IGNORED_P (type_decl) = 1;
2519 /* Return a VAR_DECL or CONST_DECL node.
2521 NAME gives the name of the variable. ASM_NAME is its assembler name
2522 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2523 the GCC tree for an optional initial expression; NULL_TREE if none.
2525 CONST_FLAG is true if this variable is constant, in which case we might
2526 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2528 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2529 definition to be made visible outside of the current compilation unit, for
2530 instance variable definitions in a package specification.
2532 EXTERN_FLAG is true when processing an external variable declaration (as
2533 opposed to a definition: no storage is to be allocated for the variable).
2535 STATIC_FLAG is only relevant when not at top level and indicates whether
2536 to always allocate storage to the variable.
2538 VOLATILE_FLAG is true if this variable is declared as volatile.
2540 ARTIFICIAL_P is true if the variable was generated by the compiler.
2542 DEBUG_INFO_P is true if we need to write debug information for it.
2544 ATTR_LIST is the list of attributes to be attached to the variable.
2546 GNAT_NODE is used for the position of the decl. */
2549 create_var_decl (tree name, tree asm_name, tree type, tree init,
2550 bool const_flag, bool public_flag, bool extern_flag,
2551 bool static_flag, bool volatile_flag, bool artificial_p,
2552 bool debug_info_p, struct attrib *attr_list,
2553 Node_Id gnat_node, bool const_decl_allowed_p)
2555 /* Whether the object has static storage duration, either explicitly or by
2556 virtue of being declared at the global level. */
2557 const bool static_storage = static_flag || global_bindings_p ();
2559 /* Whether the initializer is constant: for an external object or an object
2560 with static storage duration, we check that the initializer is a valid
2561 constant expression for initializing a static variable; otherwise, we
2562 only check that it is constant. */
2563 const bool init_const
2565 && gnat_types_compatible_p (type, TREE_TYPE (init))
2566 && (extern_flag || static_storage
2567 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2569 : TREE_CONSTANT (init)));
2571 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2572 case the initializer may be used in lieu of the DECL node (as done in
2573 Identifier_to_gnu). This is useful to prevent the need of elaboration
2574 code when an identifier for which such a DECL is made is in turn used
2575 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2576 but extra constraints apply to this choice (see below) and they are not
2577 relevant to the distinction we wish to make. */
2578 const bool constant_p = const_flag && init_const;
2580 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2581 and may be used for scalars in general but not for aggregates. */
2583 = build_decl (input_location,
2585 && const_decl_allowed_p
2586 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2589 /* Detect constants created by the front-end to hold 'reference to function
2590 calls for stabilization purposes. This is needed for renaming. */
2591 if (const_flag && init && POINTER_TYPE_P (type))
2594 if (TREE_CODE (inner) == COMPOUND_EXPR)
2595 inner = TREE_OPERAND (inner, 1);
2596 inner = remove_conversions (inner, true);
2597 if (TREE_CODE (inner) == ADDR_EXPR
2598 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2599 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2600 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2601 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2602 DECL_RETURN_VALUE_P (var_decl) = 1;
2605 /* If this is external, throw away any initializations (they will be done
2606 elsewhere) unless this is a constant for which we would like to remain
2607 able to get the initializer. If we are defining a global here, leave a
2608 constant initialization and save any variable elaborations for the
2609 elaboration routine. If we are just annotating types, throw away the
2610 initialization if it isn't a constant. */
2611 if ((extern_flag && !constant_p)
2612 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2615 /* At the global level, a non-constant initializer generates elaboration
2616 statements. Check that such statements are allowed, that is to say,
2617 not violating a No_Elaboration_Code restriction. */
2618 if (init && !init_const && global_bindings_p ())
2619 Check_Elaboration_Code_Allowed (gnat_node);
2621 /* Attach the initializer, if any. */
2622 DECL_INITIAL (var_decl) = init;
2624 /* Directly set some flags. */
2625 DECL_ARTIFICIAL (var_decl) = artificial_p;
2626 DECL_EXTERNAL (var_decl) = extern_flag;
2628 TREE_CONSTANT (var_decl) = constant_p;
2629 TREE_READONLY (var_decl) = const_flag;
2631 /* The object is public if it is external or if it is declared public
2632 and has static storage duration. */
2633 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2635 /* We need to allocate static storage for an object with static storage
2636 duration if it isn't external. */
2637 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2639 TREE_SIDE_EFFECTS (var_decl)
2640 = TREE_THIS_VOLATILE (var_decl)
2641 = TYPE_VOLATILE (type) | volatile_flag;
2643 if (TREE_SIDE_EFFECTS (var_decl))
2644 TREE_ADDRESSABLE (var_decl) = 1;
2646 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2647 try to fiddle with DECL_COMMON. However, on platforms that don't
2648 support global BSS sections, uninitialized global variables would
2649 go in DATA instead, thus increasing the size of the executable. */
2651 && TREE_CODE (var_decl) == VAR_DECL
2652 && TREE_PUBLIC (var_decl)
2653 && !have_global_bss_p ())
2654 DECL_COMMON (var_decl) = 1;
2656 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2657 since we will create an associated variable. Likewise for an external
2658 constant whose initializer is not absolute, because this would mean a
2659 global relocation in a read-only section which runs afoul of the PE-COFF
2660 run-time relocation mechanism. */
2662 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2666 && initializer_constant_valid_p (init, TREE_TYPE (init))
2667 != null_pointer_node))
2668 DECL_IGNORED_P (var_decl) = 1;
2670 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2671 if (TREE_CODE (var_decl) == VAR_DECL)
2672 process_attributes (&var_decl, &attr_list, true, gnat_node);
2674 /* Add this decl to the current binding level. */
2675 gnat_pushdecl (var_decl, gnat_node);
2677 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2679 /* Let the target mangle the name if this isn't a verbatim asm. */
2680 if (*IDENTIFIER_POINTER (asm_name) != '*')
2681 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2683 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2689 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2692 aggregate_type_contains_array_p (tree type)
2694 switch (TREE_CODE (type))
2698 case QUAL_UNION_TYPE:
2701 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2702 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2703 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2716 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2717 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2718 is the specified size of the field. If POS is nonzero, it is the bit
2719 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2720 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2721 means we are allowed to take the address of the field; if it is negative,
2722 we should not make a bitfield, which is used by make_aligning_type. */
2725 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2726 int packed, int addressable)
2728 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2730 DECL_CONTEXT (field_decl) = record_type;
2731 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2733 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2734 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2735 Likewise for an aggregate without specified position that contains an
2736 array, because in this case slices of variable length of this array
2737 must be handled by GCC and variable-sized objects need to be aligned
2738 to at least a byte boundary. */
2739 if (packed && (TYPE_MODE (type) == BLKmode
2741 && AGGREGATE_TYPE_P (type)
2742 && aggregate_type_contains_array_p (type))))
2743 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
2745 /* If a size is specified, use it. Otherwise, if the record type is packed
2746 compute a size to use, which may differ from the object's natural size.
2747 We always set a size in this case to trigger the checks for bitfield
2748 creation below, which is typically required when no position has been
2751 size = convert (bitsizetype, size);
2752 else if (packed == 1)
2754 size = rm_size (type);
2755 if (TYPE_MODE (type) == BLKmode)
2756 size = round_up (size, BITS_PER_UNIT);
2759 /* If we may, according to ADDRESSABLE, make a bitfield when the size is
2760 specified for two reasons: first if the size differs from the natural
2761 size; second, if the alignment is insufficient. There are a number of
2762 ways the latter can be true.
2764 We never make a bitfield if the type of the field has a nonconstant size,
2765 because no such entity requiring bitfield operations should reach here.
2767 We do *preventively* make a bitfield when there might be the need for it
2768 but we don't have all the necessary information to decide, as is the case
2769 of a field in a packed record.
2771 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2772 in layout_decl or finish_record_type to clear the bit_field indication if
2773 it is in fact not needed. */
2774 if (addressable >= 0
2776 && TREE_CODE (size) == INTEGER_CST
2777 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2778 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2779 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2781 || (TYPE_ALIGN (record_type) != 0
2782 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2784 DECL_BIT_FIELD (field_decl) = 1;
2785 DECL_SIZE (field_decl) = size;
2786 if (!packed && !pos)
2788 if (TYPE_ALIGN (record_type) != 0
2789 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2790 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
2792 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2796 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2798 /* Bump the alignment if need be, either for bitfield/packing purposes or
2799 to satisfy the type requirements if no such consideration applies. When
2800 we get the alignment from the type, indicate if this is from an explicit
2801 user request, which prevents stor-layout from lowering it later on. */
2803 unsigned int bit_align
2804 = (DECL_BIT_FIELD (field_decl) ? 1
2805 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
2807 if (bit_align > DECL_ALIGN (field_decl))
2808 SET_DECL_ALIGN (field_decl, bit_align);
2809 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2811 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2812 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2818 /* We need to pass in the alignment the DECL is known to have.
2819 This is the lowest-order bit set in POS, but no more than
2820 the alignment of the record, if one is specified. Note
2821 that an alignment of 0 is taken as infinite. */
2822 unsigned int known_align;
2824 if (tree_fits_uhwi_p (pos))
2825 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2827 known_align = BITS_PER_UNIT;
2829 if (TYPE_ALIGN (record_type)
2830 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2831 known_align = TYPE_ALIGN (record_type);
2833 layout_decl (field_decl, known_align);
2834 SET_DECL_OFFSET_ALIGN (field_decl,
2835 tree_fits_uhwi_p (pos)
2836 ? BIGGEST_ALIGNMENT : BITS_PER_UNIT);
2837 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2838 &DECL_FIELD_BIT_OFFSET (field_decl),
2839 DECL_OFFSET_ALIGN (field_decl), pos);
2842 /* In addition to what our caller says, claim the field is addressable if we
2843 know that its type is not suitable.
2845 The field may also be "technically" nonaddressable, meaning that even if
2846 we attempt to take the field's address we will actually get the address
2847 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2848 value we have at this point is not accurate enough, so we don't account
2849 for this here and let finish_record_type decide. */
2850 if (!addressable && !type_for_nonaliased_component_p (type))
2853 /* Note that there is a trade-off in making a field nonaddressable because
2854 this will cause type-based alias analysis to use the same alias set for
2855 accesses to the field as for accesses to the whole record: while doing
2856 so will make it more likely to disambiguate accesses to other objects
2857 and accesses to the field, it will make it less likely to disambiguate
2858 accesses to the other fields of the record and accesses to the field.
2859 If the record is fully static, then the trade-off is irrelevant since
2860 the fields of the record can always be disambiguated by their offsets
2861 but, if the record is dynamic, then it can become problematic. */
2862 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2867 /* Return a PARM_DECL node with NAME and TYPE. */
2870 create_param_decl (tree name, tree type)
2872 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2874 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2875 can lead to various ABI violations. */
2876 if (targetm.calls.promote_prototypes (NULL_TREE)
2877 && INTEGRAL_TYPE_P (type)
2878 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2880 /* We have to be careful about biased types here. Make a subtype
2881 of integer_type_node with the proper biasing. */
2882 if (TREE_CODE (type) == INTEGER_TYPE
2883 && TYPE_BIASED_REPRESENTATION_P (type))
2886 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2887 TREE_TYPE (subtype) = integer_type_node;
2888 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2889 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2890 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2894 type = integer_type_node;
2897 DECL_ARG_TYPE (param_decl) = type;
2901 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2902 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2903 changed. GNAT_NODE is used for the position of error messages. */
2906 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2909 struct attrib *attr;
2911 for (attr = *attr_list; attr; attr = attr->next)
2914 case ATTR_MACHINE_ATTRIBUTE:
2915 Sloc_to_locus (Sloc (gnat_node), &input_location);
2916 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2917 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2920 case ATTR_LINK_ALIAS:
2921 if (!DECL_EXTERNAL (*node))
2923 TREE_STATIC (*node) = 1;
2924 assemble_alias (*node, attr->name);
2928 case ATTR_WEAK_EXTERNAL:
2930 declare_weak (*node);
2932 post_error ("?weak declarations not supported on this target",
2936 case ATTR_LINK_SECTION:
2937 if (targetm_common.have_named_sections)
2939 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2940 DECL_COMMON (*node) = 0;
2943 post_error ("?section attributes are not supported for this target",
2947 case ATTR_LINK_CONSTRUCTOR:
2948 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2949 TREE_USED (*node) = 1;
2952 case ATTR_LINK_DESTRUCTOR:
2953 DECL_STATIC_DESTRUCTOR (*node) = 1;
2954 TREE_USED (*node) = 1;
2957 case ATTR_THREAD_LOCAL_STORAGE:
2958 set_decl_tls_model (*node, decl_default_tls_model (*node));
2959 DECL_COMMON (*node) = 0;
2966 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2970 value_factor_p (tree value, HOST_WIDE_INT factor)
2972 if (tree_fits_uhwi_p (value))
2973 return tree_to_uhwi (value) % factor == 0;
2975 if (TREE_CODE (value) == MULT_EXPR)
2976 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2977 || value_factor_p (TREE_OPERAND (value, 1), factor));
2982 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2983 feed it with the elaboration of GNAT_SCOPE. */
2985 static struct deferred_decl_context_node *
2986 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2988 struct deferred_decl_context_node *new_node;
2991 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2992 new_node->decl = decl;
2993 new_node->gnat_scope = gnat_scope;
2994 new_node->force_global = force_global;
2995 new_node->types.create (1);
2996 new_node->next = deferred_decl_context_queue;
2997 deferred_decl_context_queue = new_node;
3001 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
3002 feed it with the DECL_CONTEXT computed as part of N as soon as it is
3006 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
3008 n->types.safe_push (type);
3011 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
3012 NULL_TREE if it is not available. */
3015 compute_deferred_decl_context (Entity_Id gnat_scope)
3019 if (present_gnu_tree (gnat_scope))
3020 context = get_gnu_tree (gnat_scope);
3024 if (TREE_CODE (context) == TYPE_DECL)
3026 const tree context_type = TREE_TYPE (context);
3028 /* Skip dummy types: only the final ones can appear in the context
3030 if (TYPE_DUMMY_P (context_type))
3033 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
3036 context = context_type;
3042 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
3043 that cannot be processed yet, remove the other ones. If FORCE is true,
3044 force the processing for all nodes, use the global context when nodes don't
3045 have a GNU translation. */
3048 process_deferred_decl_context (bool force)
3050 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
3051 struct deferred_decl_context_node *node;
3055 bool processed = false;
3056 tree context = NULL_TREE;
3057 Entity_Id gnat_scope;
3061 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
3062 get the first scope. */
3063 gnat_scope = node->gnat_scope;
3064 while (Present (gnat_scope))
3066 context = compute_deferred_decl_context (gnat_scope);
3067 if (!force || context)
3069 gnat_scope = get_debug_scope (gnat_scope, NULL);
3072 /* Imported declarations must not be in a local context (i.e. not inside
3074 if (context && node->force_global > 0)
3080 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
3081 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
3085 /* If FORCE, we want to get rid of all nodes in the queue: in case there
3086 was no elaborated scope, use the global context. */
3087 if (force && !context)
3088 context = get_global_context ();
3095 DECL_CONTEXT (node->decl) = context;
3097 /* Propagate it to the TYPE_CONTEXT attributes of the requested
3099 FOR_EACH_VEC_ELT (node->types, i, t)
3101 gnat_set_type_context (t, context);
3106 /* If this node has been successfuly processed, remove it from the
3107 queue. Then move to the next node. */
3111 node->types.release ();
3119 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3122 scale_by_factor_of (tree expr, unsigned int value)
3124 unsigned HOST_WIDE_INT addend = 0;
3125 unsigned HOST_WIDE_INT factor = 1;
3127 /* Peel conversions around EXPR and try to extract bodies from function
3128 calls: it is possible to get the scale factor from size functions. */
3129 expr = remove_conversions (expr, true);
3130 if (TREE_CODE (expr) == CALL_EXPR)
3131 expr = maybe_inline_call_in_expr (expr);
3133 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3134 multiple of the scale factor we are looking for. */
3135 if (TREE_CODE (expr) == PLUS_EXPR
3136 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3137 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3139 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3140 expr = TREE_OPERAND (expr, 0);
3143 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3144 corresponding to the number of trailing zeros of the mask. */
3145 if (TREE_CODE (expr) == BIT_AND_EXPR
3146 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3148 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3151 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3159 /* If the addend is not a multiple of the factor we found, give up. In
3160 theory we could find a smaller common factor but it's useless for our
3161 needs. This situation arises when dealing with a field F1 with no
3162 alignment requirement but that is following a field F2 with such
3163 requirements. As long as we have F2's offset, we don't need alignment
3164 information to compute F1's. */
3165 if (addend % factor != 0)
3168 return factor * value;
3171 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
3172 unless we can prove these 2 fields are laid out in such a way that no gap
3173 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
3174 is the distance in bits between the end of PREV_FIELD and the starting
3175 position of CURR_FIELD. It is ignored if null. */
3178 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3180 /* If this is the first field of the record, there cannot be any gap */
3184 /* If the previous field is a union type, then return false: The only
3185 time when such a field is not the last field of the record is when
3186 there are other components at fixed positions after it (meaning there
3187 was a rep clause for every field), in which case we don't want the
3188 alignment constraint to override them. */
3189 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3192 /* If the distance between the end of prev_field and the beginning of
3193 curr_field is constant, then there is a gap if the value of this
3194 constant is not null. */
3195 if (offset && tree_fits_uhwi_p (offset))
3196 return !integer_zerop (offset);
3198 /* If the size and position of the previous field are constant,
3199 then check the sum of this size and position. There will be a gap
3200 iff it is not multiple of the current field alignment. */
3201 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3202 && tree_fits_uhwi_p (bit_position (prev_field)))
3203 return ((tree_to_uhwi (bit_position (prev_field))
3204 + tree_to_uhwi (DECL_SIZE (prev_field)))
3205 % DECL_ALIGN (curr_field) != 0);
3207 /* If both the position and size of the previous field are multiples
3208 of the current field alignment, there cannot be any gap. */
3209 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3210 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3213 /* Fallback, return that there may be a potential gap */
3217 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3221 create_label_decl (tree name, Node_Id gnat_node)
3224 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3226 SET_DECL_MODE (label_decl, VOIDmode);
3228 /* Add this decl to the current binding level. */
3229 gnat_pushdecl (label_decl, gnat_node);
3234 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3235 its assembler name, TYPE its type (a FUNCTION_TYPE or METHOD_TYPE node),
3236 PARAM_DECL_LIST the list of its parameters (a list of PARM_DECL nodes
3237 chained through the DECL_CHAIN field).
3239 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3241 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3242 definition to be made visible outside of the current compilation unit.
3244 EXTERN_FLAG is true when processing an external subprogram declaration.
3246 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3248 DEBUG_INFO_P is true if we need to write debug information for it.
3250 DEFINITION is true if the subprogram is to be considered as a definition.
3252 ATTR_LIST is the list of attributes to be attached to the subprogram.
3254 GNAT_NODE is used for the position of the decl. */
3257 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3258 enum inline_status_t inline_status, bool public_flag,
3259 bool extern_flag, bool artificial_p, bool debug_info_p,
3260 bool definition, struct attrib *attr_list,
3263 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3264 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3266 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3267 DECL_EXTERNAL (subprog_decl) = extern_flag;
3268 DECL_FUNCTION_IS_DEF (subprog_decl) = definition;
3269 DECL_IGNORED_P (subprog_decl) = !debug_info_p;
3270 TREE_PUBLIC (subprog_decl) = public_flag;
3272 switch (inline_status)
3275 DECL_UNINLINABLE (subprog_decl) = 1;
3282 if (Back_End_Inlining)
3284 decl_attributes (&subprog_decl,
3285 tree_cons (get_identifier ("always_inline"),
3286 NULL_TREE, NULL_TREE),
3287 ATTR_FLAG_TYPE_IN_PLACE);
3289 /* Inline_Always guarantees that every direct call is inlined and
3290 that there is no indirect reference to the subprogram, so the
3291 instance in the original package (as well as its clones in the
3292 client packages created for inter-unit inlining) can be made
3293 private, which causes the out-of-line body to be eliminated. */
3294 TREE_PUBLIC (subprog_decl) = 0;
3297 /* ... fall through ... */
3300 DECL_DISREGARD_INLINE_LIMITS (subprog_decl) = 1;
3302 /* ... fall through ... */
3305 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3306 if (!Debug_Generated_Code)
3307 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3314 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3316 /* Once everything is processed, finish the subprogram declaration. */
3317 finish_subprog_decl (subprog_decl, asm_name, type);
3319 /* Add this decl to the current binding level. */
3320 gnat_pushdecl (subprog_decl, gnat_node);
3322 /* Output the assembler code and/or RTL for the declaration. */
3323 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3325 return subprog_decl;
3328 /* Given a subprogram declaration DECL, its assembler name and its type,
3329 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
3332 finish_subprog_decl (tree decl, tree asm_name, tree type)
3335 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3338 DECL_ARTIFICIAL (result_decl) = 1;
3339 DECL_IGNORED_P (result_decl) = 1;
3340 DECL_CONTEXT (result_decl) = decl;
3341 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3342 DECL_RESULT (decl) = result_decl;
3344 /* Propagate the "const" property. */
3345 TREE_READONLY (decl) = TYPE_READONLY (type);
3347 /* Propagate the "pure" property. */
3348 DECL_PURE_P (decl) = TYPE_RESTRICT (type);
3350 /* Propagate the "noreturn" property. */
3351 TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
3355 /* Let the target mangle the name if this isn't a verbatim asm. */
3356 if (*IDENTIFIER_POINTER (asm_name) != '*')
3357 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3359 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3361 /* The expand_main_function circuitry expects "main_identifier_node" to
3362 designate the DECL_NAME of the 'main' entry point, in turn expected
3363 to be declared as the "main" function literally by default. Ada
3364 program entry points are typically declared with a different name
3365 within the binder generated file, exported as 'main' to satisfy the
3366 system expectations. Force main_identifier_node in this case. */
3367 if (asm_name == main_identifier_node)
3368 DECL_NAME (decl) = main_identifier_node;
3372 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3373 body. This routine needs to be invoked before processing the declarations
3374 appearing in the subprogram. */
3377 begin_subprog_body (tree subprog_decl)
3381 announce_function (subprog_decl);
3383 /* This function is being defined. */
3384 TREE_STATIC (subprog_decl) = 1;
3386 /* The failure of this assertion will likely come from a wrong context for
3387 the subprogram body, e.g. another procedure for a procedure declared at
3389 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3391 current_function_decl = subprog_decl;
3393 /* Enter a new binding level and show that all the parameters belong to
3397 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3398 param_decl = DECL_CHAIN (param_decl))
3399 DECL_CONTEXT (param_decl) = subprog_decl;
3401 make_decl_rtl (subprog_decl);
3404 /* Finish translating the current subprogram and set its BODY. */
3407 end_subprog_body (tree body)
3409 tree fndecl = current_function_decl;
3411 /* Attach the BLOCK for this level to the function and pop the level. */
3412 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3413 DECL_INITIAL (fndecl) = current_binding_level->block;
3416 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3417 if (TREE_CODE (body) == BIND_EXPR)
3419 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3420 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3423 DECL_SAVED_TREE (fndecl) = body;
3425 current_function_decl = decl_function_context (fndecl);
3428 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3431 rest_of_subprog_body_compilation (tree subprog_decl)
3433 /* We cannot track the location of errors past this point. */
3434 Current_Error_Node = Empty;
3436 /* If we're only annotating types, don't actually compile this function. */
3437 if (type_annotate_only)
3440 /* Dump functions before gimplification. */
3441 dump_function (TDI_original, subprog_decl);
3443 if (!decl_function_context (subprog_decl))
3444 cgraph_node::finalize_function (subprog_decl, false);
3446 /* Register this function with cgraph just far enough to get it
3447 added to our parent's nested function list. */
3448 (void) cgraph_node::get_create (subprog_decl);
3452 gnat_builtin_function (tree decl)
3454 gnat_pushdecl (decl, Empty);
3458 /* Return an integer type with the number of bits of precision given by
3459 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3460 it is a signed type. */
3463 gnat_type_for_size (unsigned precision, int unsignedp)
3468 if (precision <= 2 * MAX_BITS_PER_WORD
3469 && signed_and_unsigned_types[precision][unsignedp])
3470 return signed_and_unsigned_types[precision][unsignedp];
3473 t = make_unsigned_type (precision);
3475 t = make_signed_type (precision);
3476 TYPE_ARTIFICIAL (t) = 1;
3478 if (precision <= 2 * MAX_BITS_PER_WORD)
3479 signed_and_unsigned_types[precision][unsignedp] = t;
3483 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3484 TYPE_NAME (t) = get_identifier (type_name);
3490 /* Likewise for floating-point types. */
3493 float_type_for_precision (int precision, machine_mode mode)
3498 if (float_types[(int) mode])
3499 return float_types[(int) mode];
3501 float_types[(int) mode] = t = make_node (REAL_TYPE);
3502 TYPE_PRECISION (t) = precision;
3505 gcc_assert (TYPE_MODE (t) == mode);
3508 sprintf (type_name, "FLOAT_%d", precision);
3509 TYPE_NAME (t) = get_identifier (type_name);
3515 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3516 an unsigned type; otherwise a signed type is returned. */
3519 gnat_type_for_mode (machine_mode mode, int unsignedp)
3521 if (mode == BLKmode)
3524 if (mode == VOIDmode)
3525 return void_type_node;
3527 if (COMPLEX_MODE_P (mode))
3530 scalar_float_mode float_mode;
3531 if (is_a <scalar_float_mode> (mode, &float_mode))
3532 return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3535 scalar_int_mode int_mode;
3536 if (is_a <scalar_int_mode> (mode, &int_mode))
3537 return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
3539 if (VECTOR_MODE_P (mode))
3541 machine_mode inner_mode = GET_MODE_INNER (mode);
3542 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3544 return build_vector_type_for_mode (inner_type, mode);
3550 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3551 signedness being specified by UNSIGNEDP. */
3554 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3556 if (type_node == char_type_node)
3557 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3559 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3561 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3563 type = copy_type (type);
3564 TREE_TYPE (type) = type_node;
3566 else if (TREE_TYPE (type_node)
3567 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3568 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3570 type = copy_type (type);
3571 TREE_TYPE (type) = TREE_TYPE (type_node);
3577 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3578 transparently converted to each other. */
3581 gnat_types_compatible_p (tree t1, tree t2)
3583 enum tree_code code;
3585 /* This is the default criterion. */
3586 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3589 /* We only check structural equivalence here. */
3590 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3593 /* Vector types are also compatible if they have the same number of subparts
3594 and the same form of (scalar) element type. */
3595 if (code == VECTOR_TYPE
3596 && known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
3597 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3598 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3601 /* Array types are also compatible if they are constrained and have the same
3602 domain(s), the same component type and the same scalar storage order. */
3603 if (code == ARRAY_TYPE
3604 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3605 || (TYPE_DOMAIN (t1)
3607 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3608 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3609 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3610 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3611 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3612 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3613 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3614 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3620 /* Return true if EXPR is a useless type conversion. */
3623 gnat_useless_type_conversion (tree expr)
3625 if (CONVERT_EXPR_P (expr)
3626 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3627 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3628 return gnat_types_compatible_p (TREE_TYPE (expr),
3629 TREE_TYPE (TREE_OPERAND (expr, 0)));
3634 /* Return true if T, a {FUNCTION,METHOD}_TYPE, has the specified flags. */
3637 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3638 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3640 return TYPE_CI_CO_LIST (t) == cico_list
3641 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3642 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3643 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3646 /* EXP is an expression for the size of an object. If this size contains
3647 discriminant references, replace them with the maximum (if MAX_P) or
3648 minimum (if !MAX_P) possible value of the discriminant.
3650 Note that the expression may have already been gimplified,in which case
3651 COND_EXPRs have VOID_TYPE and no operands, and this must be handled. */
3654 max_size (tree exp, bool max_p)
3656 enum tree_code code = TREE_CODE (exp);
3657 tree type = TREE_TYPE (exp);
3660 switch (TREE_CODE_CLASS (code))
3662 case tcc_declaration:
3666 case tcc_exceptional:
3667 gcc_assert (code == SSA_NAME);
3671 if (code == CALL_EXPR)
3676 t = maybe_inline_call_in_expr (exp);
3678 return max_size (t, max_p);
3680 n = call_expr_nargs (exp);
3682 argarray = XALLOCAVEC (tree, n);
3683 for (i = 0; i < n; i++)
3684 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3685 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3690 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3691 modify. Otherwise, we treat it like a variable. */
3692 if (CONTAINS_PLACEHOLDER_P (exp))
3694 tree base_type = get_base_type (TREE_TYPE (TREE_OPERAND (exp, 1)));
3696 = fold_convert (base_type,
3698 ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3700 /* Walk down the extra subtypes to get more restrictive bounds. */
3701 while (TYPE_IS_EXTRA_SUBTYPE_P (type))
3703 type = TREE_TYPE (type);
3705 val = fold_build2 (MIN_EXPR, base_type, val,
3706 fold_convert (base_type,
3707 TYPE_MAX_VALUE (type)));
3709 val = fold_build2 (MAX_EXPR, base_type, val,
3710 fold_convert (base_type,
3711 TYPE_MIN_VALUE (type)));
3714 return fold_convert (type, max_size (val, max_p));
3719 case tcc_comparison:
3720 return build_int_cst (type, max_p ? 1 : 0);
3723 op0 = TREE_OPERAND (exp, 0);
3725 if (code == NON_LVALUE_EXPR)
3726 return max_size (op0, max_p);
3728 if (VOID_TYPE_P (TREE_TYPE (op0)))
3729 return max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type);
3731 op0 = max_size (op0, code == NEGATE_EXPR ? !max_p : max_p);
3733 if (op0 == TREE_OPERAND (exp, 0))
3736 return fold_build1 (code, type, op0);
3739 op0 = TREE_OPERAND (exp, 0);
3740 op1 = TREE_OPERAND (exp, 1);
3742 /* If we have a multiply-add with a "negative" value in an unsigned
3743 type, do a multiply-subtract with the negated value, in order to
3744 avoid creating a spurious overflow below. */
3745 if (code == PLUS_EXPR
3746 && TREE_CODE (op0) == MULT_EXPR
3747 && TYPE_UNSIGNED (type)
3748 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
3749 && !TREE_OVERFLOW (TREE_OPERAND (op0, 1))
3750 && tree_int_cst_sign_bit (TREE_OPERAND (op0, 1)))
3753 op1 = build2 (MULT_EXPR, type, TREE_OPERAND (op0, 0),
3754 fold_build1 (NEGATE_EXPR, type,
3755 TREE_OPERAND (op0, 1)));
3760 op0 = max_size (op0, max_p);
3761 op1 = max_size (op1, code == MINUS_EXPR ? !max_p : max_p);
3763 if ((code == MINUS_EXPR || code == PLUS_EXPR))
3765 /* If the op0 has overflowed and the op1 is a variable,
3766 propagate the overflow by returning the op0. */
3767 if (TREE_CODE (op0) == INTEGER_CST
3768 && TREE_OVERFLOW (op0)
3769 && TREE_CODE (op1) != INTEGER_CST)
3772 /* If we have a "negative" value in an unsigned type, do the
3773 opposite operation on the negated value, in order to avoid
3774 creating a spurious overflow below. */
3775 if (TYPE_UNSIGNED (type)
3776 && TREE_CODE (op1) == INTEGER_CST
3777 && !TREE_OVERFLOW (op1)
3778 && tree_int_cst_sign_bit (op1))
3780 op1 = fold_build1 (NEGATE_EXPR, type, op1);
3781 code = (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR);
3785 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3788 /* We need to detect overflows so we call size_binop here. */
3789 return size_binop (code, op0, op1);
3791 case tcc_expression:
3792 switch (TREE_CODE_LENGTH (code))
3795 if (code == SAVE_EXPR)
3798 op0 = max_size (TREE_OPERAND (exp, 0),
3799 code == TRUTH_NOT_EXPR ? !max_p : max_p);
3801 if (op0 == TREE_OPERAND (exp, 0))
3804 return fold_build1 (code, type, op0);
3807 if (code == COMPOUND_EXPR)
3808 return max_size (TREE_OPERAND (exp, 1), max_p);
3810 op0 = max_size (TREE_OPERAND (exp, 0), max_p);
3811 op1 = max_size (TREE_OPERAND (exp, 1), max_p);
3813 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3816 return fold_build2 (code, type, op0, op1);
3819 if (code == COND_EXPR)
3821 op0 = TREE_OPERAND (exp, 0);
3822 op1 = TREE_OPERAND (exp, 1);
3823 op2 = TREE_OPERAND (exp, 2);
3828 op1 = max_size (op1, max_p);
3829 op2 = max_size (op2, max_p);
3831 /* If we have the MAX of a "negative" value in an unsigned type
3832 and zero for a length expression, just return zero. */
3834 && TREE_CODE (op0) == LE_EXPR
3835 && TYPE_UNSIGNED (type)
3836 && TREE_CODE (op1) == INTEGER_CST
3837 && !TREE_OVERFLOW (op1)
3838 && tree_int_cst_sign_bit (op1)
3839 && integer_zerop (op2))
3842 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type, op1, op2);
3850 /* Other tree classes cannot happen. */
3858 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3859 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3860 Return a constructor for the template. */
3863 build_template (tree template_type, tree array_type, tree expr)
3865 vec<constructor_elt, va_gc> *template_elts = NULL;
3866 tree bound_list = NULL_TREE;
3869 while (TREE_CODE (array_type) == RECORD_TYPE
3870 && (TYPE_PADDING_P (array_type)
3871 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3872 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3874 if (TREE_CODE (array_type) == ARRAY_TYPE
3875 || (TREE_CODE (array_type) == INTEGER_TYPE
3876 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3877 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3879 /* First make the list for a CONSTRUCTOR for the template. Go down the
3880 field list of the template instead of the type chain because this
3881 array might be an Ada array of arrays and we can't tell where the
3882 nested arrays stop being the underlying object. */
3884 for (field = TYPE_FIELDS (template_type); field;
3886 ? (bound_list = TREE_CHAIN (bound_list))
3887 : (array_type = TREE_TYPE (array_type))),
3888 field = DECL_CHAIN (DECL_CHAIN (field)))
3890 tree bounds, min, max;
3892 /* If we have a bound list, get the bounds from there. Likewise
3893 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3894 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3895 This will give us a maximum range. */
3897 bounds = TREE_VALUE (bound_list);
3898 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3899 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3900 else if (expr && TREE_CODE (expr) == PARM_DECL
3901 && DECL_BY_COMPONENT_PTR_P (expr))
3902 bounds = TREE_TYPE (field);
3906 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3907 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3909 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3910 substitute it from OBJECT. */
3911 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3912 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3914 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3915 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3918 return gnat_build_constructor (template_type, template_elts);
3921 /* Return true if TYPE is suitable for the element type of a vector. */
3924 type_for_vector_element_p (tree type)
3928 if (!INTEGRAL_TYPE_P (type)
3929 && !SCALAR_FLOAT_TYPE_P (type)
3930 && !FIXED_POINT_TYPE_P (type))
3933 mode = TYPE_MODE (type);
3934 if (GET_MODE_CLASS (mode) != MODE_INT
3935 && !SCALAR_FLOAT_MODE_P (mode)
3936 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3942 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3943 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3944 attribute declaration and want to issue error messages on failure. */
3947 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3949 unsigned HOST_WIDE_INT size_int, inner_size_int;
3952 /* Silently punt on variable sizes. We can't make vector types for them,
3953 need to ignore them on front-end generated subtypes of unconstrained
3954 base types, and this attribute is for binding implementors, not end
3955 users, so we should never get there from legitimate explicit uses. */
3956 if (!tree_fits_uhwi_p (size))
3958 size_int = tree_to_uhwi (size);
3960 if (!type_for_vector_element_p (inner_type))
3963 error ("invalid element type for attribute %qs",
3964 IDENTIFIER_POINTER (attribute));
3967 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3969 if (size_int % inner_size_int)
3972 error ("vector size not an integral multiple of component size");
3979 error ("zero vector size");
3983 nunits = size_int / inner_size_int;
3984 if (nunits & (nunits - 1))
3987 error ("number of components of vector not a power of two");
3991 return build_vector_type (inner_type, nunits);
3994 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3995 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3996 processing the attribute and want to issue error messages on failure. */
3999 build_vector_type_for_array (tree array_type, tree attribute)
4001 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
4002 TYPE_SIZE_UNIT (array_type),
4007 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
4011 /* Build a type to be used to represent an aliased object whose nominal type
4012 is an unconstrained array. This consists of a RECORD_TYPE containing a
4013 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4014 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4015 an arbitrary unconstrained object. Use NAME as the name of the record.
4016 DEBUG_INFO_P is true if we need to write debug information for the type. */
4019 build_unc_object_type (tree template_type, tree object_type, tree name,
4023 tree type = make_node (RECORD_TYPE);
4025 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4026 NULL_TREE, NULL_TREE, 0, 1);
4028 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4029 NULL_TREE, NULL_TREE, 0, 1);
4031 TYPE_NAME (type) = name;
4032 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
4033 DECL_CHAIN (template_field) = array_field;
4034 finish_record_type (type, template_field, 0, true);
4036 /* Declare it now since it will never be declared otherwise. This is
4037 necessary to ensure that its subtrees are properly marked. */
4038 decl = create_type_decl (name, type, true, debug_info_p, Empty);
4040 /* template_type will not be used elsewhere than here, so to keep the debug
4041 info clean and in order to avoid scoping issues, make decl its
4043 gnat_set_type_context (template_type, decl);
4048 /* Same, taking a thin or fat pointer type instead of a template type. */
4051 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
4052 tree name, bool debug_info_p)
4056 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
4059 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
4060 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
4061 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
4064 build_unc_object_type (template_type, object_type, name, debug_info_p);
4067 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4068 In the normal case this is just two adjustments, but we have more to
4069 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4072 update_pointer_to (tree old_type, tree new_type)
4074 tree ptr = TYPE_POINTER_TO (old_type);
4075 tree ref = TYPE_REFERENCE_TO (old_type);
4078 /* If this is the main variant, process all the other variants first. */
4079 if (TYPE_MAIN_VARIANT (old_type) == old_type)
4080 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4081 update_pointer_to (t, new_type);
4083 /* If no pointers and no references, we are done. */
4087 /* Merge the old type qualifiers in the new type.
4089 Each old variant has qualifiers for specific reasons, and the new
4090 designated type as well. Each set of qualifiers represents useful
4091 information grabbed at some point, and merging the two simply unifies
4092 these inputs into the final type description.
4094 Consider for instance a volatile type frozen after an access to constant
4095 type designating it; after the designated type's freeze, we get here with
4096 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4097 when the access type was processed. We will make a volatile and readonly
4098 designated type, because that's what it really is.
4100 We might also get here for a non-dummy OLD_TYPE variant with different
4101 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4102 to private record type elaboration (see the comments around the call to
4103 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4104 the qualifiers in those cases too, to avoid accidentally discarding the
4105 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4107 = build_qualified_type (new_type,
4108 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4110 /* If old type and new type are identical, there is nothing to do. */
4111 if (old_type == new_type)
4114 /* Otherwise, first handle the simple case. */
4115 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4117 tree new_ptr, new_ref;
4119 /* If pointer or reference already points to new type, nothing to do.
4120 This can happen as update_pointer_to can be invoked multiple times
4121 on the same couple of types because of the type variants. */
4122 if ((ptr && TREE_TYPE (ptr) == new_type)
4123 || (ref && TREE_TYPE (ref) == new_type))
4126 /* Chain PTR and its variants at the end. */
4127 new_ptr = TYPE_POINTER_TO (new_type);
4130 while (TYPE_NEXT_PTR_TO (new_ptr))
4131 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4132 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4135 TYPE_POINTER_TO (new_type) = ptr;
4137 /* Now adjust them. */
4138 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4139 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4141 TREE_TYPE (t) = new_type;
4142 if (TYPE_NULL_BOUNDS (t))
4143 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4146 /* Chain REF and its variants at the end. */
4147 new_ref = TYPE_REFERENCE_TO (new_type);
4150 while (TYPE_NEXT_REF_TO (new_ref))
4151 new_ref = TYPE_NEXT_REF_TO (new_ref);
4152 TYPE_NEXT_REF_TO (new_ref) = ref;
4155 TYPE_REFERENCE_TO (new_type) = ref;
4157 /* Now adjust them. */
4158 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4159 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4160 TREE_TYPE (t) = new_type;
4162 TYPE_POINTER_TO (old_type) = NULL_TREE;
4163 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4166 /* Now deal with the unconstrained array case. In this case the pointer
4167 is actually a record where both fields are pointers to dummy nodes.
4168 Turn them into pointers to the correct types using update_pointer_to.
4169 Likewise for the pointer to the object record (thin pointer). */
4172 tree new_ptr = TYPE_POINTER_TO (new_type);
4174 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4176 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4177 since update_pointer_to can be invoked multiple times on the same
4178 couple of types because of the type variants. */
4179 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4183 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4184 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4187 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4188 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4190 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4191 TYPE_OBJECT_RECORD_TYPE (new_type));
4193 TYPE_POINTER_TO (old_type) = NULL_TREE;
4194 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4198 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4199 unconstrained one. This involves making or finding a template. */
4202 convert_to_fat_pointer (tree type, tree expr)
4204 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4205 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4206 tree etype = TREE_TYPE (expr);
4208 vec<constructor_elt, va_gc> *v;
4211 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4212 array (compare_fat_pointers ensures that this is the full discriminant)
4213 and a valid pointer to the bounds. This latter property is necessary
4214 since the compiler can hoist the load of the bounds done through it. */
4215 if (integer_zerop (expr))
4217 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4218 tree null_bounds, t;
4220 if (TYPE_NULL_BOUNDS (ptr_template_type))
4221 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4224 /* The template type can still be dummy at this point so we build an
4225 empty constructor. The middle-end will fill it in with zeros. */
4226 t = build_constructor (template_type, NULL);
4227 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4228 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4229 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4232 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4233 fold_convert (p_array_type, null_pointer_node));
4234 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4235 t = build_constructor (type, v);
4236 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4237 TREE_CONSTANT (t) = 0;
4238 TREE_STATIC (t) = 1;
4243 /* If EXPR is a thin pointer, make template and data from the record. */
4244 if (TYPE_IS_THIN_POINTER_P (etype))
4246 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4248 expr = gnat_protect_expr (expr);
4250 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4251 the thin pointer value has been shifted so we shift it back to get
4252 the template address. */
4253 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4256 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4257 fold_build1 (NEGATE_EXPR, sizetype,
4259 (DECL_CHAIN (field))));
4261 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4265 /* Otherwise we explicitly take the address of the fields. */
4268 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4270 = build_unary_op (ADDR_EXPR, NULL_TREE,
4271 build_component_ref (expr, field, false));
4272 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4273 build_component_ref (expr, DECL_CHAIN (field),
4278 /* Otherwise, build the constructor for the template. */
4281 = build_unary_op (ADDR_EXPR, NULL_TREE,
4282 build_template (template_type, TREE_TYPE (etype),
4285 /* The final result is a constructor for the fat pointer.
4287 If EXPR is an argument of a foreign convention subprogram, the type it
4288 points to is directly the component type. In this case, the expression
4289 type may not match the corresponding FIELD_DECL type at this point, so we
4290 call "convert" here to fix that up if necessary. This type consistency is
4291 required, for instance because it ensures that possible later folding of
4292 COMPONENT_REFs against this constructor always yields something of the
4293 same type as the initial reference.
4295 Note that the call to "build_template" above is still fine because it
4296 will only refer to the provided TEMPLATE_TYPE in this case. */
4297 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4298 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4299 return gnat_build_constructor (type, v);
4302 /* Create an expression whose value is that of EXPR,
4303 converted to type TYPE. The TREE_TYPE of the value
4304 is always TYPE. This function implements all reasonable
4305 conversions; callers should filter out those that are
4306 not permitted by the language being compiled. */
4309 convert (tree type, tree expr)
4311 tree etype = TREE_TYPE (expr);
4312 enum tree_code ecode = TREE_CODE (etype);
4313 enum tree_code code = TREE_CODE (type);
4315 /* If the expression is already of the right type, we are done. */
4319 /* If both input and output have padding and are of variable size, do this
4320 as an unchecked conversion. Likewise if one is a mere variant of the
4321 other, so we avoid a pointless unpad/repad sequence. */
4322 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4323 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4324 && (!TREE_CONSTANT (TYPE_SIZE (type))
4325 || !TREE_CONSTANT (TYPE_SIZE (etype))
4326 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4327 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4328 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4331 /* If the output type has padding, convert to the inner type and make a
4332 constructor to build the record, unless a variable size is involved. */
4333 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4335 /* If we previously converted from another type and our type is
4336 of variable size, remove the conversion to avoid the need for
4337 variable-sized temporaries. Likewise for a conversion between
4338 original and packable version. */
4339 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4340 && (!TREE_CONSTANT (TYPE_SIZE (type))
4341 || (ecode == RECORD_TYPE
4342 && TYPE_NAME (etype)
4343 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4344 expr = TREE_OPERAND (expr, 0);
4346 /* If we are just removing the padding from expr, convert the original
4347 object if we have variable size in order to avoid the need for some
4348 variable-sized temporaries. Likewise if the padding is a variant
4349 of the other, so we avoid a pointless unpad/repad sequence. */
4350 if (TREE_CODE (expr) == COMPONENT_REF
4351 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4352 && (!TREE_CONSTANT (TYPE_SIZE (type))
4353 || TYPE_MAIN_VARIANT (type)
4354 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4355 || (ecode == RECORD_TYPE
4356 && TYPE_NAME (etype)
4357 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4358 return convert (type, TREE_OPERAND (expr, 0));
4360 /* If the inner type is of self-referential size and the expression type
4361 is a record, do this as an unchecked conversion unless both types are
4362 essentially the same. */
4363 if (ecode == RECORD_TYPE
4364 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
4365 && TYPE_MAIN_VARIANT (etype)
4366 != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
4367 return unchecked_convert (type, expr, false);
4369 /* If we are converting between array types with variable size, do the
4370 final conversion as an unchecked conversion, again to avoid the need
4371 for some variable-sized temporaries. If valid, this conversion is
4372 very likely purely technical and without real effects. */
4373 if (ecode == ARRAY_TYPE
4374 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4375 && !TREE_CONSTANT (TYPE_SIZE (etype))
4376 && !TREE_CONSTANT (TYPE_SIZE (type)))
4377 return unchecked_convert (type,
4378 convert (TREE_TYPE (TYPE_FIELDS (type)),
4382 tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);
4384 /* If converting to the inner type has already created a CONSTRUCTOR with
4385 the right size, then reuse it instead of creating another one. This
4386 can happen for the padding type built to overalign local variables. */
4387 if (TREE_CODE (t) == VIEW_CONVERT_EXPR
4388 && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
4389 && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
4390 && tree_int_cst_equal (TYPE_SIZE (type),
4391 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
4392 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));
4394 vec<constructor_elt, va_gc> *v;
4396 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
4397 return gnat_build_constructor (type, v);
4400 /* If the input type has padding, remove it and convert to the output type.
4401 The conditions ordering is arranged to ensure that the output type is not
4402 a padding type here, as it is not clear whether the conversion would
4403 always be correct if this was to happen. */
4404 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4408 /* If we have just converted to this padded type, just get the
4409 inner expression. */
4410 if (TREE_CODE (expr) == CONSTRUCTOR)
4411 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4413 /* Otherwise, build an explicit component reference. */
4415 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4417 return convert (type, unpadded);
4420 /* If the input is a biased type, convert first to the base type and add
4421 the bias. Note that the bias must go through a full conversion to the
4422 base type, lest it is itself a biased value; this happens for subtypes
4424 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4425 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4426 fold_convert (TREE_TYPE (etype), expr),
4427 convert (TREE_TYPE (etype),
4428 TYPE_MIN_VALUE (etype))));
4430 /* If the input is a justified modular type, we need to extract the actual
4431 object before converting it to an other type with the exceptions of an
4432 [unconstrained] array or a mere type variant. It is useful to avoid
4433 the extraction and conversion in these cases because it could end up
4434 replacing a VAR_DECL by a constructor and we might be about the take
4435 the address of the result. */
4436 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4437 && code != ARRAY_TYPE
4438 && code != UNCONSTRAINED_ARRAY_TYPE
4439 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4441 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4443 /* If converting to a type that contains a template, convert to the data
4444 type and then build the template. */
4445 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4447 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4448 vec<constructor_elt, va_gc> *v;
4451 /* If the source already has a template, get a reference to the
4452 associated array only, as we are going to rebuild a template
4453 for the target type anyway. */
4454 expr = maybe_unconstrained_array (expr);
4456 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4457 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4458 obj_type, NULL_TREE));
4460 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4461 convert (obj_type, expr));
4462 return gnat_build_constructor (type, v);
4465 /* There are some cases of expressions that we process specially. */
4466 switch (TREE_CODE (expr))
4472 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4473 conversion in gnat_expand_expr. NULL_EXPR does not represent
4474 and actual value, so no conversion is needed. */
4475 expr = copy_node (expr);
4476 TREE_TYPE (expr) = type;
4480 /* If we are converting a STRING_CST to another constrained array type,
4481 just make a new one in the proper type. */
4483 && !(TREE_CONSTANT (TYPE_SIZE (etype))
4484 && !TREE_CONSTANT (TYPE_SIZE (type))))
4486 expr = copy_node (expr);
4487 TREE_TYPE (expr) = type;
4493 /* If we are converting a VECTOR_CST to a mere type variant, just make
4494 a new one in the proper type. */
4495 if (code == ecode && gnat_types_compatible_p (type, etype))
4497 expr = copy_node (expr);
4498 TREE_TYPE (expr) = type;
4504 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4505 another padding type around the same type, just make a new one in
4508 && (gnat_types_compatible_p (type, etype)
4509 || (code == RECORD_TYPE
4510 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4511 && TREE_TYPE (TYPE_FIELDS (type))
4512 == TREE_TYPE (TYPE_FIELDS (etype)))))
4514 expr = copy_node (expr);
4515 TREE_TYPE (expr) = type;
4516 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4520 /* Likewise for a conversion between original and packable version, or
4521 conversion between types of the same size and with the same list of
4522 fields, but we have to work harder to preserve type consistency. */
4524 && code == RECORD_TYPE
4525 && (TYPE_NAME (type) == TYPE_NAME (etype)
4526 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4529 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4530 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4531 vec<constructor_elt, va_gc> *v;
4533 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4534 unsigned HOST_WIDE_INT idx;
4537 /* Whether we need to clear TREE_CONSTANT et al. on the output
4538 constructor when we convert in place. */
4539 bool clear_constant = false;
4541 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4543 /* Skip the missing fields in the CONSTRUCTOR. */
4544 while (efield && field && !SAME_FIELD_P (efield, index))
4546 efield = DECL_CHAIN (efield);
4547 field = DECL_CHAIN (field);
4549 /* The field must be the same. */
4550 if (!(efield && field && SAME_FIELD_P (efield, field)))
4553 = {field, convert (TREE_TYPE (field), value)};
4554 v->quick_push (elt);
4556 /* If packing has made this field a bitfield and the input
4557 value couldn't be emitted statically any more, we need to
4558 clear TREE_CONSTANT on our output. */
4560 && TREE_CONSTANT (expr)
4561 && !CONSTRUCTOR_BITFIELD_P (efield)
4562 && CONSTRUCTOR_BITFIELD_P (field)
4563 && !initializer_constant_valid_for_bitfield_p (value))
4564 clear_constant = true;
4566 efield = DECL_CHAIN (efield);
4567 field = DECL_CHAIN (field);
4570 /* If we have been able to match and convert all the input fields
4571 to their output type, convert in place now. We'll fallback to a
4572 view conversion downstream otherwise. */
4575 expr = copy_node (expr);
4576 TREE_TYPE (expr) = type;
4577 CONSTRUCTOR_ELTS (expr) = v;
4579 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4584 /* Likewise for a conversion between array type and vector type with a
4585 compatible representative array. */
4586 else if (code == VECTOR_TYPE
4587 && ecode == ARRAY_TYPE
4588 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4591 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4592 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4593 vec<constructor_elt, va_gc> *v;
4594 unsigned HOST_WIDE_INT ix;
4597 /* Build a VECTOR_CST from a *constant* array constructor. */
4598 if (TREE_CONSTANT (expr))
4600 bool constant_p = true;
4602 /* Iterate through elements and check if all constructor
4603 elements are *_CSTs. */
4604 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4605 if (!CONSTANT_CLASS_P (value))
4612 return build_vector_from_ctor (type,
4613 CONSTRUCTOR_ELTS (expr));
4616 /* Otherwise, build a regular vector constructor. */
4618 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4620 constructor_elt elt = {NULL_TREE, value};
4621 v->quick_push (elt);
4623 expr = copy_node (expr);
4624 TREE_TYPE (expr) = type;
4625 CONSTRUCTOR_ELTS (expr) = v;
4630 case UNCONSTRAINED_ARRAY_REF:
4631 /* First retrieve the underlying array. */
4632 expr = maybe_unconstrained_array (expr);
4633 etype = TREE_TYPE (expr);
4634 ecode = TREE_CODE (etype);
4637 case VIEW_CONVERT_EXPR:
4639 /* GCC 4.x is very sensitive to type consistency overall, and view
4640 conversions thus are very frequent. Even though just "convert"ing
4641 the inner operand to the output type is fine in most cases, it
4642 might expose unexpected input/output type mismatches in special
4643 circumstances so we avoid such recursive calls when we can. */
4644 tree op0 = TREE_OPERAND (expr, 0);
4646 /* If we are converting back to the original type, we can just
4647 lift the input conversion. This is a common occurrence with
4648 switches back-and-forth amongst type variants. */
4649 if (type == TREE_TYPE (op0))
4652 /* Otherwise, if we're converting between two aggregate or vector
4653 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4654 target type in place or to just convert the inner expression. */
4655 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4656 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4658 /* If we are converting between mere variants, we can just
4659 substitute the VIEW_CONVERT_EXPR in place. */
4660 if (gnat_types_compatible_p (type, etype))
4661 return build1 (VIEW_CONVERT_EXPR, type, op0);
4663 /* Otherwise, we may just bypass the input view conversion unless
4664 one of the types is a fat pointer, which is handled by
4665 specialized code below which relies on exact type matching. */
4666 else if (!TYPE_IS_FAT_POINTER_P (type)
4667 && !TYPE_IS_FAT_POINTER_P (etype))
4668 return convert (type, op0);
4678 /* Check for converting to a pointer to an unconstrained array. */
4679 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4680 return convert_to_fat_pointer (type, expr);
4682 /* If we are converting between two aggregate or vector types that are mere
4683 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4684 to a vector type from its representative array type. */
4685 else if ((code == ecode
4686 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4687 && gnat_types_compatible_p (type, etype))
4688 || (code == VECTOR_TYPE
4689 && ecode == ARRAY_TYPE
4690 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4692 return build1 (VIEW_CONVERT_EXPR, type, expr);
4694 /* If we are converting between tagged types, try to upcast properly.
4695 But don't do it if we are just annotating types since tagged types
4696 aren't fully laid out in this mode. */
4697 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4698 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type)
4699 && !type_annotate_only)
4701 tree child_etype = etype;
4703 tree field = TYPE_FIELDS (child_etype);
4704 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4705 return build_component_ref (expr, field, false);
4706 child_etype = TREE_TYPE (field);
4707 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4710 /* If we are converting from a smaller form of record type back to it, just
4711 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4712 size on both sides. */
4713 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4714 && smaller_form_type_p (etype, type))
4716 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4717 false, false, false, true),
4719 return build1 (VIEW_CONVERT_EXPR, type, expr);
4722 /* In all other cases of related types, make a NOP_EXPR. */
4723 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4724 return fold_convert (type, expr);
4729 return fold_build1 (CONVERT_EXPR, type, expr);
4732 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4733 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4734 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4735 return unchecked_convert (type, expr, false);
4737 /* If the output is a biased type, convert first to the base type and
4738 subtract the bias. Note that the bias itself must go through a full
4739 conversion to the base type, lest it is a biased value; this happens
4740 for subtypes of biased types. */
4741 if (TYPE_BIASED_REPRESENTATION_P (type))
4742 return fold_convert (type,
4743 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4744 convert (TREE_TYPE (type), expr),
4745 convert (TREE_TYPE (type),
4746 TYPE_MIN_VALUE (type))));
4748 /* ... fall through ... */
4752 /* If we are converting an additive expression to an integer type
4753 with lower precision, be wary of the optimization that can be
4754 applied by convert_to_integer. There are 2 problematic cases:
4755 - if the first operand was originally of a biased type,
4756 because we could be recursively called to convert it
4757 to an intermediate type and thus rematerialize the
4758 additive operator endlessly,
4759 - if the expression contains a placeholder, because an
4760 intermediate conversion that changes the sign could
4761 be inserted and thus introduce an artificial overflow
4762 at compile time when the placeholder is substituted. */
4763 if (code == INTEGER_TYPE
4764 && ecode == INTEGER_TYPE
4765 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4766 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4768 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4770 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4771 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4772 || CONTAINS_PLACEHOLDER_P (expr))
4773 return build1 (NOP_EXPR, type, expr);
4776 return fold (convert_to_integer (type, expr));
4779 case REFERENCE_TYPE:
4780 /* If converting between two thin pointers, adjust if needed to account
4781 for differing offsets from the base pointer, depending on whether
4782 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4783 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4786 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4787 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4790 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4791 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4793 tree byte_diff = size_diffop (type_pos, etype_pos);
4795 expr = build1 (NOP_EXPR, type, expr);
4796 if (integer_zerop (byte_diff))
4799 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4800 fold_convert (sizetype, byte_diff));
4803 /* If converting fat pointer to normal or thin pointer, get the pointer
4804 to the array and then convert it. */
4805 if (TYPE_IS_FAT_POINTER_P (etype))
4806 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4808 return fold (convert_to_pointer (type, expr));
4811 return fold (convert_to_real (type, expr));
4814 /* Do a normal conversion between scalar and justified modular type. */
4815 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4817 vec<constructor_elt, va_gc> *v;
4820 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4821 convert (TREE_TYPE (TYPE_FIELDS (type)),
4823 return gnat_build_constructor (type, v);
4826 /* In these cases, assume the front-end has validated the conversion.
4827 If the conversion is valid, it will be a bit-wise conversion, so
4828 it can be viewed as an unchecked conversion. */
4829 return unchecked_convert (type, expr, false);
4832 /* Do a normal conversion between unconstrained and constrained array
4833 type, assuming the latter is a constrained version of the former. */
4834 if (TREE_CODE (expr) == INDIRECT_REF
4835 && ecode == ARRAY_TYPE
4836 && TREE_TYPE (etype) == TREE_TYPE (type))
4838 tree ptr_type = build_pointer_type (type);
4839 tree t = build_unary_op (INDIRECT_REF, NULL_TREE,
4840 fold_convert (ptr_type,
4841 TREE_OPERAND (expr, 0)));
4842 TREE_READONLY (t) = TREE_READONLY (expr);
4843 TREE_THIS_NOTRAP (t) = TREE_THIS_NOTRAP (expr);
4847 /* In these cases, assume the front-end has validated the conversion.
4848 If the conversion is valid, it will be a bit-wise conversion, so
4849 it can be viewed as an unchecked conversion. */
4850 return unchecked_convert (type, expr, false);
4853 /* This is a either a conversion between a tagged type and some
4854 subtype, which we have to mark as a UNION_TYPE because of
4855 overlapping fields or a conversion of an Unchecked_Union. */
4856 return unchecked_convert (type, expr, false);
4858 case UNCONSTRAINED_ARRAY_TYPE:
4859 /* If the input is a VECTOR_TYPE, convert to the representative
4860 array type first. */
4861 if (ecode == VECTOR_TYPE)
4863 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4864 etype = TREE_TYPE (expr);
4865 ecode = TREE_CODE (etype);
4868 /* If EXPR is a constrained array, take its address, convert it to a
4869 fat pointer, and then dereference it. Likewise if EXPR is a
4870 record containing both a template and a constrained array.
4871 Note that a record representing a justified modular type
4872 always represents a packed constrained array. */
4873 if (ecode == ARRAY_TYPE
4874 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4875 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4876 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4879 (INDIRECT_REF, NULL_TREE,
4880 convert_to_fat_pointer (TREE_TYPE (type),
4881 build_unary_op (ADDR_EXPR,
4884 /* Do something very similar for converting one unconstrained
4885 array to another. */
4886 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4888 build_unary_op (INDIRECT_REF, NULL_TREE,
4889 convert (TREE_TYPE (type),
4890 build_unary_op (ADDR_EXPR,
4896 return fold (convert_to_complex (type, expr));
4903 /* Create an expression whose value is that of EXPR converted to the common
4904 index type, which is sizetype. EXPR is supposed to be in the base type
4905 of the GNAT index type. Calling it is equivalent to doing
4907 convert (sizetype, expr)
4909 but we try to distribute the type conversion with the knowledge that EXPR
4910 cannot overflow in its type. This is a best-effort approach and we fall
4911 back to the above expression as soon as difficulties are encountered.
4913 This is necessary to overcome issues that arise when the GNAT base index
4914 type and the GCC common index type (sizetype) don't have the same size,
4915 which is quite frequent on 64-bit architectures. In this case, and if
4916 the GNAT base index type is signed but the iteration type of the loop has
4917 been forced to unsigned, the loop scalar evolution engine cannot compute
4918 a simple evolution for the general induction variables associated with the
4919 array indices, because it will preserve the wrap-around semantics in the
4920 unsigned type of their "inner" part. As a result, many loop optimizations
4923 The solution is to use a special (basic) induction variable that is at
4924 least as large as sizetype, and to express the aforementioned general
4925 induction variables in terms of this induction variable, eliminating
4926 the problematic intermediate truncation to the GNAT base index type.
4927 This is possible as long as the original expression doesn't overflow
4928 and if the middle-end hasn't introduced artificial overflows in the
4929 course of the various simplification it can make to the expression. */
4932 convert_to_index_type (tree expr)
4934 enum tree_code code = TREE_CODE (expr);
4935 tree type = TREE_TYPE (expr);
4937 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4938 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4939 if (TYPE_UNSIGNED (type) || !optimize)
4940 return convert (sizetype, expr);
4945 /* The main effect of the function: replace a loop parameter with its
4946 associated special induction variable. */
4947 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4948 expr = DECL_INDUCTION_VAR (expr);
4953 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4954 /* Bail out as soon as we suspect some sort of type frobbing. */
4955 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4956 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4960 /* ... fall through ... */
4962 case NON_LVALUE_EXPR:
4963 return fold_build1 (code, sizetype,
4964 convert_to_index_type (TREE_OPERAND (expr, 0)));
4969 return fold_build2 (code, sizetype,
4970 convert_to_index_type (TREE_OPERAND (expr, 0)),
4971 convert_to_index_type (TREE_OPERAND (expr, 1)));
4974 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4975 convert_to_index_type (TREE_OPERAND (expr, 1)));
4978 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4979 convert_to_index_type (TREE_OPERAND (expr, 1)),
4980 convert_to_index_type (TREE_OPERAND (expr, 2)));
4986 return convert (sizetype, expr);
4989 /* Remove all conversions that are done in EXP. This includes converting
4990 from a padded type or to a justified modular type. If TRUE_ADDRESS
4991 is true, always return the address of the containing object even if
4992 the address is not bit-aligned. */
4995 remove_conversions (tree exp, bool true_address)
4997 switch (TREE_CODE (exp))
5001 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5002 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5004 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
5008 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
5009 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5013 case VIEW_CONVERT_EXPR:
5014 case NON_LVALUE_EXPR:
5015 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5024 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5025 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5026 likewise return an expression pointing to the underlying array. */
5029 maybe_unconstrained_array (tree exp)
5031 enum tree_code code = TREE_CODE (exp);
5032 tree type = TREE_TYPE (exp);
5034 switch (TREE_CODE (type))
5036 case UNCONSTRAINED_ARRAY_TYPE:
5037 if (code == UNCONSTRAINED_ARRAY_REF)
5039 const bool read_only = TREE_READONLY (exp);
5040 const bool no_trap = TREE_THIS_NOTRAP (exp);
5042 exp = TREE_OPERAND (exp, 0);
5043 type = TREE_TYPE (exp);
5045 if (TREE_CODE (exp) == COND_EXPR)
5048 = build_unary_op (INDIRECT_REF, NULL_TREE,
5049 build_component_ref (TREE_OPERAND (exp, 1),
5053 = build_unary_op (INDIRECT_REF, NULL_TREE,
5054 build_component_ref (TREE_OPERAND (exp, 2),
5058 exp = build3 (COND_EXPR,
5059 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
5060 TREE_OPERAND (exp, 0), op1, op2);
5064 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5065 build_component_ref (exp,
5068 TREE_READONLY (exp) = read_only;
5069 TREE_THIS_NOTRAP (exp) = no_trap;
5073 else if (code == NULL_EXPR)
5074 exp = build1 (NULL_EXPR,
5075 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5076 TREE_OPERAND (exp, 0));
5080 /* If this is a padded type and it contains a template, convert to the
5081 unpadded type first. */
5082 if (TYPE_PADDING_P (type)
5083 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5084 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
5086 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5087 code = TREE_CODE (exp);
5088 type = TREE_TYPE (exp);
5091 if (TYPE_CONTAINS_TEMPLATE_P (type))
5093 /* If the array initializer is a box, return NULL_TREE. */
5094 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
5097 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
5099 type = TREE_TYPE (exp);
5101 /* If the array type is padded, convert to the unpadded type. */
5102 if (TYPE_IS_PADDING_P (type))
5103 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5114 /* Return true if EXPR is an expression that can be folded as an operand
5115 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5118 can_fold_for_view_convert_p (tree expr)
5122 /* The folder will fold NOP_EXPRs between integral types with the same
5123 precision (in the middle-end's sense). We cannot allow it if the
5124 types don't have the same precision in the Ada sense as well. */
5125 if (TREE_CODE (expr) != NOP_EXPR)
5128 t1 = TREE_TYPE (expr);
5129 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5131 /* Defer to the folder for non-integral conversions. */
5132 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5135 /* Only fold conversions that preserve both precisions. */
5136 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5137 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5143 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5144 If NOTRUNC_P is true, truncation operations should be suppressed.
5146 Special care is required with (source or target) integral types whose
5147 precision is not equal to their size, to make sure we fetch or assign
5148 the value bits whose location might depend on the endianness, e.g.
5150 Rmsize : constant := 8;
5151 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5153 type Bit_Array is array (1 .. Rmsize) of Boolean;
5154 pragma Pack (Bit_Array);
5156 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5158 Value : Int := 2#1000_0001#;
5159 Vbits : Bit_Array := To_Bit_Array (Value);
5161 we expect the 8 bits at Vbits'Address to always contain Value, while
5162 their original location depends on the endianness, at Value'Address
5163 on a little-endian architecture but not on a big-endian one.
5165 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
5166 the bits between the precision and the size are filled, because of the
5167 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
5168 So we use the special predicate type_unsigned_for_rm above. */
5171 unchecked_convert (tree type, tree expr, bool notrunc_p)
5173 tree etype = TREE_TYPE (expr);
5174 enum tree_code ecode = TREE_CODE (etype);
5175 enum tree_code code = TREE_CODE (type);
5177 = (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype));
5179 = (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type));
5181 = (AGGREGATE_TYPE_P (etype) && TYPE_REVERSE_STORAGE_ORDER (etype));
5183 = (AGGREGATE_TYPE_P (type) && TYPE_REVERSE_STORAGE_ORDER (type));
5187 /* If the expression is already of the right type, we are done. */
5191 /* If both types are integral just do a normal conversion.
5192 Likewise for a conversion to an unconstrained array. */
5193 if (((INTEGRAL_TYPE_P (type)
5194 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5195 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5196 && (INTEGRAL_TYPE_P (etype)
5197 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5198 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5199 || code == UNCONSTRAINED_ARRAY_TYPE)
5203 tree ntype = copy_type (etype);
5204 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5205 TYPE_MAIN_VARIANT (ntype) = ntype;
5206 expr = build1 (NOP_EXPR, ntype, expr);
5211 tree rtype = copy_type (type);
5212 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5213 TYPE_MAIN_VARIANT (rtype) = rtype;
5214 expr = convert (rtype, expr);
5215 expr = build1 (NOP_EXPR, type, expr);
5218 expr = convert (type, expr);
5221 /* If we are converting to an integral type whose precision is not equal
5222 to its size, first unchecked convert to a record type that contains a
5223 field of the given precision. Then extract the result from the field.
5225 There is a subtlety if the source type is an aggregate type with reverse
5226 storage order because its representation is not contiguous in the native
5227 storage order, i.e. a direct unchecked conversion to an integral type
5228 with N bits of precision cannot read the first N bits of the aggregate
5229 type. To overcome it, we do an unchecked conversion to an integral type
5230 with reverse storage order and return the resulting value. This also
5231 ensures that the result of the unchecked conversion doesn't depend on
5232 the endianness of the target machine, but only on the storage order of
5235 Finally, for the sake of consistency, we do the unchecked conversion
5236 to an integral type with reverse storage order as soon as the source
5237 type is an aggregate type with reverse storage order, even if there
5238 are no considerations of precision or size involved. Ultimately, we
5239 further extend this processing to any scalar type. */
5240 else if ((INTEGRAL_TYPE_P (type)
5241 && TYPE_RM_SIZE (type)
5242 && ((c = tree_int_cst_compare (TYPE_RM_SIZE (type),
5243 TYPE_SIZE (type))) < 0
5245 || (SCALAR_FLOAT_TYPE_P (type) && ereverse))
5247 tree rec_type = make_node (RECORD_TYPE);
5248 tree field_type, field;
5250 TYPE_REVERSE_STORAGE_ORDER (rec_type) = ereverse;
5254 const unsigned HOST_WIDE_INT prec
5255 = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5256 if (type_unsigned_for_rm (type))
5257 field_type = make_unsigned_type (prec);
5259 field_type = make_signed_type (prec);
5260 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5265 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5266 NULL_TREE, bitsize_zero_node, c < 0, 0);
5268 finish_record_type (rec_type, field, 1, false);
5270 expr = unchecked_convert (rec_type, expr, notrunc_p);
5271 expr = build_component_ref (expr, field, false);
5272 expr = fold_build1 (NOP_EXPR, type, expr);
5275 /* Similarly if we are converting from an integral type whose precision is
5276 not equal to its size, first copy into a field of the given precision
5277 and unchecked convert the record type.
5279 The same considerations as above apply if the target type is an aggregate
5280 type with reverse storage order and we also proceed similarly. */
5281 else if ((INTEGRAL_TYPE_P (etype)
5282 && TYPE_RM_SIZE (etype)
5283 && ((c = tree_int_cst_compare (TYPE_RM_SIZE (etype),
5284 TYPE_SIZE (etype))) < 0
5286 || (SCALAR_FLOAT_TYPE_P (etype) && reverse))
5288 tree rec_type = make_node (RECORD_TYPE);
5289 vec<constructor_elt, va_gc> *v;
5291 tree field_type, field;
5293 TYPE_REVERSE_STORAGE_ORDER (rec_type) = reverse;
5297 const unsigned HOST_WIDE_INT prec
5298 = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5299 if (type_unsigned_for_rm (etype))
5300 field_type = make_unsigned_type (prec);
5302 field_type = make_signed_type (prec);
5303 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5308 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5309 NULL_TREE, bitsize_zero_node, c < 0, 0);
5311 finish_record_type (rec_type, field, 1, false);
5313 expr = fold_build1 (NOP_EXPR, field_type, expr);
5314 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5315 expr = gnat_build_constructor (rec_type, v);
5316 expr = unchecked_convert (type, expr, notrunc_p);
5319 /* If we are converting from a scalar type to a type with a different size,
5320 we need to pad to have the same size on both sides.
5322 ??? We cannot do it unconditionally because unchecked conversions are
5323 used liberally by the front-end to implement polymorphism, e.g. in:
5325 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5326 return p___size__4 (p__object!(S191s.all));
5328 so we skip all expressions that are references. */
5329 else if (!REFERENCE_CLASS_P (expr)
5330 && !AGGREGATE_TYPE_P (etype)
5331 && TREE_CONSTANT (TYPE_SIZE (type))
5332 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5336 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5337 false, false, false, true),
5339 expr = unchecked_convert (type, expr, notrunc_p);
5343 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5344 false, false, false, true);
5345 expr = unchecked_convert (rec_type, expr, notrunc_p);
5346 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5350 /* We have a special case when we are converting between two unconstrained
5351 array types. In that case, take the address, convert the fat pointer
5352 types, and dereference. */
5353 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5354 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5355 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5356 build_unary_op (ADDR_EXPR, NULL_TREE,
5359 /* Another special case is when we are converting to a vector type from its
5360 representative array type; this a regular conversion. */
5361 else if (code == VECTOR_TYPE
5362 && ecode == ARRAY_TYPE
5363 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5365 expr = convert (type, expr);
5367 /* And, if the array type is not the representative, we try to build an
5368 intermediate vector type of which the array type is the representative
5369 and to do the unchecked conversion between the vector types, in order
5370 to enable further simplifications in the middle-end. */
5371 else if (code == VECTOR_TYPE
5372 && ecode == ARRAY_TYPE
5373 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5375 expr = convert (tem, expr);
5376 return unchecked_convert (type, expr, notrunc_p);
5379 /* If we are converting a CONSTRUCTOR to a more aligned aggregate type, bump
5380 the alignment of the CONSTRUCTOR to speed up the copy operation. But do
5381 not do it for a conversion between original and packable version to avoid
5382 an infinite recursion. */
5383 else if (TREE_CODE (expr) == CONSTRUCTOR
5384 && AGGREGATE_TYPE_P (type)
5385 && TYPE_NAME (type) != TYPE_NAME (etype)
5386 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5388 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5389 Empty, false, false, false, true),
5391 return unchecked_convert (type, expr, notrunc_p);
5394 /* If we are converting a CONSTRUCTOR to a larger aggregate type, bump the
5395 size of the CONSTRUCTOR to make sure there are enough allocated bytes.
5396 But do not do it for a conversion between original and packable version
5397 to avoid an infinite recursion. */
5398 else if (TREE_CODE (expr) == CONSTRUCTOR
5399 && AGGREGATE_TYPE_P (type)
5400 && TYPE_NAME (type) != TYPE_NAME (etype)
5401 && TREE_CONSTANT (TYPE_SIZE (type))
5402 && (!TREE_CONSTANT (TYPE_SIZE (etype))
5403 || tree_int_cst_lt (TYPE_SIZE (etype), TYPE_SIZE (type))))
5405 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0,
5406 Empty, false, false, false, true),
5408 return unchecked_convert (type, expr, notrunc_p);
5411 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5414 expr = maybe_unconstrained_array (expr);
5415 etype = TREE_TYPE (expr);
5416 ecode = TREE_CODE (etype);
5417 if (can_fold_for_view_convert_p (expr))
5418 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5420 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5423 /* If the result is a non-biased integral type whose precision is not equal
5424 to its size, sign- or zero-extend the result. But we need not do this
5425 if the input is also an integral type and both are unsigned or both are
5426 signed and have the same precision. */
5430 && INTEGRAL_TYPE_P (type)
5431 && (type_rm_size = TYPE_RM_SIZE (type))
5432 && tree_int_cst_compare (type_rm_size, TYPE_SIZE (type)) < 0
5433 && !(INTEGRAL_TYPE_P (etype)
5434 && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
5435 && (type_unsigned_for_rm (type)
5436 || tree_int_cst_compare (type_rm_size,
5437 TYPE_RM_SIZE (etype)
5438 ? TYPE_RM_SIZE (etype)
5439 : TYPE_SIZE (etype)) == 0)))
5441 if (integer_zerop (type_rm_size))
5442 expr = build_int_cst (type, 0);
5446 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5447 type_unsigned_for_rm (type));
5449 = convert (base_type,
5450 size_binop (MINUS_EXPR,
5451 TYPE_SIZE (type), type_rm_size));
5454 build_binary_op (RSHIFT_EXPR, base_type,
5455 build_binary_op (LSHIFT_EXPR, base_type,
5463 /* An unchecked conversion should never raise Constraint_Error. The code
5464 below assumes that GCC's conversion routines overflow the same way that
5465 the underlying hardware does. This is probably true. In the rare case
5466 when it is false, we can rely on the fact that such conversions are
5467 erroneous anyway. */
5468 if (TREE_CODE (expr) == INTEGER_CST)
5469 TREE_OVERFLOW (expr) = 0;
5471 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5472 show no longer constant. */
5473 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5474 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5476 TREE_CONSTANT (expr) = 0;
5481 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5482 the latter being a record type as predicated by Is_Record_Type. */
5485 tree_code_for_record_type (Entity_Id gnat_type)
5487 Node_Id component_list, component;
5489 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5490 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5491 if (!Is_Unchecked_Union (gnat_type))
5494 gnat_type = Implementation_Base_Type (gnat_type);
5496 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5498 for (component = First_Non_Pragma (Component_Items (component_list));
5499 Present (component);
5500 component = Next_Non_Pragma (component))
5501 if (Ekind (Defining_Entity (component)) == E_Component)
5507 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5508 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5509 according to the presence of an alignment clause on the type or, if it
5510 is an array, on the component type. */
5513 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5515 gnat_type = Underlying_Type (gnat_type);
5517 *align_clause = Present (Alignment_Clause (gnat_type));
5519 if (Is_Array_Type (gnat_type))
5521 gnat_type = Underlying_Type (Component_Type (gnat_type));
5522 if (Present (Alignment_Clause (gnat_type)))
5523 *align_clause = true;
5526 if (!Is_Floating_Point_Type (gnat_type))
5529 if (UI_To_Int (Esize (gnat_type)) != 64)
5535 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5536 size is greater or equal to 64 bits, or an array of such a type. Set
5537 ALIGN_CLAUSE according to the presence of an alignment clause on the
5538 type or, if it is an array, on the component type. */
5541 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5543 gnat_type = Underlying_Type (gnat_type);
5545 *align_clause = Present (Alignment_Clause (gnat_type));
5547 if (Is_Array_Type (gnat_type))
5549 gnat_type = Underlying_Type (Component_Type (gnat_type));
5550 if (Present (Alignment_Clause (gnat_type)))
5551 *align_clause = true;
5554 if (!Is_Scalar_Type (gnat_type))
5557 if (UI_To_Int (Esize (gnat_type)) < 64)
5563 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5564 component of an aggregate type. */
5567 type_for_nonaliased_component_p (tree gnu_type)
5569 /* If the type is passed by reference, we may have pointers to the
5570 component so it cannot be made non-aliased. */
5571 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5574 /* We used to say that any component of aggregate type is aliased
5575 because the front-end may take 'Reference of it. The front-end
5576 has been enhanced in the meantime so as to use a renaming instead
5577 in most cases, but the back-end can probably take the address of
5578 such a component too so we go for the conservative stance.
5580 For instance, we might need the address of any array type, even
5581 if normally passed by copy, to construct a fat pointer if the
5582 component is used as an actual for an unconstrained formal.
5584 Likewise for record types: even if a specific record subtype is
5585 passed by copy, the parent type might be passed by ref (e.g. if
5586 it's of variable size) and we might take the address of a child
5587 component to pass to a parent formal. We have no way to check
5588 for such conditions here. */
5589 if (AGGREGATE_TYPE_P (gnu_type))
5595 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5598 smaller_form_type_p (tree type, tree orig_type)
5602 /* We're not interested in variants here. */
5603 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5606 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5607 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5610 size = TYPE_SIZE (type);
5611 osize = TYPE_SIZE (orig_type);
5613 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5616 return tree_int_cst_lt (size, osize) != 0;
5619 /* Return whether EXPR, which is the renamed object in an object renaming
5620 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5621 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
5624 can_materialize_object_renaming_p (Node_Id expr)
5628 expr = Original_Node (expr);
5633 case N_Expanded_Name:
5634 if (!Present (Renamed_Object (Entity (expr))))
5636 expr = Renamed_Object (Entity (expr));
5639 case N_Selected_Component:
5641 if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5645 = Normalized_First_Bit (Entity (Selector_Name (expr)));
5646 if (!UI_Is_In_Int_Range (bitpos)
5647 || (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5650 expr = Prefix (expr);
5654 case N_Indexed_Component:
5657 const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5659 if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5662 expr = Prefix (expr);
5666 case N_Explicit_Dereference:
5667 expr = Prefix (expr);
5676 /* Perform final processing on global declarations. */
5678 static GTY (()) tree dummy_global;
5681 gnat_write_global_declarations (void)
5686 /* If we have declared types as used at the global level, insert them in
5687 the global hash table. We use a dummy variable for this purpose, but
5688 we need to build it unconditionally to avoid -fcompare-debug issues. */
5689 if (first_global_object_name)
5691 struct varpool_node *node;
5694 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5696 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5698 DECL_HARD_REGISTER (dummy_global) = 1;
5699 TREE_STATIC (dummy_global) = 1;
5700 node = varpool_node::get_create (dummy_global);
5701 node->definition = 1;
5702 node->force_output = 1;
5704 if (types_used_by_cur_var_decl)
5705 while (!types_used_by_cur_var_decl->is_empty ())
5707 tree t = types_used_by_cur_var_decl->pop ();
5708 types_used_by_var_decl_insert (t, dummy_global);
5712 /* Output debug information for all global type declarations first. This
5713 ensures that global types whose compilation hasn't been finalized yet,
5714 for example pointers to Taft amendment types, have their compilation
5715 finalized in the right context. */
5716 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5717 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5718 debug_hooks->type_decl (iter, false);
5720 /* Output imported functions. */
5721 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5722 if (TREE_CODE (iter) == FUNCTION_DECL
5723 && DECL_EXTERNAL (iter)
5724 && DECL_INITIAL (iter) == NULL
5725 && !DECL_IGNORED_P (iter)
5726 && DECL_FUNCTION_IS_DEF (iter))
5727 debug_hooks->early_global_decl (iter);
5729 /* Then output the global variables. We need to do that after the debug
5730 information for global types is emitted so that they are finalized. Skip
5731 external global variables, unless we need to emit debug info for them:
5732 this is useful for imported variables, for instance. */
5733 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5734 if (TREE_CODE (iter) == VAR_DECL
5735 && (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5736 rest_of_decl_compilation (iter, true, 0);
5738 /* Output the imported modules/declarations. In GNAT, these are only
5739 materializing subprogram. */
5740 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5741 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5742 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5743 DECL_CONTEXT (iter), false, false);
5746 /* ************************************************************************
5747 * * GCC builtins support *
5748 * ************************************************************************ */
5750 /* The general scheme is fairly simple:
5752 For each builtin function/type to be declared, gnat_install_builtins calls
5753 internal facilities which eventually get to gnat_pushdecl, which in turn
5754 tracks the so declared builtin function decls in the 'builtin_decls' global
5755 datastructure. When an Intrinsic subprogram declaration is processed, we
5756 search this global datastructure to retrieve the associated BUILT_IN DECL
5759 /* Search the chain of currently available builtin declarations for a node
5760 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5761 found, if any, or NULL_TREE otherwise. */
5763 builtin_decl_for (tree name)
5768 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5769 if (DECL_NAME (decl) == name)
5775 /* The code below eventually exposes gnat_install_builtins, which declares
5776 the builtin types and functions we might need, either internally or as
5777 user accessible facilities.
5779 ??? This is a first implementation shot, still in rough shape. It is
5780 heavily inspired from the "C" family implementation, with chunks copied
5781 verbatim from there.
5783 Two obvious improvement candidates are:
5784 o Use a more efficient name/decl mapping scheme
5785 o Devise a middle-end infrastructure to avoid having to copy
5786 pieces between front-ends. */
5788 /* ----------------------------------------------------------------------- *
5789 * BUILTIN ELEMENTARY TYPES *
5790 * ----------------------------------------------------------------------- */
5792 /* Standard data types to be used in builtin argument declarations. */
5796 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5798 CTI_CONST_STRING_TYPE,
5803 static tree c_global_trees[CTI_MAX];
5805 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5806 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5807 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5809 /* ??? In addition some attribute handlers, we currently don't support a
5810 (small) number of builtin-types, which in turns inhibits support for a
5811 number of builtin functions. */
5812 #define wint_type_node void_type_node
5813 #define intmax_type_node void_type_node
5814 #define uintmax_type_node void_type_node
5816 /* Used to help initialize the builtin-types.def table. When a type of
5817 the correct size doesn't exist, use error_mark_node instead of NULL.
5818 The later results in segfaults even when a decl using the type doesn't
5822 builtin_type_for_size (int size, bool unsignedp)
5824 tree type = gnat_type_for_size (size, unsignedp);
5825 return type ? type : error_mark_node;
5828 /* Build/push the elementary type decls that builtin functions/types
5832 install_builtin_elementary_types (void)
5834 signed_size_type_node = gnat_signed_type_for (size_type_node);
5835 pid_type_node = integer_type_node;
5837 string_type_node = build_pointer_type (char_type_node);
5838 const_string_type_node
5839 = build_pointer_type (build_qualified_type
5840 (char_type_node, TYPE_QUAL_CONST));
5843 /* ----------------------------------------------------------------------- *
5844 * BUILTIN FUNCTION TYPES *
5845 * ----------------------------------------------------------------------- */
5847 /* Now, builtin function types per se. */
5851 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5852 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5853 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5854 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5855 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5856 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5857 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5858 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5860 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5862 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5863 ARG6, ARG7, ARG8) NAME,
5864 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5865 ARG6, ARG7, ARG8, ARG9) NAME,
5866 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5867 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5868 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5869 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5870 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5871 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5872 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5873 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5874 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5875 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5877 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5879 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5881 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5882 #include "builtin-types.def"
5883 #include "ada-builtin-types.def"
5884 #undef DEF_PRIMITIVE_TYPE
5885 #undef DEF_FUNCTION_TYPE_0
5886 #undef DEF_FUNCTION_TYPE_1
5887 #undef DEF_FUNCTION_TYPE_2
5888 #undef DEF_FUNCTION_TYPE_3
5889 #undef DEF_FUNCTION_TYPE_4
5890 #undef DEF_FUNCTION_TYPE_5
5891 #undef DEF_FUNCTION_TYPE_6
5892 #undef DEF_FUNCTION_TYPE_7
5893 #undef DEF_FUNCTION_TYPE_8
5894 #undef DEF_FUNCTION_TYPE_9
5895 #undef DEF_FUNCTION_TYPE_10
5896 #undef DEF_FUNCTION_TYPE_11
5897 #undef DEF_FUNCTION_TYPE_VAR_0
5898 #undef DEF_FUNCTION_TYPE_VAR_1
5899 #undef DEF_FUNCTION_TYPE_VAR_2
5900 #undef DEF_FUNCTION_TYPE_VAR_3
5901 #undef DEF_FUNCTION_TYPE_VAR_4
5902 #undef DEF_FUNCTION_TYPE_VAR_5
5903 #undef DEF_FUNCTION_TYPE_VAR_6
5904 #undef DEF_FUNCTION_TYPE_VAR_7
5905 #undef DEF_POINTER_TYPE
5909 typedef enum c_builtin_type builtin_type;
5911 /* A temporary array used in communication with def_fn_type. */
5912 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5914 /* A helper function for install_builtin_types. Build function type
5915 for DEF with return type RET and N arguments. If VAR is true, then the
5916 function should be variadic after those N arguments.
5918 Takes special care not to ICE if any of the types involved are
5919 error_mark_node, which indicates that said type is not in fact available
5920 (see builtin_type_for_size). In which case the function type as a whole
5921 should be error_mark_node. */
5924 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5927 tree *args = XALLOCAVEC (tree, n);
5932 for (i = 0; i < n; ++i)
5934 builtin_type a = (builtin_type) va_arg (list, int);
5935 t = builtin_types[a];
5936 if (t == error_mark_node)
5941 t = builtin_types[ret];
5942 if (t == error_mark_node)
5945 t = build_varargs_function_type_array (t, n, args);
5947 t = build_function_type_array (t, n, args);
5950 builtin_types[def] = t;
5954 /* Build the builtin function types and install them in the builtin_types
5955 array for later use in builtin function decls. */
5958 install_builtin_function_types (void)
5960 tree va_list_ref_type_node;
5961 tree va_list_arg_type_node;
5963 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5965 va_list_arg_type_node = va_list_ref_type_node =
5966 build_pointer_type (TREE_TYPE (va_list_type_node));
5970 va_list_arg_type_node = va_list_type_node;
5971 va_list_ref_type_node = build_reference_type (va_list_type_node);
5974 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5975 builtin_types[ENUM] = VALUE;
5976 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5977 def_fn_type (ENUM, RETURN, 0, 0);
5978 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5979 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5980 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5981 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5982 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5983 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5984 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5985 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5986 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5987 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5988 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5990 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5991 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5993 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5994 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5996 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5998 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5999 ARG6, ARG7, ARG8, ARG9) \
6000 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6002 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6003 ARG6, ARG7, ARG8, ARG9, ARG10) \
6004 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6005 ARG7, ARG8, ARG9, ARG10);
6006 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6007 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
6008 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6009 ARG7, ARG8, ARG9, ARG10, ARG11);
6010 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
6011 def_fn_type (ENUM, RETURN, 1, 0);
6012 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
6013 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
6014 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
6015 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
6016 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6017 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
6018 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6019 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
6020 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6021 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6022 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6024 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6025 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6027 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6028 #define DEF_POINTER_TYPE(ENUM, TYPE) \
6029 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
6031 #include "builtin-types.def"
6032 #include "ada-builtin-types.def"
6034 #undef DEF_PRIMITIVE_TYPE
6035 #undef DEF_FUNCTION_TYPE_0
6036 #undef DEF_FUNCTION_TYPE_1
6037 #undef DEF_FUNCTION_TYPE_2
6038 #undef DEF_FUNCTION_TYPE_3
6039 #undef DEF_FUNCTION_TYPE_4
6040 #undef DEF_FUNCTION_TYPE_5
6041 #undef DEF_FUNCTION_TYPE_6
6042 #undef DEF_FUNCTION_TYPE_7
6043 #undef DEF_FUNCTION_TYPE_8
6044 #undef DEF_FUNCTION_TYPE_9
6045 #undef DEF_FUNCTION_TYPE_10
6046 #undef DEF_FUNCTION_TYPE_11
6047 #undef DEF_FUNCTION_TYPE_VAR_0
6048 #undef DEF_FUNCTION_TYPE_VAR_1
6049 #undef DEF_FUNCTION_TYPE_VAR_2
6050 #undef DEF_FUNCTION_TYPE_VAR_3
6051 #undef DEF_FUNCTION_TYPE_VAR_4
6052 #undef DEF_FUNCTION_TYPE_VAR_5
6053 #undef DEF_FUNCTION_TYPE_VAR_6
6054 #undef DEF_FUNCTION_TYPE_VAR_7
6055 #undef DEF_POINTER_TYPE
6056 builtin_types[(int) BT_LAST] = NULL_TREE;
6059 /* ----------------------------------------------------------------------- *
6060 * BUILTIN ATTRIBUTES *
6061 * ----------------------------------------------------------------------- */
6063 enum built_in_attribute
6065 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
6066 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
6067 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
6068 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
6069 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
6070 #include "builtin-attrs.def"
6071 #undef DEF_ATTR_NULL_TREE
6073 #undef DEF_ATTR_STRING
6074 #undef DEF_ATTR_IDENT
6075 #undef DEF_ATTR_TREE_LIST
6079 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
6082 install_builtin_attributes (void)
6084 /* Fill in the built_in_attributes array. */
6085 #define DEF_ATTR_NULL_TREE(ENUM) \
6086 built_in_attributes[(int) ENUM] = NULL_TREE;
6087 #define DEF_ATTR_INT(ENUM, VALUE) \
6088 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
6089 #define DEF_ATTR_STRING(ENUM, VALUE) \
6090 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
6091 #define DEF_ATTR_IDENT(ENUM, STRING) \
6092 built_in_attributes[(int) ENUM] = get_identifier (STRING);
6093 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
6094 built_in_attributes[(int) ENUM] \
6095 = tree_cons (built_in_attributes[(int) PURPOSE], \
6096 built_in_attributes[(int) VALUE], \
6097 built_in_attributes[(int) CHAIN]);
6098 #include "builtin-attrs.def"
6099 #undef DEF_ATTR_NULL_TREE
6101 #undef DEF_ATTR_STRING
6102 #undef DEF_ATTR_IDENT
6103 #undef DEF_ATTR_TREE_LIST
6106 /* Handle a "const" attribute; arguments as in
6107 struct attribute_spec.handler. */
6110 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6111 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6114 if (TREE_CODE (*node) == FUNCTION_DECL)
6115 TREE_READONLY (*node) = 1;
6117 *no_add_attrs = true;
6122 /* Handle a "nothrow" attribute; arguments as in
6123 struct attribute_spec.handler. */
6126 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6127 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6130 if (TREE_CODE (*node) == FUNCTION_DECL)
6131 TREE_NOTHROW (*node) = 1;
6133 *no_add_attrs = true;
6138 /* Handle a "pure" attribute; arguments as in
6139 struct attribute_spec.handler. */
6142 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6143 int ARG_UNUSED (flags), bool *no_add_attrs)
6145 if (TREE_CODE (*node) == FUNCTION_DECL)
6146 DECL_PURE_P (*node) = 1;
6147 /* TODO: support types. */
6150 warning (OPT_Wattributes, "%qs attribute ignored",
6151 IDENTIFIER_POINTER (name));
6152 *no_add_attrs = true;
6158 /* Handle a "no vops" attribute; arguments as in
6159 struct attribute_spec.handler. */
6162 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6163 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6164 bool *ARG_UNUSED (no_add_attrs))
6166 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6167 DECL_IS_NOVOPS (*node) = 1;
6171 /* Helper for nonnull attribute handling; fetch the operand number
6172 from the attribute argument list. */
6175 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6177 /* Verify the arg number is a constant. */
6178 if (!tree_fits_uhwi_p (arg_num_expr))
6181 *valp = TREE_INT_CST_LOW (arg_num_expr);
6185 /* Handle the "nonnull" attribute. */
6187 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6188 tree args, int ARG_UNUSED (flags),
6192 unsigned HOST_WIDE_INT attr_arg_num;
6194 /* If no arguments are specified, all pointer arguments should be
6195 non-null. Verify a full prototype is given so that the arguments
6196 will have the correct types when we actually check them later.
6197 Avoid diagnosing type-generic built-ins since those have no
6201 if (!prototype_p (type)
6202 && (!TYPE_ATTRIBUTES (type)
6203 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
6205 error ("nonnull attribute without arguments on a non-prototype");
6206 *no_add_attrs = true;
6211 /* Argument list specified. Verify that each argument number references
6212 a pointer argument. */
6213 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6215 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6217 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6219 error ("nonnull argument has invalid operand number (argument %lu)",
6220 (unsigned long) attr_arg_num);
6221 *no_add_attrs = true;
6225 if (prototype_p (type))
6227 function_args_iterator iter;
6230 function_args_iter_init (&iter, type);
6231 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6233 argument = function_args_iter_cond (&iter);
6234 if (!argument || ck_num == arg_num)
6239 || TREE_CODE (argument) == VOID_TYPE)
6241 error ("nonnull argument with out-of-range operand number "
6242 "(argument %lu, operand %lu)",
6243 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6244 *no_add_attrs = true;
6248 if (TREE_CODE (argument) != POINTER_TYPE)
6250 error ("nonnull argument references non-pointer operand "
6251 "(argument %lu, operand %lu)",
6252 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6253 *no_add_attrs = true;
6262 /* Handle a "sentinel" attribute. */
6265 handle_sentinel_attribute (tree *node, tree name, tree args,
6266 int ARG_UNUSED (flags), bool *no_add_attrs)
6268 if (!prototype_p (*node))
6270 warning (OPT_Wattributes,
6271 "%qs attribute requires prototypes with named arguments",
6272 IDENTIFIER_POINTER (name));
6273 *no_add_attrs = true;
6277 if (!stdarg_p (*node))
6279 warning (OPT_Wattributes,
6280 "%qs attribute only applies to variadic functions",
6281 IDENTIFIER_POINTER (name));
6282 *no_add_attrs = true;
6288 tree position = TREE_VALUE (args);
6290 if (TREE_CODE (position) != INTEGER_CST)
6292 warning (0, "requested position is not an integer constant");
6293 *no_add_attrs = true;
6297 if (tree_int_cst_lt (position, integer_zero_node))
6299 warning (0, "requested position is less than zero");
6300 *no_add_attrs = true;
6308 /* Handle a "noreturn" attribute; arguments as in
6309 struct attribute_spec.handler. */
6312 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6313 int ARG_UNUSED (flags), bool *no_add_attrs)
6315 tree type = TREE_TYPE (*node);
6317 /* See FIXME comment in c_common_attribute_table. */
6318 if (TREE_CODE (*node) == FUNCTION_DECL)
6319 TREE_THIS_VOLATILE (*node) = 1;
6320 else if (TREE_CODE (type) == POINTER_TYPE
6321 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6323 = build_pointer_type
6324 (change_qualified_type (TREE_TYPE (type), TYPE_QUAL_VOLATILE));
6327 warning (OPT_Wattributes, "%qs attribute ignored",
6328 IDENTIFIER_POINTER (name));
6329 *no_add_attrs = true;
6335 /* Handle a "noinline" attribute; arguments as in
6336 struct attribute_spec.handler. */
6339 handle_noinline_attribute (tree *node, tree name,
6340 tree ARG_UNUSED (args),
6341 int ARG_UNUSED (flags), bool *no_add_attrs)
6343 if (TREE_CODE (*node) == FUNCTION_DECL)
6345 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6347 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6348 "with attribute %qs", name, "always_inline");
6349 *no_add_attrs = true;
6352 DECL_UNINLINABLE (*node) = 1;
6356 warning (OPT_Wattributes, "%qE attribute ignored", name);
6357 *no_add_attrs = true;
6363 /* Handle a "stack_protect" attribute; arguments as in
6364 struct attribute_spec.handler. */
6367 handle_stack_protect_attribute (tree *node, tree name, tree, int,
6370 if (TREE_CODE (*node) != FUNCTION_DECL)
6372 warning (OPT_Wattributes, "%qE attribute ignored", name);
6373 *no_add_attrs = true;
6379 /* Handle a "noclone" attribute; arguments as in
6380 struct attribute_spec.handler. */
6383 handle_noclone_attribute (tree *node, tree name,
6384 tree ARG_UNUSED (args),
6385 int ARG_UNUSED (flags), bool *no_add_attrs)
6387 if (TREE_CODE (*node) != FUNCTION_DECL)
6389 warning (OPT_Wattributes, "%qE attribute ignored", name);
6390 *no_add_attrs = true;
6396 /* Handle a "leaf" attribute; arguments as in
6397 struct attribute_spec.handler. */
6400 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6401 int ARG_UNUSED (flags), bool *no_add_attrs)
6403 if (TREE_CODE (*node) != FUNCTION_DECL)
6405 warning (OPT_Wattributes, "%qE attribute ignored", name);
6406 *no_add_attrs = true;
6408 if (!TREE_PUBLIC (*node))
6410 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6411 *no_add_attrs = true;
6417 /* Handle a "always_inline" attribute; arguments as in
6418 struct attribute_spec.handler. */
6421 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6422 int ARG_UNUSED (flags), bool *no_add_attrs)
6424 if (TREE_CODE (*node) == FUNCTION_DECL)
6426 /* Set the attribute and mark it for disregarding inline limits. */
6427 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6431 warning (OPT_Wattributes, "%qE attribute ignored", name);
6432 *no_add_attrs = true;
6438 /* Handle a "malloc" attribute; arguments as in
6439 struct attribute_spec.handler. */
6442 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6443 int ARG_UNUSED (flags), bool *no_add_attrs)
6445 if (TREE_CODE (*node) == FUNCTION_DECL
6446 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6447 DECL_IS_MALLOC (*node) = 1;
6450 warning (OPT_Wattributes, "%qs attribute ignored",
6451 IDENTIFIER_POINTER (name));
6452 *no_add_attrs = true;
6458 /* Fake handler for attributes we don't properly support. */
6461 fake_attribute_handler (tree * ARG_UNUSED (node),
6462 tree ARG_UNUSED (name),
6463 tree ARG_UNUSED (args),
6464 int ARG_UNUSED (flags),
6465 bool * ARG_UNUSED (no_add_attrs))
6470 /* Handle a "type_generic" attribute. */
6473 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6474 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6475 bool * ARG_UNUSED (no_add_attrs))
6477 /* Ensure we have a function type. */
6478 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6480 /* Ensure we have a variadic function. */
6481 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6486 /* Handle a "vector_size" attribute; arguments as in
6487 struct attribute_spec.handler. */
6490 handle_vector_size_attribute (tree *node, tree name, tree args,
6491 int ARG_UNUSED (flags), bool *no_add_attrs)
6496 *no_add_attrs = true;
6498 /* We need to provide for vector pointers, vector arrays, and
6499 functions returning vectors. For example:
6501 __attribute__((vector_size(16))) short *foo;
6503 In this case, the mode is SI, but the type being modified is
6504 HI, so we need to look further. */
6505 while (POINTER_TYPE_P (type)
6506 || TREE_CODE (type) == FUNCTION_TYPE
6507 || TREE_CODE (type) == ARRAY_TYPE)
6508 type = TREE_TYPE (type);
6510 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6514 /* Build back pointers if needed. */
6515 *node = reconstruct_complex_type (*node, vector_type);
6520 /* Handle a "vector_type" attribute; arguments as in
6521 struct attribute_spec.handler. */
6524 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6525 int ARG_UNUSED (flags), bool *no_add_attrs)
6530 *no_add_attrs = true;
6532 if (TREE_CODE (type) != ARRAY_TYPE)
6534 error ("attribute %qs applies to array types only",
6535 IDENTIFIER_POINTER (name));
6539 vector_type = build_vector_type_for_array (type, name);
6543 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6544 *node = vector_type;
6549 /* ----------------------------------------------------------------------- *
6550 * BUILTIN FUNCTIONS *
6551 * ----------------------------------------------------------------------- */
6553 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6554 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6555 if nonansi_p and flag_no_nonansi_builtin. */
6558 def_builtin_1 (enum built_in_function fncode,
6560 enum built_in_class fnclass,
6561 tree fntype, tree libtype,
6562 bool both_p, bool fallback_p,
6563 bool nonansi_p ATTRIBUTE_UNUSED,
6564 tree fnattrs, bool implicit_p)
6567 const char *libname;
6569 /* Preserve an already installed decl. It most likely was setup in advance
6570 (e.g. as part of the internal builtins) for specific reasons. */
6571 if (builtin_decl_explicit (fncode))
6574 if (fntype == error_mark_node)
6577 gcc_assert ((!both_p && !fallback_p)
6578 || !strncmp (name, "__builtin_",
6579 strlen ("__builtin_")));
6581 libname = name + strlen ("__builtin_");
6582 decl = add_builtin_function (name, fntype, fncode, fnclass,
6583 (fallback_p ? libname : NULL),
6586 /* ??? This is normally further controlled by command-line options
6587 like -fno-builtin, but we don't have them for Ada. */
6588 add_builtin_function (libname, libtype, fncode, fnclass,
6591 set_builtin_decl (fncode, decl, implicit_p);
6594 static int flag_isoc94 = 0;
6595 static int flag_isoc99 = 0;
6596 static int flag_isoc11 = 0;
6598 /* Install what the common builtins.def offers plus our local additions.
6600 Note that ada-builtins.def is included first so that locally redefined
6601 built-in functions take precedence over the commonly defined ones. */
6604 install_builtin_functions (void)
6606 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6607 NONANSI_P, ATTRS, IMPLICIT, COND) \
6609 def_builtin_1 (ENUM, NAME, CLASS, \
6610 builtin_types[(int) TYPE], \
6611 builtin_types[(int) LIBTYPE], \
6612 BOTH_P, FALLBACK_P, NONANSI_P, \
6613 built_in_attributes[(int) ATTRS], IMPLICIT);
6614 #define DEF_ADA_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
6615 DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_FRONTEND, TYPE, BT_LAST, \
6616 false, false, false, ATTRS, true, true)
6617 #include "ada-builtins.def"
6618 #include "builtins.def"
6621 /* ----------------------------------------------------------------------- *
6622 * BUILTIN FUNCTIONS *
6623 * ----------------------------------------------------------------------- */
6625 /* Install the builtin functions we might need. */
6628 gnat_install_builtins (void)
6630 install_builtin_elementary_types ();
6631 install_builtin_function_types ();
6632 install_builtin_attributes ();
6634 /* Install builtins used by generic middle-end pieces first. Some of these
6635 know about internal specificities and control attributes accordingly, for
6636 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6637 the generic definition from builtins.def. */
6638 build_common_builtin_nodes ();
6640 /* Now, install the target specific builtins, such as the AltiVec family on
6641 ppc, and the common set as exposed by builtins.def. */
6642 targetm.init_builtins ();
6643 install_builtin_functions ();
6646 #include "gt-ada-utils.h"
6647 #include "gtype-ada.h"