1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
11 * Copyright (C) 1992-2001, Free Software Foundation, Inc. *
13 * GNAT is free software; you can redistribute it and/or modify it under *
14 * terms of the GNU General Public License as published by the Free Soft- *
15 * ware Foundation; either version 2, or (at your option) any later ver- *
16 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
17 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
18 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
19 * for more details. You should have received a copy of the GNU General *
20 * Public License distributed with GNAT; see file COPYING. If not, write *
21 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
22 * MA 02111-1307, USA. *
24 * GNAT was originally developed by the GNAT team at New York University. *
25 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
27 ****************************************************************************/
/* Default to the bit size of DImode when the target does not define a
   maximum fixed-point mode size.
   NOTE(review): the matching #endif lines are not visible in this excerpt
   of the file.  */
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)

/* Default to the target word size when the target provides no wider limit.  */
#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
/* If nonzero, pretend we are allocating at global level.
   NOTE(review): the variable this comment documents (presumably an
   `int force_global;' used at global_bindings_p below) is not visible in
   this excerpt.  */

/* Global Variables for the various types we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Associates a GNAT tree node to a GCC tree node. It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
   of `save_gnu_tree' for more info.  */
static tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */
static tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  */
static struct e_stack {struct e_stack *next; tree elab_list; } *elist_stack;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once. Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special. Perhaps we should NOT share these types; we'll see how it
   goes.  */
static tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static tree float_types[NUM_MACHINE_MODES];
/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour. Contours include:

        one for each subprogram definition
        one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

/* NOTE(review): the `struct binding_level {' line and several field
   declarations (the decl chain, the subblock chain and the preallocated
   BLOCK field) are missing from this excerpt; only their comments and the
   `level_chain' field survive.  */

/* A chain of ..._DECL nodes for all variables, constants, functions,
   parameters and type declarations. These ..._DECL nodes are chained
   through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
   in the reverse of the order supplied to be compatible with the
   back-end.  */

/* For each level (except the global one), a chain of BLOCK nodes for all
   the levels that were entered and exited one level down from this one.  */

/* The BLOCK node for this level, if one has been preallocated.
   If 0, the BLOCK is allocated (if needed) when the level is popped.  */

/* The binding level containing this one (the enclosing binding level).  */
struct binding_level *level_chain;

/* The binding level currently in effect.  */
static struct binding_level *current_binding_level = NULL;

/* A chain of binding_level structures awaiting reuse.  */
static struct binding_level *free_binding_level = NULL;

/* The outermost binding level. This binding level is created when the
   compiler is started and it will exist through the entire compilation.  */
static struct binding_level *global_binding_level;

/* Binding level structures are initialized by copying this one.  */
static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
/* Forward declarations for the file-local helper routines below.  */
static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
static tree compute_related_constant PARAMS ((tree, tree));
static tree split_plus PARAMS ((tree, tree *));
static int value_zerop PARAMS ((tree));
static tree float_type_for_size PARAMS ((int, enum machine_mode));
static tree convert_to_fat_pointer PARAMS ((tree, tree));
static tree convert_to_thin_pointer PARAMS ((tree, tree));
/* NOTE(review): the continuation line of this prototype (its remaining
   parameter types and closing parentheses) is missing from this excerpt.  */
static tree make_descriptor_field PARAMS ((const char *,tree, tree,
static void mark_binding_level PARAMS((PTR));
static void mark_e_stack PARAMS((PTR));
/* Initialize the association of GNAT nodes to GCC trees.  */

/* NOTE(review): the function header (with its local `gnat_node'
   declaration) and the opening brace are missing from this excerpt; only
   the body statements below are visible.  */
  /* Allocate one slot per GNAT node and register the array as a GC root.  */
  associate_gnat_to_gnu = (tree *) xmalloc (max_gnat_nodes * sizeof (tree));
  ggc_add_tree_root (associate_gnat_to_gnu, max_gnat_nodes);

  /* Start with no associations recorded.  */
  for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
    associate_gnat_to_gnu [gnat_node] = NULL_TREE;

  /* Bias the pointer so the array can be indexed directly by Node_Id.  */
  associate_gnat_to_gnu -= First_Node_Id;

  pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);

  /* Register GC roots for the elaboration list, the elaboration stack and
     the per-precision type caches.  */
  ggc_add_tree_root (&pending_elaborations, 1);
  ggc_add_root ((PTR) &elist_stack, 1, sizeof (struct e_stack), mark_e_stack);
  ggc_add_tree_root (&signed_and_unsigned_types[0][0],
                     (sizeof signed_and_unsigned_types
                      / sizeof signed_and_unsigned_types[0][0]));
  ggc_add_tree_root (float_types, sizeof float_types / sizeof float_types[0]);

  /* NOTE(review): this line originally read `¤t_binding_level' — a
     character-encoding artifact where `&curren' in `&current_binding_level'
     was rendered as the HTML currency-sign entity.  Restored to the
     evidently intended text; confirm against the pristine source.  */
  ggc_add_root (&current_binding_level, 1, sizeof current_binding_level,
/* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
   which is to be associated with GNAT_ENTITY. Such GCC tree node is always
   a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.

   If GNU_DECL is zero, a previous association is to be reset.  */

/* NOTE(review): the return-type line, the `gnu_decl'/`no_check' parameter
   declarations, the opening brace and the head of the sanity `if' (with its
   abort arm) are missing from this excerpt.  */
save_gnu_tree (gnat_entity, gnu_decl, no_check)
     Entity_Id gnat_entity;
      && (associate_gnat_to_gnu [gnat_entity]
          || (! no_check && ! DECL_P (gnu_decl))))
  /* Record (or, when GNU_DECL is zero, clear) the association.  */
  associate_gnat_to_gnu [gnat_entity] = gnu_decl;
/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
   Return the ..._DECL node that was associated with it. If there is no tree
   node associated with GNAT_ENTITY, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

/* NOTE(review): the return-type line, braces and the abort call that the
   `if' below guards are missing from this excerpt.  */
get_gnu_tree (gnat_entity)
     Entity_Id gnat_entity;
  if (! associate_gnat_to_gnu [gnat_entity])

  return associate_gnat_to_gnu [gnat_entity];
/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

/* NOTE(review): the return-type line (presumably `int') and braces are
   missing from this excerpt.  */
present_gnu_tree (gnat_entity)
     Entity_Id gnat_entity;
  return (associate_gnat_to_gnu [gnat_entity] != NULL_TREE);
/* Return non-zero if we are currently in the global binding level.  */

/* NOTE(review): the function header (presumably `global_bindings_p') and
   the tail of this condition are missing from this excerpt.  */
  return (force_global != 0 || current_binding_level == global_binding_level
/* Return the list of declarations in the current level. Note that this list
   is in reverse order (it has to be so for back-end compatibility).  */

/* NOTE(review): the function header (presumably the back-end `getdecls'
   hook) and braces are missing from this excerpt.  */
  return current_binding_level->names;
/* Nonzero if the current level needs to have a BLOCK made.  */

/* NOTE(review): the function header (presumably `kept_level_p') and braces
   are missing from this excerpt.  A level needs a BLOCK when it declared
   anything, i.e. its `names' chain is non-empty.  */
  return (current_binding_level->names != 0);
/* Enter a new binding level. The input parameter is ignored, but has to be
   specified for back-end compatibility.  */

/* NOTE(review): the function name line (`pushlevel'), braces and the `else'
   arm that allocates a fresh structure are partly missing from this
   excerpt.  */
     int ignore ATTRIBUTE_UNUSED;
  struct binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->level_chain;
        = (struct binding_level *) xmalloc (sizeof (struct binding_level));

  /* Start from a clean slate.  */
  *newlevel = clear_binding_level;

  /* Add this level to the front of the chain (stack) of levels that are
     active.  */
  newlevel->level_chain = current_binding_level;
  current_binding_level = newlevel;
/* Exit a binding level.
   Pop the level off, and restore the state of the identifier-decl mappings
   that were in effect when this level was entered.

   If KEEP is nonzero, this level had explicit declarations, so
   and create a "block" (a BLOCK node) for the level
   to record its declarations and subblocks for symbol table output.

   If FUNCTIONBODY is nonzero, this level is the body of a function,
   so create a block as if KEEP were set and also clear out all
   label names.

   If REVERSE is nonzero, reverse the order of decls before putting
   them into the BLOCK.  */

/* NOTE(review): the return-type line, the parameter declarations, braces
   and several interior statements of this function are missing from this
   excerpt; the visible code is left untouched.  */
poplevel (keep, reverse, functionbody)
  /* Points to a GCC BLOCK tree node. This is the BLOCK node constructed for
     the binding level that we are about to exit and which is returned by
     this function.  */
  tree block = NULL_TREE;
  tree subblock_chain = current_binding_level->blocks;
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired. Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL node, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
      : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
        && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
        && DECL_INITIAL (decl_node) != 0)
        push_function_context ();
        output_inline_function (decl_node);
        pop_function_context ();

  /* Reuse any BLOCK preallocated for this level; otherwise build one only
     if the level declared something or is a function body.  */
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);
      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;

  /* Record the BLOCK node just built as the subblock its enclosing scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;

  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
         don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
          if (TREE_USED (subblock_node))
            TREE_USED (DECL_NAME (subblock_node)) = 1;
          if (TREE_ADDRESSABLE (subblock_node))
            TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;

  /* Pop the current level, and free the structure for reuse.  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;
    level->level_chain = free_binding_level;
    free_binding_level = level;

    /* This is the top level block of a function. The ..._DECL chain stored
       in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
       leave them in the BLOCK because they are found in the FUNCTION_DECL
       instead.  */
    DECL_INITIAL (current_function_decl) = block;
    BLOCK_VARS (block) = 0;

  /* Hand a freshly made BLOCK up to the enclosing level as a subblock.  */
  if (!block_previously_created)
      current_binding_level->blocks
        = chainon (current_binding_level->blocks, block);

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);

  TREE_USED (block) = 1;
/* Insert BLOCK at the end of the list of subblocks of the
   current binding level. This is used when a BIND_EXPR is expanded,
   to handle the BLOCK node inside the BIND_EXPR.  */

/* NOTE(review): the function header (`insert_block (block)') and braces are
   missing from this excerpt.  */
  TREE_USED (block) = 1;
  current_binding_level->blocks
    = chainon (current_binding_level->blocks, block);
/* Set the BLOCK node for the innermost scope
   (the one we are currently in).  */

/* NOTE(review): the function header (`set_block (block)'), braces and the
   continuation of the first `chainon' call (presumably BLOCK_VARS (block))
   are missing from this excerpt.  */
  current_binding_level->this_block = block;
  /* Absorb the block's decls and subblocks into the current level.  */
  current_binding_level->names = chainon (current_binding_level->names,
  current_binding_level->blocks = chainon (current_binding_level->blocks,
                                           BLOCK_SUBBLOCKS (block));
/* Records a ..._DECL node DECL as belonging to the current lexical scope.
   Returns the ..._DECL node.  */

/* NOTE(review): the function header, braces, the `else' keyword between the
   two context assignments and the statement adding DECL to `b->names' are
   missing from this excerpt.  */
  struct binding_level *b;

  /* If at top level, there is no context. But PARM_DECLs always go in the
     level of its function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
      b = global_binding_level;
      DECL_CONTEXT (decl) = 0;
      b = current_binding_level;
      DECL_CONTEXT (decl) = current_function_decl;

  /* Put the declaration on the list. The list of declarations is in reverse
     order. The list will be reversed later if necessary. This needs to be
     this way for compatibility with the back-end.

     Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
     will cause trouble with the debugger and aren't needed anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
      TREE_CHAIN (decl) = b->names;

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node. Code in this
     function in c-decl.c makes a copy of the type node here, but that may
     cause us trouble with incomplete types, so let's not try it (at least
     for now).  */

  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl) != 0
      && (TYPE_NAME (TREE_TYPE (decl)) == 0
          || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
          || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
              && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
              && ! DECL_ARTIFICIAL (decl))))
    TYPE_NAME (TREE_TYPE (decl)) = decl;
/* Do little here. Set up the standard declarations later after the
   front end has been run.  */

/* NOTE(review): the `init_decl_processing ()' header line, braces and a
   number of interior statements are missing from this excerpt.  In
   particular, a call creating the global binding level (presumably
   `pushlevel (0);') appears to have been dropped just before
   `global_binding_level' is captured — as written the capture would record
   the 0 stored above; confirm against the pristine source.  */
init_decl_processing ()
  /* The structure `tree_identifier' is the GCC tree data structure that holds
     IDENTIFIER_NODE nodes. We need to call `set_identifier_size' to tell GCC
     that we have not added any language specific fields to IDENTIFIER_NODE
     nodes.  */
  set_identifier_size (sizeof (struct tree_identifier));

  /* incomplete_decl_finalize_hook is defined in toplev.c. It needs to be set
     by each front end to the appropriate routine that handles incomplete
     VAR_DECL nodes. This routine will be invoked by compile_file when a
     VAR_DECL node of DECL_SIZE zero is encountered.  */
  incomplete_decl_finalize_hook = finish_incomplete_decl;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE. Use the signed type
     corresponding to the size of ptr_mode. Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.
     NOTE(review): the continuation arguments of the next two calls are
     missing from this excerpt.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),

  ptr_void_type_node = build_pointer_type (void_type_node);
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.  */

/* NOTE(review): the return-type line, local declarations, braces and many
   interior lines (left-hand sides of several `= create_subprog_decl' and
   `= pushdecl' statements, `endlink' continuations, the qualifier arguments
   of build_qualified_type, etc.) are missing from this excerpt; the visible
   code is left untouched.  */
init_gigi_decls (long_long_float_type, exception_type)
     tree long_long_float_type, exception_type;
  /* Set the types that GCC and Gigi use from the front end. We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
      /* In this case, the builtin floating point types are VAX float,
         so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
                            longest_float_type_node));
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
                        unsigned_type_node));

    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
                                     build_function_type (ptr_void_type_node,
                                                          tree_cons (NULL_TREE,
                                     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                           NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.  */
    = build_array_type (type_for_mode (Pmode, 0),
                        build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
       NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

    = create_subprog_decl
      (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_gnat_exception"),
       build_function_type (build_pointer_type (except_type_node), NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

  /* Function that raise exceptions.  */
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE,
                                       build_pointer_type (except_type_node),
       NULL_TREE, 0, 1, 1, 0);

  /* __gnat_raise_constraint_error takes a string, an integer and never
     returns.  */
  raise_constraint_error_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_constraint_error"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE,
                                       build_pointer_type (char_type_node),
                                       tree_cons (NULL_TREE,
       NULL_TREE, 0, 1, 1, 0);

  /* Likewise for __gnat_raise_program_error.  */
  raise_program_error_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_program_error"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE,
                                       build_pointer_type (char_type_node),
                                       tree_cons (NULL_TREE,
       NULL_TREE, 0, 1, 1, 0);

  /* Likewise for __gnat_raise_storage_error.  */
  raise_storage_error_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_storage_error"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE,
                                       build_pointer_type (char_type_node),
                                       tree_cons (NULL_TREE,
       NULL_TREE, 0, 1, 1, 0);

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_THIS_VOLATILE (raise_constraint_error_decl) = 1;
  TREE_THIS_VOLATILE (raise_program_error_decl) = 1;
  TREE_THIS_VOLATILE (raise_storage_error_decl) = 1;

  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_constraint_error_decl) = 1;
  TREE_SIDE_EFFECTS (raise_program_error_decl) = 1;
  TREE_SIDE_EFFECTS (raise_storage_error_decl) = 1;

  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
  TREE_TYPE (raise_constraint_error_decl)
    = build_qualified_type (TREE_TYPE (raise_constraint_error_decl),
  TREE_TYPE (raise_program_error_decl)
    = build_qualified_type (TREE_TYPE (raise_program_error_decl),
  TREE_TYPE (raise_storage_error_decl)
    = build_qualified_type (TREE_TYPE (raise_storage_error_decl),

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
    = create_subprog_decl
      (get_identifier ("setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Let the back end expand setjmp as the builtin it knows about.  */
  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  /* Register the standard declarations as GC roots.  */
  ggc_add_tree_root (gnat_std_decls,
                     sizeof gnat_std_decls / sizeof gnat_std_decls[0]);
/* This routine is called in tree.c to print an error message for invalid use
   of an incomplete type.  */

/* NOTE(review): the return-type line, braces and the function body are
   missing from this excerpt; both parameters are explicitly unused.  */
incomplete_type_error (dont_care_1, dont_care_2)
     tree dont_care_1 ATTRIBUTE_UNUSED;
     tree dont_care_2 ATTRIBUTE_UNUSED;
/* This function is called indirectly from toplev.c to handle incomplete
   declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero. To be precise,
   compile_file in toplev.c makes an indirect call through the function pointer
   incomplete_decl_finalize_hook which is initialized to this routine in
   init_decl_processing.  */

/* NOTE(review): the return-type line, braces and the function body are
   missing from this excerpt; the parameter is explicitly unused.  */
finish_incomplete_decl (dont_care)
     tree dont_care ATTRIBUTE_UNUSED;
/* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
   nodes (FIELDLIST), finish constructing the record or union type.
   If HAS_REP is nonzero, this record has a rep clause; don't call
   layout_type but merely set the size and alignment ourselves.
   If DEFER_DEBUG is nonzero, do not call the debugging routines
   on this type; it will be done later.  */

/* NOTE(review): the return-type line, parameter declarations, braces,
   `else'/`switch' keywords and a number of interior lines of this function
   are missing from this excerpt; the visible code is left untouched.  */
finish_record_type (record_type, fieldlist, has_rep, defer_debug)
  enum tree_code code = TREE_CODE (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  tree size_unit = size_zero_node;

  TYPE_FIELDS (record_type) = fieldlist;

  /* Reuse an existing TYPE_DECL as the stub decl, or push a new one.  */
  if (TYPE_NAME (record_type) != 0
      && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
    TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
    TYPE_STUB_DECL (record_type)
      = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),

  /* We don't need both the typedef name and the record name output in
     the debugging information, since they are the same.  */
  DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;

  /* Globally initialize the record first. If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */

      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
      TYPE_MODE (record_type) = BLKmode;
      if (TYPE_SIZE (record_type) == 0)
          TYPE_SIZE (record_type) = bitsize_zero_node;
          TYPE_SIZE_UNIT (record_type) = size_zero_node;

      /* Ensure there isn't a size already set. There can be in an error
         case where there is a rep clause but all fields have errors and
         no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);

  /* At this point, the position and size of each field is known. It was
     either set before entry by a rep clause, or by laying out the type
     above. We now make a pass through the fields (in reverse order for
     QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
     (for rep'ed records that are not padding types); and the mode (for
     padding records).  */

  if (code == QUAL_UNION_TYPE)
    fieldlist = nreverse (fieldlist);

  for (field = fieldlist; field; field = TREE_CHAIN (field))
      tree type = TREE_TYPE (field);
      tree this_size = DECL_SIZE (field);
      tree this_size_unit = DECL_SIZE_UNIT (field);
      tree this_ada_size = DECL_SIZE (field);

      /* Prefer a component's Ada size when one is recorded (fat pointers
         and template-containing types excluded).  */
      if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && ! TYPE_IS_FAT_POINTER_P (type)
          && ! TYPE_CONTAINS_TEMPLATE_P (type)
          && TYPE_ADA_SIZE (type) != 0)
        this_ada_size = TYPE_ADA_SIZE (type);

      if (has_rep && ! DECL_BIT_FIELD (field))
        TYPE_ALIGN (record_type)
          = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

          /* Plain unions: the size is the maximum over the members.  */
          ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
          size = size_binop (MAX_EXPR, size, this_size);
          size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);

        case QUAL_UNION_TYPE:
          /* Discriminated unions: select per the field's qualifier.  */
            = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                           this_ada_size, ada_size));
          size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
          size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
                                   this_size_unit, size_unit));

          /* Since we know here that all fields are sorted in order of
             increasing bit position, the size of the record is one
             higher than the ending bit of the last field processed
             unless we have a rep clause, since in that case we might
             have a field outside a QUAL_UNION_TYPE that has a higher ending
             position. So use a MAX in that case. Also, if this field is a
             QUAL_UNION_TYPE, we need to take into account the previous size in
             the case of empty variants.  */
            = merge_sizes (ada_size, bit_position (field), this_ada_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
          size = merge_sizes (size, bit_position (field), this_size,
                              TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
            = merge_sizes (size_unit, byte_position (field), this_size_unit,
                           TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);

  /* Restore the original field order.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (fieldlist);

  /* If this is a padding record, we never want to make the size smaller than
     what was specified in it, if any.  */
  if (TREE_CODE (record_type) == RECORD_TYPE
      && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
      size = TYPE_SIZE (record_type);
      size_unit = TYPE_SIZE_UNIT (record_type);

  /* Now set any of the values we've just computed that apply.  */
  if (! TYPE_IS_FAT_POINTER_P (record_type)
      && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
    TYPE_ADA_SIZE (record_type) = ada_size;

#ifdef ROUND_TYPE_SIZE
  size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
  /* NOTE(review): `record_size' below appears to be a typo for
     `record_type' — no `record_size' is declared in this function.  It is
     latent because this line only compiles on targets that define
     ROUND_TYPE_SIZE; confirm and fix against the full source.  */
  size_unit = ROUND_TYPE_SIZE_UNIT (record_size, size_unit,
                                    TYPE_ALIGN (record_type) / BITS_PER_UNIT);
  size = round_up (size, TYPE_ALIGN (record_type));
  size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      && ! (TREE_CODE (record_type) == RECORD_TYPE
            && TYPE_IS_PADDING_P (record_type)
            && TREE_CODE (size) != INTEGER_CST
            && contains_placeholder_p (size)))
      TYPE_SIZE (record_type) = size;
      TYPE_SIZE_UNIT (record_type) = size_unit;

    compute_record_mode (record_type);

      /* If this record is of variable size, rename it so that the
         debugger knows it is and make a new, parallel, record
         that tells the debugger how the record is laid out. See
         exp_dbug.ads.  */
      if (TREE_CODE (TYPE_SIZE (record_type)) != INTEGER_CST)
            = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
                         ? UNION_TYPE : TREE_CODE (record_type));
          tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
            = concat_id_with_name (orig_id,
                                   TREE_CODE (record_type) == QUAL_UNION_TYPE
          tree last_pos = bitsize_zero_node;

          TYPE_NAME (new_record_type) = new_id;
          TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
          TYPE_STUB_DECL (new_record_type)
            = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
          DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
          DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
            = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
          TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));

          /* Now scan all the fields, replacing each field with a new
             field corresponding to the new encoding.  */
          for (old_field = TYPE_FIELDS (record_type); old_field != 0;
               old_field = TREE_CHAIN (old_field))
              tree field_type = TREE_TYPE (old_field);
              tree field_name = DECL_NAME (old_field);
              tree curpos = bit_position (old_field);
              unsigned int align = 0;

              /* See how the position was modified from the last position.

                 There are two basic cases we support: a value was added
                 to the last position or the last position was rounded to
                 a boundary and then something was added. Check for the
                 first case first. If not, see if there is any evidence
                 of rounding. If so, round the last position and try
                 again.

                 If this is a union, the position can be taken as zero.  */

              if (TREE_CODE (new_record_type) == UNION_TYPE)
                pos = bitsize_zero_node, align = 0;
                pos = compute_related_constant (curpos, last_pos);

              if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
                  && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
                  align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
                  pos = compute_related_constant (curpos,
                                                  round_up (last_pos, align));
              else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
                       && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
                       && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
                       && host_integerp (TREE_OPERAND
                                         (TREE_OPERAND (curpos, 0), 1),
                    (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
                  pos = compute_related_constant (curpos,
                                                  round_up (last_pos, align));

              /* If we can't compute a position, set it to zero.

                 ??? We really should abort here, but it's too much work
                 to get this correct for all cases.  */

                pos = bitsize_zero_node;

              /* See if this type is variable-size and make a new type
                 and indicate the indirection if so.  */
              if (TREE_CODE (TYPE_SIZE (field_type)) != INTEGER_CST)
                  field_type = build_pointer_type (field_type);

              /* Make a new field name, if necessary.  */
              if (var || align != 0)
                    sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
                             align / BITS_PER_UNIT);
                    strcpy (suffix, "XVL");

                  field_name = concat_id_with_name (field_name, suffix);

              new_field = create_field_decl (field_name, field_type,
                                             TYPE_SIZE (field_type), pos, 0);
              TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
              TYPE_FIELDS (new_record_type) = new_field;

              /* If old_field is a QUAL_UNION_TYPE, take its size as being
                 zero. The only time it's not the last field of the record
                 is when there are other components at fixed positions after
                 it (meaning there was a rep clause for every field) and we
                 want to be able to encode them.  */
              last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
                                     (TREE_CODE (TREE_TYPE (old_field))
                                      : TYPE_SIZE (TREE_TYPE (old_field)));

          /* Fields were prepended above; put them back in source order.  */
          TYPE_FIELDS (new_record_type)
            = nreverse (TYPE_FIELDS (new_record_type));

          rest_of_type_compilation (new_record_type, global_bindings_p ());

      rest_of_type_compilation (record_type, global_bindings_p ());
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size. If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

/* NOTE(review): the return-type line, the `last_size'/`special'/`has_rep'
   parameter declarations, braces, the `if (has_rep)' guard before the
   MAX_EXPR, the early return of the simple case and the closing arguments
   of the recursive COND_EXPR rebuild are missing from this excerpt.  */
merge_sizes (last_size, first_bit, size, special, has_rep)
     tree first_bit, size;
  tree type = TREE_TYPE (last_size);

  /* Simple case: the field ends at FIRST_BIT + SIZE (MAXed with the old
     size when a rep clause is present).  */
  if (! special || TREE_CODE (size) != COND_EXPR)
      tree new = size_binop (PLUS_EXPR, first_bit, size);
        new = size_binop (MAX_EXPR, last_size, new);

  /* Variant case: rebuild the COND_EXPR, substituting the previous size
     wherever an arm is zero (an empty variant).  */
  return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
                      integer_zerop (TREE_OPERAND (size, 1))
                      ? last_size : merge_sizes (last_size, first_bit,
                                                 TREE_OPERAND (size, 1),
                      integer_zerop (TREE_OPERAND (size, 2))
                      ? last_size : merge_sizes (last_size, first_bit,
                                                 TREE_OPERAND (size, 2),
1069 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1070 related by the addition of a constant. Return that constant if so. */
1073 compute_related_constant (op0, op1)
1076 tree op0_var, op1_var;
/* Split each operand into a constant part and a variable part.  */
1077 tree op0_con = split_plus (op0, &op0_var);
1078 tree op1_con = split_plus (op1, &op1_var);
/* Candidate constant: difference of the two constant parts.  */
1079 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
/* If the variable parts match, RESULT relates the operands; otherwise
   double-check by reconstructing OP0 from OP1's variable part.  (The
   return statements fall in lines missing from this excerpt.)  */
1081 if (operand_equal_p (op0_var, op1_var, 0))
1083 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1089 /* Utility function of above to split a tree OP which may be a sum, into a
1090 constant part, which is returned, and a variable part, which is stored
1091 in *PVAR. *PVAR may be size_zero_node. All operations must be of
1095 split_plus (in, pvar)
1099 tree result = bitsize_zero_node;
/* Strip NON_LVALUE_EXPR wrappers first.  */
1101 while (TREE_CODE (in) == NON_LVALUE_EXPR)
1102 in = TREE_OPERAND (in, 0);
1105 if (TREE_CODE (in) == INTEGER_CST)
/* Pure constant: the variable part is zero (return of IN itself is in a
   line missing from this excerpt).  */
1107 *pvar = bitsize_zero_node;
1110 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1112 tree lhs_var, rhs_var;
/* Recursively split both operands ...  */
1113 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1114 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
/* ... and combine the constant parts, reusing IN's own PLUS/MINUS code
   for the right-hand side so a MINUS subtracts.  */
1116 result = size_binop (PLUS_EXPR, result, lhs_con);
1117 result = size_binop (TREE_CODE (in), result, rhs_con);
/* If neither side yielded a constant, IN is entirely variable.  */
1119 if (lhs_var == TREE_OPERAND (in, 0)
1120 && rhs_var == TREE_OPERAND (in, 1))
1121 return bitsize_zero_node;
1123 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
/* Fallback: not a constant or sum; no constant part to split out.  */
1127 return bitsize_zero_node;
1130 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1131 subprogram. If it is void_type_node, then we are dealing with a procedure,
1132 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1133 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1134 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1135 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1136 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1137 RETURNS_WITH_DSP is nonzero if the function is to return with a
1138 depressed stack pointer. */
1141 create_subprog_type (return_type, param_decl_list, cico_list,
1142 returns_unconstrained, returns_by_ref, returns_with_dsp)
1144 tree param_decl_list;
1146 int returns_unconstrained, returns_by_ref, returns_with_dsp;
1148 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1149 the subprogram formal parameters. This list is generated by traversing the
1150 input list of PARM_DECL nodes. */
1151 tree param_type_list = NULL;
/* Collect the type of each PARM_DECL (builds the list in reverse).  */
1155 for (param_decl = param_decl_list; param_decl;
1156 param_decl = TREE_CHAIN (param_decl))
1157 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1160 /* The list of the function parameter types has to be terminated by the void
1161 type to signal to the back-end that we are not dealing with a variable
1162 parameter subprogram, but that the subprogram has a fixed number of
1164 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1166 /* The list of argument types has been created in reverse
1168 param_type_list = nreverse (param_type_list);
1170 type = build_function_type (return_type, param_type_list);
1172 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1173 or the new type should, make a copy of TYPE. Likewise for
1174 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1175 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1176 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1177 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1178 type = copy_type (type);
/* Record the Ada-specific properties on the (possibly copied) type.
   The final `return type;' falls outside this gappy excerpt.  */
1180 TYPE_CI_CO_LIST (type) = cico_list;
1181 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1182 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1183 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1187 /* Return a copy of TYPE but safe to modify in any way. */
/* NOTE(review): the function header line is missing from this excerpt;
   only the body is visible.  */
1193 tree new = copy_node (type);
1195 /* copy_node clears this field instead of copying it, because it is
1196 aliased with TREE_CHAIN. */
1197 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
/* The copy must not share pointer-to/reference-to caches or the variant
   chain with the original; it becomes its own main variant.  */
1199 TYPE_POINTER_TO (new) = 0;
1200 TYPE_REFERENCE_TO (new) = 0;
1201 TYPE_MAIN_VARIANT (new) = new;
1202 TYPE_NEXT_VARIANT (new) = 0;
1207 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1208 TYPE_INDEX_TYPE is INDEX. */
1211 create_index_type (min, max, index)
1215 /* First build a type for the desired range. */
1216 tree type = build_index_2_type (min, max);
1218 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1219 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1220 is set, but not to INDEX, make a copy of this type with the requested
1221 index type. Note that we have no way of sharing these types, but that's
1222 only a small hole. */
1223 if (TYPE_INDEX_TYPE (type) == index)
/* (Early return lives on a line missing from this excerpt.)  */
1225 else if (TYPE_INDEX_TYPE (type) != 0)
1226 type = copy_type (type);
1228 TYPE_INDEX_TYPE (type) = index;
1232 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1233 string) and TYPE is a ..._TYPE node giving its data type.
1234 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1235 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1236 information about this type. */
1239 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1242 struct attrib *attr_list;
1246 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1247 enum tree_code code = TREE_CODE (type);
1249 DECL_ARTIFICIAL (type_decl) = artificial_p;
1250 pushdecl (type_decl);
1251 process_attributes (type_decl, attr_list);
1253 /* Pass type declaration information to the debugger unless this is an
1254 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1255 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1256 a dummy type, which will be completed later, or a type for which
1257 debugging information was not requested. */
1258 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1260 DECL_IGNORED_P (type_decl) = 1;
1261 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1262 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1263 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1264 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1269 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1270 ASM_NAME is its assembler name (if provided). TYPE is its data type
1271 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1272 expression; NULL_TREE if none.
1274 CONST_FLAG is nonzero if this variable is constant.
1276 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1277 the current compilation unit. This flag should be set when processing the
1278 variable definitions in a package specification. EXTERN_FLAG is nonzero
1279 when processing an external variable declaration (as opposed to a
1280 definition: no storage is to be allocated for the variable here).
1282 STATIC_FLAG is only relevant when not at top level. In that case
1283 it indicates whether to always allocate storage to the variable. */
/* NOTE(review): several declaration lines (e.g. `int init_const = ...',
   the `tree var_decl' declarator) are missing from this gappy excerpt.  */
1286 create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
1287 extern_flag, static_flag, attr_list)
1296 struct attrib *attr_list;
/* Determine whether the initializer is a compile-time constant usable
   for static initialization (global/static case) or at least a constant
   expression (automatic case).  */
1301 : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1302 && (global_bindings_p () || static_flag
1303 ? 0 != initializer_constant_valid_p (var_init,
1304 TREE_TYPE (var_init))
1305 : TREE_CONSTANT (var_init))));
1307 = build_decl ((const_flag && init_const
1308 /* Only make a CONST_DECL for sufficiently-small objects.
1309 We consider complex double "sufficiently-small" */
1310 && TYPE_SIZE (type) != 0
1311 && host_integerp (TYPE_SIZE_UNIT (type), 1)
1312 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1313 GET_MODE_SIZE (DCmode)))
1314 ? CONST_DECL : VAR_DECL, var_name, type);
1315 tree assign_init = 0;
1317 /* If this is external, throw away any initializations unless this is a
1318 CONST_DECL (meaning we have a constant); they will be done elsewhere. If
1319 we are defining a global here, leave a constant initialization and save
1320 any variable elaborations for the elaboration routine. Otherwise, if
1321 the initializing expression is not the same as TYPE, generate the
1322 initialization with an assignment statement, since it knows how
1323 to do the required adjustents. */
1325 if (extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1328 if (global_bindings_p () && var_init != 0 && ! init_const)
/* Global with a non-constant initializer: defer to elaboration code.  */
1330 add_pending_elaborations (var_decl, var_init);
1334 else if (var_init != 0
1335 && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1336 != TYPE_MAIN_VARIANT (type))
1337 || (static_flag && ! init_const)))
/* Initialize via an explicit assignment below instead of DECL_INITIAL.  */
1338 assign_init = var_init, var_init = 0;
1340 DECL_COMMON (var_decl) = !flag_no_common;
1341 DECL_INITIAL (var_decl) = var_init;
1342 TREE_READONLY (var_decl) = const_flag;
1343 DECL_EXTERNAL (var_decl) = extern_flag;
1344 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1345 TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1346 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1347 = TYPE_VOLATILE (type);
1349 /* At the global binding level we need to allocate static storage for the
1350 variable if and only if its not external. If we are not at the top level
1351 we allocate automatic storage unless requested not to. */
1352 TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
1355 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1357 process_attributes (var_decl, attr_list);
1359 /* Add this decl to the current binding level and generate any
1360 needed code and RTL. */
1361 var_decl = pushdecl (var_decl);
1362 expand_decl (var_decl);
1364 if (DECL_CONTEXT (var_decl) != 0)
1365 expand_decl_init (var_decl);
1367 /* If this is volatile, force it into memory. */
1368 if (TREE_SIDE_EFFECTS (var_decl))
1369 mark_addressable (var_decl);
1371 if (TREE_CODE (var_decl) != CONST_DECL)
1372 rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
1374 if (assign_init != 0)
1376 /* If VAR_DECL has a padded type, convert it to the unpadded
1377 type so the assignment is done properly. */
1378 tree lhs = var_decl;
1380 if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
1381 && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
1382 lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);
/* Emit the deferred initialization as a MODIFY_EXPR statement.  */
1384 expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
1391 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1392 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1393 this field is in a record type with a "pragma pack". If SIZE is nonzero
1394 it is the specified size for this field. If POS is nonzero, it is the bit
1395 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1396 the address of this field for aliasing purposes. */
/* NOTE(review): gappy excerpt -- some parameter declarations, braces and
   conditions of this function are not visible here.  */
1399 create_field_decl (field_name, field_type, record_type, packed, size, pos,
1408 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1410 DECL_CONTEXT (field_decl) = record_type;
1411 TREE_READONLY (field_decl) = TREE_READONLY (field_type);
1413 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1414 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1415 If it is a padding type where the inner field is of variable size, it
1416 must be at its natural alignment. Just handle the packed case here; we
1417 will disallow non-aligned rep clauses elsewhere. */
1418 if (packed && TYPE_MODE (field_type) == BLKmode)
1419 DECL_ALIGN (field_decl)
1420 = ((TREE_CODE (field_type) == RECORD_TYPE
1421 && TYPE_IS_PADDING_P (field_type)
1422 && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
1423 ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);
1425 /* If a size is specified, use it. Otherwise, see if we have a size
1426 to use that may differ from the natural size of the object. */
1428 size = convert (bitsizetype, size);
/* Under pragma Pack, use the RM size when it is smaller than the
   type size.  */
1431 if (packed == 1 && ! operand_equal_p (rm_size (field_type),
1432 TYPE_SIZE (field_type), 0))
1433 size = rm_size (field_type);
1435 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1437 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1438 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1439 size = round_up (size, BITS_PER_UNIT);
1442 /* Make a bitfield if a size is specified for two reasons: first if the size
1443 differs from the natural size. Second, if the alignment is insufficient.
1444 There are a number of ways the latter can be true. But never make a
1445 bitfield if the type of the field has a nonconstant size. */
1447 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1448 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1449 && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
1451 && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
1452 bitsize_int (TYPE_ALIGN
1455 || (TYPE_ALIGN (record_type) != 0
1456 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1458 DECL_BIT_FIELD (field_decl) = 1;
1459 DECL_SIZE (field_decl) = size;
1460 if (! packed && pos == 0)
1461 DECL_ALIGN (field_decl)
1462 = (TYPE_ALIGN (record_type) != 0
1463 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1464 : TYPE_ALIGN (field_type));
/* Bitfields in a packed record are marked packed; otherwise inherit
   the PACKED argument.  */
1467 DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
1468 DECL_ALIGN (field_decl)
1469 = MAX (DECL_ALIGN (field_decl),
1470 DECL_BIT_FIELD (field_decl) ? 1
1471 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1472 : TYPE_ALIGN (field_type));
1476 /* We need to pass in the alignment the DECL is known to have.
1477 This is the lowest-order bit set in POS, but no more than
1478 the alignment of the record, if one is specified. Note
1479 that an alignment of 0 is taken as infinite. */
1480 unsigned int known_align;
1482 if (host_integerp (pos, 1))
/* x & -x isolates the lowest set bit of the position.  */
1483 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1485 known_align = BITS_PER_UNIT;
1487 if (TYPE_ALIGN (record_type)
1488 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1489 known_align = TYPE_ALIGN (record_type);
1491 layout_decl (field_decl, known_align);
1492 SET_DECL_OFFSET_ALIGN (field_decl, BIGGEST_ALIGNMENT);
1493 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1494 &DECL_FIELD_BIT_OFFSET (field_decl),
1495 BIGGEST_ALIGNMENT, pos);
1497 DECL_HAS_REP_P (field_decl) = 1;
1500 /* Mark the decl as nonaddressable if it either is indicated so semantically
1501 or if it is a bit field. */
1502 DECL_NONADDRESSABLE_P (field_decl)
1503 = ! addressable || DECL_BIT_FIELD (field_decl);
1508 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1509 effects, has the value of zero. */
/* NOTE(review): the function header line is missing from this excerpt.  */
1515 if (TREE_CODE (exp) == COMPOUND_EXPR)
/* A COMPOUND_EXPR's value is its second operand; recurse on it.  */
1516 return value_zerop (TREE_OPERAND (exp, 1));
1518 return integer_zerop (exp);
1521 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1522 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1523 readonly (either an IN parameter or an address of a pass-by-ref
1527 create_param_decl (param_name, param_type, readonly)
1532 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
/* The type used to pass the argument is the declared type itself.  */
1534 DECL_ARG_TYPE (param_decl) = param_type;
1535 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1536 TREE_READONLY (param_decl) = readonly;
1540 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1543 process_attributes (decl, attr_list)
1545 struct attrib *attr_list;
1547 for (; attr_list; attr_list = attr_list->next)
1548 switch (attr_list->type)
/* Machine attribute: hand off to the back end's attribute handler.  */
1550 case ATTR_MACHINE_ATTRIBUTE:
1551 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1553 ATTR_FLAG_TYPE_IN_PLACE);
/* Link alias: emit an assembler alias for DECL.  */
1556 case ATTR_LINK_ALIAS:
1557 TREE_STATIC (decl) = 1;
1558 assemble_alias (decl, attr_list->name);
/* Weak external: mark weak if the target supports it, else warn.  */
1561 case ATTR_WEAK_EXTERNAL:
1563 declare_weak (decl);
1565 post_error ("?weak declarations not supported on this target",
1566 attr_list->error_point);
1569 case ATTR_LINK_SECTION:
1570 #ifdef ASM_OUTPUT_SECTION_NAME
1571 DECL_SECTION_NAME (decl)
1572 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1573 IDENTIFIER_POINTER (attr_list->name));
/* A decl placed in a named section cannot be a common symbol.  */
1574 DECL_COMMON (decl) = 0;
1576 post_error ("?section attributes are not supported for this target",
1577 attr_list->error_point);
1583 /* Add some pending elaborations on the list. */
1586 add_pending_elaborations (var_decl, var_init)
/* If an initializer is present, elaboration code must be permitted
   here (presumably the guard `if (var_init != 0)' is in a missing line).  */
1591 Check_Elaboration_Code_Allowed (error_gnat_node);
1593 pending_elaborations
1594 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1597 /* Obtain any pending elaborations and clear the old list. */
1600 get_pending_elaborations ()
1602 /* Each thing added to the list went on the end; we want it on the
1604 tree result = TREE_CHAIN (pending_elaborations);
/* Detach the chain from the dummy head so the list is now empty.  */
1606 TREE_CHAIN (pending_elaborations) = 0;
1610 /* Mark the binding level stack. */
/* GC root-marking callback: ARG points at the head of a binding-level
   chain; mark the trees each level owns so the collector keeps them.  */
1613 mark_binding_level (arg)
1616 struct binding_level *level = *(struct binding_level **) arg;
1618 for (; level != 0; level = level->level_chain)
1620 ggc_mark_tree (level->names);
1621 ggc_mark_tree (level->blocks);
1622 ggc_mark_tree (level->this_block);
1626 /* Mark the pending elaboration list. */
/* NOTE(review): function header line missing from this excerpt; this is a
   GC marking routine that walks the elaboration stack recursively.  */
1632 struct e_stack *p = *((struct e_stack **) data);
1636 ggc_mark_tree (p->elab_list);
1637 mark_e_stack (&p->next);
1641 /* Return nonzero if there are pending elaborations. */
1644 pending_elaborations_p ()
/* The list has a dummy head node; a nonzero chain means entries exist.  */
1646 return TREE_CHAIN (pending_elaborations) != 0;
1649 /* Save a copy of the current pending elaboration list and make a new
1653 push_pending_elaborations ()
1655 struct e_stack *p = (struct e_stack *) xmalloc (sizeof (struct e_stack));
/* Push the current list onto the stack and start a fresh (dummy-headed)
   list for the new context.  */
1657 p->next = elist_stack;
1658 p->elab_list = pending_elaborations;
1660 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1663 /* Pop the stack of pending elaborations. */
1666 pop_pending_elaborations ()
1668 struct e_stack *p = elist_stack;
/* Restore the saved list; note the popped e_stack node's free () (if any)
   is on a line missing from this excerpt.  */
1670 pending_elaborations = p->elab_list;
1671 elist_stack = p->next;
1675 /* Return the current position in pending_elaborations so we can insert
1676 elaborations after that point. */
1679 get_elaboration_location ()
1681 return tree_last (pending_elaborations);
1684 /* Insert the current elaborations after ELAB, which is in some elaboration
1688 insert_elaboration_list (elab)
1691 tree next = TREE_CHAIN (elab);
1693 if (TREE_CHAIN (pending_elaborations))
/* Splice the current (dummy-headed) list in after ELAB, reattach what
   followed ELAB, then leave the current list empty.  */
1695 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1696 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1697 TREE_CHAIN (pending_elaborations) = 0;
1701 /* Returns a LABEL_DECL node for LABEL_NAME. */
1704 create_label_decl (label_name)
1707 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
/* Labels belong to the enclosing function and carry no value.  */
1709 DECL_CONTEXT (label_decl) = current_function_decl;
1710 DECL_MODE (label_decl) = VOIDmode;
/* Record current source position for debugging.  */
1711 DECL_SOURCE_LINE (label_decl) = lineno;
1712 DECL_SOURCE_FILE (label_decl) = input_filename;
1717 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1718 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1719 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1720 PARM_DECL nodes chained through the TREE_CHAIN field).
1722 INLINE_FLAG, PUBLIC_FLAG, and EXTERN_FLAG are used to set the appropriate
1723 fields in the FUNCTION_DECL. */
1726 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1727 inline_flag, public_flag, extern_flag, attr_list)
1731 tree param_decl_list;
1735 struct attrib *attr_list;
1737 tree return_type = TREE_TYPE (subprog_type);
1738 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1740 /* If this is a function nested inside an inlined external function, it
1741 means we aren't going to compile the outer function unless it is
1742 actually inlined, so do the same for us. */
1743 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1744 && DECL_EXTERNAL (current_function_decl))
/* (The consequent statement is on a line missing from this excerpt.)  */
1747 DECL_EXTERNAL (subprog_decl) = extern_flag;
1748 TREE_PUBLIC (subprog_decl) = public_flag;
1749 DECL_INLINE (subprog_decl) = inline_flag;
/* Propagate const/volatile semantics from the function type.  */
1750 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1751 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1752 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1753 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1754 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1757 DECL_ASSEMBLER_NAME (subprog_decl) = asm_name;
1759 process_attributes (subprog_decl, attr_list);
1761 /* Add this decl to the current binding level. */
1762 subprog_decl = pushdecl (subprog_decl);
1764 /* Output the assembler code and/or RTL for the declaration. */
1765 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1767 return subprog_decl;
1770 /* Count how deep we are into nested functions. This is because
1771 we shouldn't call the backend function context routines unless we
1772 are in a nested function. */
1774 static int function_nesting_depth;
1776 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1777 body. This routine needs to be invoked before processing the declarations
1778 appearing in the subprogram. */
1781 begin_subprog_body (subprog_decl)
1784 tree param_decl_list;
/* Only save the enclosing function's context when we really are nested.  */
1788 if (function_nesting_depth++ != 0)
1789 push_function_context ();
1791 announce_function (subprog_decl);
1793 /* Make this field nonzero so further routines know that this is not
1794 tentative. error_mark_node is replaced below (in poplevel) with the
1796 DECL_INITIAL (subprog_decl) = error_mark_node;
1798 /* This function exists in static storage. This does not mean `static' in
1800 TREE_STATIC (subprog_decl) = 1;
1802 /* Enter a new binding level. */
1803 current_function_decl = subprog_decl;
1806 /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
1807 subprogram body) so that they can be recognized as local variables in the
1810 The list of PARM_DECL nodes is stored in the right order in
1811 DECL_ARGUMENTS. Since ..._DECL nodes get stored in the reverse order in
1812 which they are transmitted to `pushdecl' we need to reverse the list of
1813 PARM_DECLs if we want it to be stored in the right order. The reason why
1814 we want to make sure the PARM_DECLs are stored in the correct order is
1815 that this list will be retrieved in a few lines with a call to `getdecl'
1816 to store it back into the DECL_ARGUMENTS field. */
1817 param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));
1819 for (param_decl = param_decl_list; param_decl;
1820 param_decl = next_param)
1821 next_param = TREE_CHAIN (param_decl);
1822 TREE_CHAIN (param_decl) = NULL;
1823 pushdecl (param_decl);
1826 /* Store back the PARM_DECL nodes. They appear in the right order. */
1827 DECL_ARGUMENTS (subprog_decl) = getdecls ();
/* Kick off RTL generation for the function body.  */
1829 init_function_start (subprog_decl, input_filename, lineno);
1830 expand_function_start (subprog_decl, 0);
1834 /* Finish the definition of the current subprogram and compile it all the way
1835 to assembler language output. */
1838 end_subprog_body (void)
/* Make the outermost block point back at the function decl.  */
1844 BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1845 = current_function_decl;
1847 /* Mark the RESULT_DECL as being in this subprogram. */
1848 DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
1850 expand_function_end (input_filename, lineno, 0);
1851 rest_of_compilation (current_function_decl);
1854 /* If we're sure this function is defined in this file then mark it
1856 if (TREE_ASM_WRITTEN (current_function_decl))
1857 mark_fn_defined_in_this_file (current_function_decl);
1860 /* Throw away any VAR_DECLs we made for OUT parameters; they must
1861 not be seen when we call this function and will be in
1862 unallocated memory anyway. */
1863 for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1864 cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1865 TREE_VALUE (cico_list) = 0;
1867 if (DECL_SAVED_INSNS (current_function_decl) == 0)
1869 /* Throw away DECL_RTL in any PARM_DECLs unless this function
1870 was saved for inline, in which case the DECL_RTLs are in
1871 preserved memory. */
1872 for (decl = DECL_ARGUMENTS (current_function_decl);
1873 decl != 0; decl = TREE_CHAIN (decl))
1875 SET_DECL_RTL (decl, 0);
1876 DECL_INCOMING_RTL (decl) = 0;
1879 /* Similarly, discard DECL_RTL of the return value. */
1880 SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1882 /* But DECL_INITIAL must remain nonzero so we know this
1883 was an actual function definition unless toplev.c decided not
1885 if (DECL_INITIAL (current_function_decl) != 0)
1886 DECL_INITIAL (current_function_decl) = error_mark_node;
1888 DECL_ARGUMENTS (current_function_decl) = 0;
1891 /* If we are not at the bottom of the function nesting stack, pop up to
1892 the containing function. Otherwise show we aren't in any function. */
1893 if (--function_nesting_depth != 0)
1894 pop_function_context ();
1896 current_function_decl = 0;
1899 /* Return a definition for a builtin function named NAME and whose data type
1900 is TYPE. TYPE should be a function type with argument types.
1901 FUNCTION_CODE tells later passes how to compile calls to this function.
1902 See tree.h for its possible values.
1904 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1905 the name to be called if we can't opencode the function. */
1908 builtin_function (name, type, function_code, class, library_name)
1912 enum built_in_class class;
1913 const char *library_name;
1915 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
/* Builtins are external, publicly visible declarations.  */
1917 DECL_EXTERNAL (decl) = 1;
1918 TREE_PUBLIC (decl) = 1;
1920 DECL_ASSEMBLER_NAME (decl) = get_identifier (library_name);
/* Record the builtin class and code so later passes can open-code calls.  */
1923 DECL_BUILT_IN_CLASS (decl) = class;
1924 DECL_FUNCTION_CODE (decl) = function_code;
1928 /* Return an integer type with the number of bits of precision given by
1929 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1930 it is a signed type. */
1933 type_for_size (precision, unsignedp)
/* Return the cached type if we already made one for this precision and
   signedness.  */
1940 if (precision <= 2 * MAX_BITS_PER_WORD
1941 && signed_and_unsigned_types[precision][unsignedp] != 0)
1942 return signed_and_unsigned_types[precision][unsignedp]; 
1945 t = make_unsigned_type (precision);
1947 t = make_signed_type (precision);
/* Cache the freshly-made type when it fits in the table.  */
1949 if (precision <= 2 * MAX_BITS_PER_WORD)
1950 signed_and_unsigned_types[precision][unsignedp] = t;
1952 if (TYPE_NAME (t) == 0)
/* Give anonymous types a synthetic name like "UNSIGNED_32".  */
1954 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1955 TYPE_NAME (t) = get_identifier (type_name);
1961 /* Likewise for floating-point types. */
1964 float_type_for_size (precision, mode)
1966 enum machine_mode mode;
/* One cached REAL_TYPE per machine mode.  */
1971 if (float_types[(int) mode] != 0)
1972 return float_types[(int) mode];
1974 float_types[(int) mode] = t = make_node (REAL_TYPE);
1975 TYPE_PRECISION (t) = precision;
/* Sanity check: laying out the type must yield the requested mode
   (the consequent of this test is on a line missing from this excerpt).  */
1978 if (TYPE_MODE (t) != mode)
1981 if (TYPE_NAME (t) == 0)
/* Give anonymous types a synthetic name like "FLOAT_64".  */
1983 sprintf (type_name, "FLOAT_%d", precision);
1984 TYPE_NAME (t) = get_identifier (type_name);
1990 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1991 an unsigned type; otherwise a signed type is returned. */
1994 type_for_mode (mode, unsignedp)
1995 enum machine_mode mode;
/* Dispatch on the mode class: float modes vs. integer modes.  */
1998 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1999 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
2001 return type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2004 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2007 unsigned_type (type_node)
2010 tree type = type_for_size (TYPE_PRECISION (type_node), 1);
/* For Ada modular types, keep a link to the original modular type in
   TREE_TYPE of a private copy.  */
2012 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2014 type = copy_node (type);
2015 TREE_TYPE (type) = type_node;
/* Likewise when the base type (TREE_TYPE) of TYPE_NODE is modular.  */
2017 else if (TREE_TYPE (type_node) != 0
2018 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2019 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2021 type = copy_node (type);
2022 TREE_TYPE (type) = TREE_TYPE (type_node);
2028 /* Return the signed version of a TYPE_NODE, a scalar type. */
2031 signed_type (type_node)
2034 tree type = type_for_size (TYPE_PRECISION (type_node), 0);
/* Mirror of unsigned_type: preserve the link to an Ada modular type by
   copying and pointing TREE_TYPE at it.  */
2036 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2038 type = copy_node (type);
2039 TREE_TYPE (type) = type_node;
2041 else if (TREE_TYPE (type_node) != 0
2042 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2043 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2045 type = copy_node (type);
2046 TREE_TYPE (type) = TREE_TYPE (type_node);
2052 /* Return a type the same as TYPE except unsigned or signed according to
2056 signed_or_unsigned_type (unsignedp, type)
/* Non-integral types, or types already of the requested signedness,
   are returned unchanged (the return is in a missing line).  */
2060 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2063 return type_for_size (TYPE_PRECISION (type), unsignedp);
2066 /* EXP is an expression for the size of an object. If this size contains
2067 discriminant references, replace them with the maximum (if MAX_P) or
2068 minimum (if ! MAX_P) possible value of the discriminant. */
/* NOTE(review): gappy excerpt -- several case labels, braces and return
   statements of this large switch are missing here.  */
2071 max_size (exp, max_p)
2075 enum tree_code code = TREE_CODE (exp);
2076 tree type = TREE_TYPE (exp);
2078 switch (TREE_CODE_CLASS (code))
/* Lists: recurse over value and chain.  */
2085 if (code == TREE_LIST)
2086 return tree_cons (TREE_PURPOSE (exp),
2087 max_size (TREE_VALUE (exp), max_p),
2088 TREE_CHAIN (exp) != 0
2089 ? max_size (TREE_CHAIN (exp), max_p) : 0);
2093 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2094 modify. Otherwise, we abort since it is something we can't
2096 if (! contains_placeholder_p (exp))
/* Replace the discriminant reference with its type's extreme value.  */
2099 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2101 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2104 return max_p ? size_one_node : size_zero_node;
2109 switch (TREE_CODE_LENGTH (code))
/* Unary: strip NON_LVALUE_EXPR; NEGATE flips the max/min direction.  */
2112 if (code == NON_LVALUE_EXPR)
2113 return max_size (TREE_OPERAND (exp, 0), max_p);
2116 fold (build1 (code, type,
2117 max_size (TREE_OPERAND (exp, 0),
2118 code == NEGATE_EXPR ? ! max_p : max_p)));
2121 if (code == RTL_EXPR)
2123 else if (code == COMPOUND_EXPR)
2124 return max_size (TREE_OPERAND (exp, 1), max_p);
2125 else if (code == WITH_RECORD_EXPR)
/* Binary: recurse on both sides; MINUS flips the direction for the
   right-hand side.  */
2129 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2130 tree rhs = max_size (TREE_OPERAND (exp, 1),
2131 code == MINUS_EXPR ? ! max_p : max_p);
2133 /* Special-case wanting the maximum value of a MIN_EXPR.
2134 In that case, if one side overflows, return the other.
2135 sizetype is signed, but we know sizes are non-negative.
2136 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2137 overflowing or the maximum possible value and the RHS
2139 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2141 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2143 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2144 && (TREE_OVERFLOW (lhs)
2145 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2146 && ! TREE_CONSTANT (rhs))
2149 return fold (build (code, type, lhs, rhs));
/* Ternary: a COND_EXPR's size bound is the MAX of its two arms.  */
2153 if (code == SAVE_EXPR)
2155 else if (code == COND_EXPR)
2156 return fold (build (MAX_EXPR, type,
2157 max_size (TREE_OPERAND (exp, 1), max_p),
2158 max_size (TREE_OPERAND (exp, 2), max_p)));
2159 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2160 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2161 max_size (TREE_OPERAND (exp, 1), max_p));
2168 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2169 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2170 Return a constructor for the template. */
/* NOTE(review): this extract is elided (gaps in the embedded original line
   numbers); the K&R parameter declarations and opening brace are missing
   here, so only comments have been added.  */
2173 build_template (template_type, array_type, expr)
2178 tree template_elts = NULL_TREE;
2179 tree bound_list = NULL_TREE;
/* Strip a padding record or a left-justified modular wrapper to reach the
   underlying array type.  */
2182 if (TREE_CODE (array_type) == RECORD_TYPE
2183 && (TYPE_IS_PADDING_P (array_type)
2184 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2185 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
/* A packed array may be represented as an INTEGER_TYPE carrying the actual
   bounds; in either case pick up the recorded bound list if there is one.  */
2187 if (TREE_CODE (array_type) == ARRAY_TYPE
2188 || (TREE_CODE (array_type) == INTEGER_TYPE
2189 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2190 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2192 /* First make the list for a CONSTRUCTOR for the template. Go down the
2193 field list of the template instead of the type chain because this
2194 array might be an Ada array of arrays and we can't tell where the
2195 nested arrays stop being the underlying object. */
/* Each dimension contributes two consecutive FIELD_DECLs in the template
   (low and high bound), hence the double TREE_CHAIN step per iteration.  */
2197 for (field = TYPE_FIELDS (template_type); field;
2199 ? (bound_list = TREE_CHAIN (bound_list))
2200 : (array_type = TREE_TYPE (array_type))),
2201 field = TREE_CHAIN (TREE_CHAIN (field)))
2203 tree bounds, min, max;
2205 /* If we have a bound list, get the bounds from there. Likewise
2206 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2207 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2208 This will give us a maximum range. */
2209 if (bound_list != 0)
2210 bounds = TREE_VALUE (bound_list);
2211 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2212 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2213 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2214 && DECL_BY_COMPONENT_PTR_P (expr))
2215 bounds = TREE_TYPE (field);
/* Convert each bound to the type of the corresponding template field;
   note FIELD is the max field and TREE_CHAIN (field) the min field here.  */
2219 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2220 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2222 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2223 surround them with a WITH_RECORD_EXPR giving EXPR as the
2225 if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
2226 min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
2227 if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
2228 max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);
/* Cons the pair onto the element list; the list is built in reverse and
   flipped by nreverse below.  */
2230 template_elts = tree_cons (TREE_CHAIN (field), max,
2231 tree_cons (field, min, template_elts));
2234 return build_constructor (template_type, nreverse (template_elts));
2237 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2238 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2239 in the type contains in its DECL_INITIAL the expression to use when
2240 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2241 to print out an error message if the mechanism cannot be applied to
2242 an object of that type and also for the name. */
/* NOTE(review): elided extract — several declarations, case labels, and
   closing braces from the original are not present here; comments only.  */
2245 build_vms_descriptor (type, mech, gnat_entity)
2247 Mechanism_Type mech;
2248 Entity_Id gnat_entity;
2250 tree record_type = make_node (RECORD_TYPE);
2251 tree field_list = 0;
2260 /* If TYPE is an unconstrained array, use the underlying array type. */
2261 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2262 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2264 /* If this is an array, compute the number of dimensions in the array,
2265 get the index types, and point to the inner type. */
2266 if (TREE_CODE (type) != ARRAY_TYPE)
2269 for (ndim = 1, inner_type = type;
2270 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2271 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2272 ndim++, inner_type = TREE_TYPE (inner_type))
2275 idx_arr = (tree *) alloca (ndim * sizeof (tree));
/* For Fortran-convention arrays (except NCA descriptors) the dimensions
   are recorded in reverse order.  */
2277 if (mech != By_Descriptor_NCA
2278 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2279 for (i = ndim - 1, inner_type = type;
2281 i--, inner_type = TREE_TYPE (inner_type))
2282 idx_arr[i] = TYPE_DOMAIN (inner_type);
2284 for (i = 0, inner_type = type;
2286 i++, inner_type = TREE_TYPE (inner_type))
2287 idx_arr[i] = TYPE_DOMAIN (inner_type);
2289 /* Now get the DTYPE value. */
/* The numeric codes below are apparently VMS DSC$K_DTYPE_* values keyed
   off the GCC mode size and signedness — confirm against the OpenVMS
   descriptor documentation.  */
2290 switch (TREE_CODE (type))
2294 if (TYPE_VAX_FLOATING_POINT_P (type))
2295 switch ((int) TYPE_DIGITS_VALUE (type))
2308 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2311 dtype = TREE_UNSIGNED (type) ? 2 : 6;
2314 dtype = TREE_UNSIGNED (type) ? 3 : 7;
2317 dtype = TREE_UNSIGNED (type) ? 4 : 8;
2320 dtype = TREE_UNSIGNED (type) ? 5 : 9;
2323 dtype = TREE_UNSIGNED (type) ? 25 : 26;
2329 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2333 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2334 && TYPE_VAX_FLOATING_POINT_P (type))
2335 switch ((int) TYPE_DIGITS_VALUE (type))
2347 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2358 /* Get the CLASS value. */
/* Descriptor class is selected by the passing mechanism: A (array),
   NCA (non-contiguous array), SB (string with bounds), S (string).  */
2361 case By_Descriptor_A:
2364 case By_Descriptor_NCA:
2367 case By_Descriptor_SB:
2374 /* Make the type for a descriptor for VMS. The first four fields
2375 are the same for all types. */
2378 = chainon (field_list,
2379 make_descriptor_field
2380 ("LENGTH", type_for_size (16, 1), record_type,
2381 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2383 field_list = chainon (field_list,
2384 make_descriptor_field ("DTYPE", type_for_size (8, 1),
2385 record_type, size_int (dtype)));
2386 field_list = chainon (field_list,
2387 make_descriptor_field ("CLASS", type_for_size (8, 1),
2388 record_type, size_int (class)));
2391 = chainon (field_list,
2392 make_descriptor_field ("POINTER",
2393 build_pointer_type (type),
2396 build_pointer_type (type),
2397 build (PLACEHOLDER_EXPR,
/* Mechanism-specific tail of the descriptor follows.  */
2403 case By_Descriptor_S:
2406 case By_Descriptor_SB:
/* SB descriptors carry the low and high bounds of a one-dimensional
   array (zero for non-arrays).  */
2408 = chainon (field_list,
2409 make_descriptor_field
2410 ("SB_L1", type_for_size (32, 1), record_type,
2411 TREE_CODE (type) == ARRAY_TYPE
2412 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2414 = chainon (field_list,
2415 make_descriptor_field
2416 ("SB_L2", type_for_size (32, 1), record_type,
2417 TREE_CODE (type) == ARRAY_TYPE
2418 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2421 case By_Descriptor_A:
2422 case By_Descriptor_NCA:
2423 field_list = chainon (field_list,
2424 make_descriptor_field ("SCALE",
2425 type_for_size (8, 1),
2429 field_list = chainon (field_list,
2430 make_descriptor_field ("DIGITS",
2431 type_for_size (8, 1),
2436 = chainon (field_list,
2437 make_descriptor_field
2438 ("AFLAGS", type_for_size (8, 1), record_type,
2439 size_int (mech == By_Descriptor_NCA
2441 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2442 : (TREE_CODE (type) == ARRAY_TYPE
2443 && TYPE_CONVENTION_FORTRAN_P (type)
2446 field_list = chainon (field_list,
2447 make_descriptor_field ("DIMCT",
2448 type_for_size (8, 1),
2452 field_list = chainon (field_list,
2453 make_descriptor_field ("ARSIZE",
2454 type_for_size (32, 1),
2456 size_in_bytes (type)));
2458 /* Now build a pointer to the 0,0,0... element. */
2459 tem = build (PLACEHOLDER_EXPR, type);
2460 for (i = 0, inner_type = type; i < ndim;
2461 i++, inner_type = TREE_TYPE (inner_type))
2462 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2463 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2466 = chainon (field_list,
2467 make_descriptor_field
2468 ("A0", build_pointer_type (inner_type), record_type,
2469 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2471 /* Next come the addressing coefficients. */
/* Each coefficient is the product of the extents (max - min + 1, the "+1"
   presumably on the elided line) of the inner dimensions.  */
2473 for (i = 0; i < ndim; i++)
2477 = size_binop (MULT_EXPR, tem,
2478 size_binop (PLUS_EXPR,
2479 size_binop (MINUS_EXPR,
2480 TYPE_MAX_VALUE (idx_arr[i]),
2481 TYPE_MIN_VALUE (idx_arr[i])),
/* Field names are S0..Sn for NCA descriptors, M0..Mn otherwise.  */
2484 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2485 fname[1] = '0' + i, fname[2] = 0;
2486 field_list = chainon (field_list,
2487 make_descriptor_field (fname,
2488 type_for_size (32, 1),
2492 if (mech == By_Descriptor_NCA)
2496 /* Finally here are the bounds. */
2497 for (i = 0; i < ndim; i++)
2501 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2503 = chainon (field_list,
2504 make_descriptor_field
2505 (fname, type_for_size (32, 1), record_type,
2506 TYPE_MIN_VALUE (idx_arr[i])));
2510 = chainon (field_list,
2511 make_descriptor_field
2512 (fname, type_for_size (32, 1), record_type,
2513 TYPE_MAX_VALUE (idx_arr[i])));
/* Unsupported mechanism: report on the GNAT entity.  */
2518 post_error ("unsupported descriptor type for &", gnat_entity);
2521 finish_record_type (record_type, field_list, 0, 1);
2522 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2528 /* Utility routine for above code to make a field. */
/* Creates a FIELD_DECL named NAME of TYPE in REC_TYPE and stashes INITIAL
   in its DECL_INITIAL, which build_vms_descriptor's constructor machinery
   later consumes.  (Elided extract: the K&R parameter declarations and the
   return statement from the original are not shown here.)  */
2531 make_descriptor_field (name, type, rec_type, initial)
2538 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2540 DECL_INITIAL (field) = initial;
2544 /* Build a type to be used to represent an aliased object whose nominal
2545 type is an unconstrained array. This consists of a RECORD_TYPE containing
2546 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2547 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2548 is used to represent an arbitrary unconstrained object. Use NAME
2549 as the name of the record. */
/* NOTE(review): elided extract — parameter declarations and the final
   chainon argument/return are missing here; comments only.  */
2552 build_unc_object_type (template_type, object_type, name)
2557 tree type = make_node (RECORD_TYPE);
/* Field "BOUNDS" holds the template (the array bounds) and "ARRAY" holds
   the data; the order matters to code that walks TYPE_FIELDS.  */
2558 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2559 template_type, type, 0, 0, 0, 1);
2560 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2563 TYPE_NAME (type) = name;
/* Mark so consumers know TYPE_FIELDS starts with a template.  */
2564 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2565 finish_record_type (type,
2566 chainon (chainon (NULL_TREE, template_field),
2573 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2574 the normal case this is just two adjustments, but we have more to do
2575 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
/* NOTE(review): elided extract — declarations, some braces, and early
   returns from the original file are missing; comments only.  */
2578 update_pointer_to (old_type, new_type)
2582 tree ptr = TYPE_POINTER_TO (old_type);
2583 tree ref = TYPE_REFERENCE_TO (old_type);
/* Nothing to do when no pointer/reference exists or the types are equal.  */
2585 if ((ptr == 0 && ref == 0) || old_type == new_type)
2588 /* First handle the simple case. */
2589 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2592 TREE_TYPE (ptr) = new_type;
2593 TYPE_POINTER_TO (new_type) = ptr;
2596 TREE_TYPE (ref) = new_type;
2597 TYPE_REFERENCE_TO (new_type) = ref;
/* Re-emit debug info for named pointer/reference TYPE_DECLs now that
   their pointed-to type has changed.  */
2599 if (ptr != 0 && TYPE_NAME (ptr) != 0
2600 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2601 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2602 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2603 global_bindings_p (), 0);
2604 if (ref != 0 && TYPE_NAME (ref) != 0
2605 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2606 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2607 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2608 global_bindings_p (), 0);
2611 /* Now deal with the unconstrained array case. In this case the "pointer"
2612 is actually a RECORD_TYPE where the types of both fields are
2613 pointers to void. In that case, copy the field list from the
2614 old type to the new one and update the fields' context. */
2615 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2620 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
/* Graft the new fat pointer's two fields onto the old record and make
   them belong to it.  */
2625 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2626 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2627 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2629 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2632 ??? This is now the only use of gnat_substitute_in_type, which
2633 is now a very "heavy" routine to do this, so it should be replaced
2635 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2636 new_ref = build (COMPONENT_REF, ptr_temp_type,
2637 build (PLACEHOLDER_EXPR, ptr),
2638 TREE_CHAIN (TYPE_FIELDS (ptr)));
2641 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2642 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2643 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
/* Point every variant of the fat pointer at the new array type.  */
2645 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2646 TYPE_UNCONSTRAINED_ARRAY (var) = new_type;
2648 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2649 = TREE_TYPE (new_type) = ptr;
2651 /* Now handle updating the allocation record, what the thin pointer
2652 points to. Update all pointers from the old record into the new
2653 one, update the types of the fields, and recompute the size. */
2655 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2657 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2658 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2659 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2660 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2661 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2662 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2663 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
/* Object record size = template field size + array field size, in both
   bit (TYPE_SIZE) and byte (TYPE_SIZE_UNIT) forms.  */
2665 TYPE_SIZE (new_obj_rec)
2666 = size_binop (PLUS_EXPR,
2667 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2668 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2669 TYPE_SIZE_UNIT (new_obj_rec)
2670 = size_binop (PLUS_EXPR,
2671 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2672 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2673 rest_of_type_compilation (ptr, global_bindings_p ());
2677 /* Convert a pointer to a constrained array into a pointer to a fat
2678 pointer. This involves making or finding a template. */
/* A fat pointer is a two-field record: a pointer to the array data and a
   pointer to the bounds template.  (Elided extract: parameter declarations
   and some braces are missing; comments only.)  */
2681 convert_to_fat_pointer (type, expr)
2685 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2686 tree template, template_addr;
2687 tree etype = TREE_TYPE (expr);
2689 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2690 pointer to the template and array. */
2691 if (integer_zerop (expr))
2695 tree_cons (TYPE_FIELDS (type),
2696 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2697 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2698 convert (build_pointer_type (template_type),
2702 /* If EXPR is a thin pointer, make the template and data from the record. */
2704 else if (TYPE_THIN_POINTER_P (etype))
2706 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
/* Avoid an explicit dereference when EXPR is already &object.  */
2708 expr = save_expr (expr);
2709 if (TREE_CODE (expr) == ADDR_EXPR)
2710 expr = TREE_OPERAND (expr, 0);
2712 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
/* First field of the pointed-to record is the template, second the data;
   the fat pointer wants the template itself and the data's address.  */
2714 template = build_component_ref (expr, NULL_TREE, fields);
2715 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2716 build_component_ref (expr, NULL_TREE,
2717 TREE_CHAIN (fields)));
2720 /* Otherwise, build the constructor for the template. */
2721 template = build_template (template_type, TREE_TYPE (etype), expr);
2723 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2725 /* The result is a CONSTRUCTOR for the fat pointer. */
2727 build_constructor (type,
2728 tree_cons (TYPE_FIELDS (type), expr,
2729 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2730 template_addr, NULL_TREE)));
2733 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2734 is something that is a fat pointer, so convert to it first if it EXPR
2735 is not already a fat pointer. */
/* NOTE(review): elided extract — parameter declarations and the final
   return are missing from this view; comments only.  */
2738 convert_to_thin_pointer (type, expr)
/* Normalize EXPR to the fat-pointer form for TYPE's unconstrained array.  */
2742 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2744 = convert_to_fat_pointer
2745 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2747 /* We get the pointer to the data and use a NOP_EXPR to make it the
/* The first fat-pointer field is the data pointer; retag it as TYPE.  */
2749 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2750 expr = build1 (NOP_EXPR, type, expr);
2755 /* Create an expression whose value is that of EXPR,
2756 converted to type TYPE. The TREE_TYPE of the value
2757 is always TYPE. This function implements all reasonable
2758 conversions; callers should filter out those that are
2759 not permitted by the language being compiled. */
/* NOTE(review): elided extract — declarations, case labels, braces and some
   returns from the original are missing here; comments only.  */
2762 convert (type, expr)
2765 enum tree_code code = TREE_CODE (type);
2766 tree etype = TREE_TYPE (expr);
2767 enum tree_code ecode = TREE_CODE (etype);
2770 /* If EXPR is already the right type, we are done. */
2774 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2776 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2777 return build (WITH_RECORD_EXPR, type,
2778 convert (type, TREE_OPERAND (expr, 0)),
2779 TREE_OPERAND (expr, 1));
2781 /* If the input type has padding, remove it by doing a component reference
2782 to the field. If the output type has padding, make a constructor
2783 to build the record. If both input and output have padding and are
2784 of variable size, do this as an unchecked conversion. */
2785 if (ecode == RECORD_TYPE && code == RECORD_TYPE
2786 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2787 && (! TREE_CONSTANT (TYPE_SIZE (type))
2788 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2790 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2792 /* If we have just converted to this padded type, just get
2793 the inner expression. */
2794 if (TREE_CODE (expr) == CONSTRUCTOR
2795 && CONSTRUCTOR_ELTS (expr) != 0
2796 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2797 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2799 return convert (type, build_component_ref (expr, NULL_TREE,
2800 TYPE_FIELDS (etype)));
2802 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2804 /* If we previously converted from another type and our type is
2805 of variable size, remove the conversion to avoid the need for
2806 variable-size temporaries. */
2807 if (TREE_CODE (expr) == UNCHECKED_CONVERT_EXPR
2808 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2809 expr = TREE_OPERAND (expr, 0);
2811 /* If we are just removing the padding from expr, convert the original
2812 object if we have variable size. That will avoid the need
2813 for some variable-size temporaries. */
2814 if (TREE_CODE (expr) == COMPONENT_REF
2815 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2816 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2817 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2818 return convert (type, TREE_OPERAND (expr, 0));
2820 /* If the result type is a padded type with a self-referentially-sized
2821 field and the expression type is a record, do this as an
2822 unchecked converstion. */
2823 else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
2824 && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
2825 && TREE_CODE (etype) == RECORD_TYPE)
2826 return unchecked_convert (type, expr);
/* Otherwise wrap EXPR in a CONSTRUCTOR for the padding record.  */
2830 build_constructor (type,
2831 tree_cons (TYPE_FIELDS (type),
2833 (TYPE_FIELDS (type)),
2838 /* If the input is a biased type, adjust first. */
/* A biased integer stores value - TYPE_MIN_VALUE; add the bias back
   before converting further.  */
2839 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2840 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2841 fold (build1 (GNAT_NOP_EXPR,
2842 TREE_TYPE (etype), expr)),
2843 TYPE_MIN_VALUE (etype))));
2845 /* If the input is a left-justified modular type, we need to extract
2846 the actual object before converting it to any other type with the
2847 exception of an unconstrained array. */
2848 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2849 && code != UNCONSTRAINED_ARRAY_TYPE)
2850 return convert (type, build_component_ref (expr, NULL_TREE,
2851 TYPE_FIELDS (etype)));
2853 /* If converting a type that does not contain a template into one
2854 that does, convert to the data type and then build the template. */
2855 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
2856 && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
2858 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2863 tree_cons (TYPE_FIELDS (type),
2864 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2865 obj_type, NULL_TREE),
2866 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2867 convert (obj_type, expr), NULL_TREE)));
2870 /* There are some special cases of expressions that we process
2872 switch (TREE_CODE (expr))
2877 case TRANSFORM_EXPR:
2879 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2880 conversion in gnat_expand_expr. NULL_EXPR does not represent
2881 and actual value, so no conversion is needed. */
2882 TREE_TYPE (expr) = type;
2887 /* If we are converting a STRING_CST to another constrained array type,
2888 just make a new one in the proper type. Likewise for a
2889 CONSTRUCTOR. But if the mode of the type is different, we must
2890 ensure a new RTL is made for the constant. */
2891 if (code == ecode && AGGREGATE_TYPE_P (etype)
2892 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2893 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2895 expr = copy_node (expr);
2896 TREE_TYPE (expr) = type;
/* Invalidate the cached RTL so it is regenerated for the new mode.  */
2898 if (TYPE_MODE (type) != TYPE_MODE (etype))
2899 TREE_CST_RTL (expr) = 0;
2906 /* If we are converting between two aggregate types of the same
2907 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2908 This avoid unneeded conversions which makes reference computations
2910 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2911 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2912 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2913 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2914 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2915 TREE_OPERAND (expr, 1));
2919 case UNCONSTRAINED_ARRAY_REF:
2920 /* Convert this to the type of the inner array by getting the address of
2921 the array from the template. */
2922 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2923 build_component_ref (TREE_OPERAND (expr, 0),
2924 get_identifier ("P_ARRAY"),
/* Fall through into the generic handling with the dereferenced array.  */
2926 etype = TREE_TYPE (expr);
2927 ecode = TREE_CODE (etype);
2930 case UNCHECKED_CONVERT_EXPR:
/* Collapse aggregate-to-aggregate unchecked conversions (fat pointers
   excluded) by converting the operand directly.  */
2931 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2932 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2933 return convert (type, TREE_OPERAND (expr, 0));
2937 /* If both types are record types, just convert the pointer and
2938 make a new INDIRECT_REF.
2940 ??? Disable this for now since it causes problems with the
2941 code in build_binary_op for MODIFY_EXPR which wants to
2942 strip off conversions. But that code really is a mess and
2943 we need to do this a much better way some time. */
2945 && (TREE_CODE (type) == RECORD_TYPE
2946 || TREE_CODE (type) == UNION_TYPE)
2947 && (TREE_CODE (etype) == RECORD_TYPE
2948 || TREE_CODE (etype) == UNION_TYPE)
2949 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2950 return build_unary_op (INDIRECT_REF, NULL_TREE,
2951 convert (build_pointer_type (type),
2952 TREE_OPERAND (expr, 0)));
2959 /* Check for converting to a pointer to an unconstrained array. */
2960 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2961 return convert_to_fat_pointer (type, expr);
/* Same representation: a plain NOP suffices.  */
2963 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2964 || (code == INTEGER_CST && ecode == INTEGER_CST
2965 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2966 return fold (build1 (NOP_EXPR, type, expr));
2971 return build1 (CONVERT_EXPR, type, expr);
/* Integer target cases: packed-array implementation types and biased
   representations need special treatment before the generic path.  */
2974 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2975 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
2976 return unchecked_convert (type, expr);
2977 else if (TYPE_BIASED_REPRESENTATION_P (type))
2978 return fold (build1 (CONVERT_EXPR, type,
2979 fold (build (MINUS_EXPR, TREE_TYPE (type),
2980 convert (TREE_TYPE (type), expr),
2981 TYPE_MIN_VALUE (type)))));
2983 /* ... fall through ... */
2986 return fold (convert_to_integer (type, expr));
2989 case REFERENCE_TYPE:
2990 /* If converting between two pointers to records denoting
2991 both a template and type, adjust if needed to account
2992 for any differing offsets, since one might be negative. */
2993 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
2996 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
2997 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
2998 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
2999 sbitsize_int (BITS_PER_UNIT));
3001 expr = build1 (NOP_EXPR, type, expr);
3002 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3003 if (integer_zerop (byte_diff))
3006 return build_binary_op (PLUS_EXPR, type, expr,
3007 fold (convert_to_pointer (type, byte_diff)));
3010 /* If converting to a thin pointer, handle specially. */
3011 if (TYPE_THIN_POINTER_P (type)
3012 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3013 return convert_to_thin_pointer (type, expr);
3015 /* If converting fat pointer to normal pointer, get the pointer to the
3016 array and then convert it. */
3017 else if (TYPE_FAT_POINTER_P (etype))
3018 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3021 return fold (convert_to_pointer (type, expr));
3024 return fold (convert_to_real (type, expr));
/* RECORD_TYPE target: wrap scalars in a left-justified modular record.  */
3027 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3030 (type, tree_cons (TYPE_FIELDS (type),
3031 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3034 /* ... fall through ... */
3037 /* In these cases, assume the front-end has validated the conversion.
3038 If the conversion is valid, it will be a bit-wise conversion, so
3039 it can be viewed as an unchecked conversion. */
3040 return unchecked_convert (type, expr);
3043 /* Just validate that the type is indeed that of a field
3044 of the type. Then make the simple conversion. */
3045 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3046 if (TREE_TYPE (tem) == etype)
3047 return build1 (CONVERT_EXPR, type, expr);
3051 case UNCONSTRAINED_ARRAY_TYPE:
3052 /* If EXPR is a constrained array, take its address, convert it to a
3053 fat pointer, and then dereference it. Likewise if EXPR is a
3054 record containing both a template and a constrained array.
3055 Note that a record representing a left justified modular type
3056 always represents a packed constrained array. */
3057 if (ecode == ARRAY_TYPE
3058 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3059 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3060 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3063 (INDIRECT_REF, NULL_TREE,
3064 convert_to_fat_pointer (TREE_TYPE (type),
3065 build_unary_op (ADDR_EXPR,
3068 /* Do something very similar for converting one unconstrained
3069 array to another. */
3070 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3072 build_unary_op (INDIRECT_REF, NULL_TREE,
3073 convert (TREE_TYPE (type),
3074 build_unary_op (ADDR_EXPR,
3080 return fold (convert_to_complex (type, expr));
3087 /* Remove all conversions that are done in EXP. This includes converting
3088 from a padded type or converting to a left-justified modular type. */
/* NOTE(review): elided extract — parameter declaration, some braces and the
   final return of EXP are missing from this view; comments only.  */
3091 remove_conversions (exp)
3094 switch (TREE_CODE (exp))
/* CONSTRUCTOR for a left-justified modular record: recurse into the
   single wrapped value.  */
3097 if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3098 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3099 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)));
/* COMPONENT_REF that merely strips padding: recurse into the record.  */
3103 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3104 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3105 return remove_conversions (TREE_OPERAND (exp, 0));
3108 case UNCHECKED_CONVERT_EXPR:
3109 case NOP_EXPR: case CONVERT_EXPR:
3110 return remove_conversions (TREE_OPERAND (exp, 0));
3119 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3120 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3121 likewise return an expression pointing to the underlying array. */
/* NOTE(review): elided extract — parameter declaration, some case labels,
   braces and the fall-through return of EXP are missing here.  */
3124 maybe_unconstrained_array (exp)
3127 enum tree_code code = TREE_CODE (exp);
3130 switch (TREE_CODE (TREE_TYPE (exp)))
3132 case UNCONSTRAINED_ARRAY_TYPE:
3133 if (code == UNCONSTRAINED_ARRAY_REF)
/* Dereference the P_ARRAY field of the fat pointer to get the data.  */
3136 = build_unary_op (INDIRECT_REF, NULL_TREE,
3137 build_component_ref (TREE_OPERAND (exp, 0),
3138 get_identifier ("P_ARRAY"),
/* Propagate readonly/static flags from the original reference.  */
3140 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3144 else if (code == NULL_EXPR)
3145 return build1 (NULL_EXPR,
3146 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3147 (TREE_TYPE (TREE_TYPE (exp))))),
3148 TREE_OPERAND (exp, 0));
/* For WITH_RECORD_EXPR, rewrap only if the inner expression changed.  */
3150 else if (code == WITH_RECORD_EXPR
3151 && (TREE_OPERAND (exp, 0)
3152 != (new = maybe_unconstrained_array
3153 (TREE_OPERAND (exp, 0)))))
3154 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3155 TREE_OPERAND (exp, 1));
/* Record with a template: the second field holds the array data.  */
3158 if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3161 = build_component_ref (exp, NULL_TREE,
3162 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
/* Strip any padding record around the extracted array.  */
3163 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3164 && TYPE_IS_PADDING_P (TREE_TYPE (new)))
3165 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);
3178 /* Return an expression that does an unchecked converstion of EXPR to TYPE. */
3181 unchecked_convert (type, expr)
3185 tree etype = TREE_TYPE (expr);
3187 /* If the expression is already the right type, we are done. */
3191 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3193 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3194 return build (WITH_RECORD_EXPR, type,
3195 unchecked_convert (type, TREE_OPERAND (expr, 0)),
3196 TREE_OPERAND (expr, 1));
3198 /* If both types types are integral just do a normal conversion.
3199 Likewise for a conversion to an unconstrained array. */
3200 if ((((INTEGRAL_TYPE_P (type)
3201 && ! (TREE_CODE (type) == INTEGER_TYPE
3202 && TYPE_VAX_FLOATING_POINT_P (type)))
3203 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3204 || (TREE_CODE (type) == RECORD_TYPE
3205 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3206 && ((INTEGRAL_TYPE_P (etype)
3207 && ! (TREE_CODE (etype) == INTEGER_TYPE
3208 && TYPE_VAX_FLOATING_POINT_P (etype)))
3209 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3210 || (TREE_CODE (etype) == RECORD_TYPE
3211 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3212 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3216 if (TREE_CODE (etype) == INTEGER_TYPE
3217 && TYPE_BIASED_REPRESENTATION_P (etype))
3219 tree ntype = copy_type (etype);
3221 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3222 TYPE_MAIN_VARIANT (ntype) = ntype;
3223 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3226 if (TREE_CODE (type) == INTEGER_TYPE
3227 && TYPE_BIASED_REPRESENTATION_P (type))
3229 rtype = copy_type (type);
3230 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3231 TYPE_MAIN_VARIANT (rtype) = rtype;
3234 expr = convert (rtype, expr);
3236 expr = build1 (GNAT_NOP_EXPR, type, expr);
3239 /* If we are converting TO an integral type whose precision is not the
3240 same as its size, first unchecked convert to a record that contains
3241 an object of the output type. Then extract the field. */
3242 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3243 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3244 GET_MODE_BITSIZE (TYPE_MODE (type))))
3246 tree rec_type = make_node (RECORD_TYPE);
3247 tree field = create_field_decl (get_identifier ("OBJ"), type,
3248 rec_type, 1, 0, 0, 0);
3250 TYPE_FIELDS (rec_type) = field;
3251 layout_type (rec_type);
3253 expr = unchecked_convert (rec_type, expr);
3254 expr = build_component_ref (expr, NULL_TREE, field);
3257 /* Similarly for integral input type whose precision is not equal to its
3259 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3260 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3261 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3263 tree rec_type = make_node (RECORD_TYPE);
3265 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3268 TYPE_FIELDS (rec_type) = field;
3269 layout_type (rec_type);
3271 expr = build_constructor (rec_type, build_tree_list (field, expr));
3272 expr = unchecked_convert (type, expr);
3275 /* We have a special case when we are converting between two
3276 unconstrained array types. In that case, take the address,
3277 convert the fat pointer types, and dereference. */
3278 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3279 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3280 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3281 build1 (UNCHECKED_CONVERT_EXPR, TREE_TYPE (type),
3282 build_unary_op (ADDR_EXPR, NULL_TREE,
3285 /* If both types are aggregates with the same mode and alignment (except
3286 if the result is a UNION_TYPE), we can do this as a normal conversion. */
3287 else if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
3288 && TREE_CODE (type) != UNION_TYPE
3289 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
3290 && TYPE_MODE (type) == TYPE_MODE (etype))
3291 expr = build1 (CONVERT_EXPR, type, expr);
3295 expr = maybe_unconstrained_array (expr);
3296 etype = TREE_TYPE (expr);
3297 expr = build1 (UNCHECKED_CONVERT_EXPR, type, expr);
3301 /* If the result is an integral type whose size is not equal to
3302 the size of the underlying machine type, sign- or zero-extend
3303 the result. We need not do this in the case where the input is
3304 an integral type of the same precision and signedness or if the output
3305 is a biased type or if both the input and output are unsigned. */
3306 if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3307 && ! (TREE_CODE (type) == INTEGER_TYPE
3308 && TYPE_BIASED_REPRESENTATION_P (type))
3309 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3310 GET_MODE_BITSIZE (TYPE_MODE (type)))
3311 && ! (INTEGRAL_TYPE_P (etype)
3312 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3313 && operand_equal_p (TYPE_RM_SIZE (type),
3314 (TYPE_RM_SIZE (etype) != 0
3315 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3317 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
3319 tree base_type = type_for_mode (TYPE_MODE (type), TREE_UNSIGNED (type));
3321 = convert (base_type,
3322 size_binop (MINUS_EXPR,
3324 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3325 TYPE_RM_SIZE (type)));
3328 build_binary_op (RSHIFT_EXPR, base_type,
3329 build_binary_op (LSHIFT_EXPR, base_type,
3330 convert (base_type, expr),
3335 /* An unchecked conversion should never raise Constraint_Error. The code
3336 below assumes that GCC's conversion routines overflow the same
3337 way that the underlying hardware does. This is probably true. In
3338 the rare case when it isn't, we can rely on the fact that such
3339 conversions are erroneous anyway. */
3340 if (TREE_CODE (expr) == INTEGER_CST)
3341 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3343 /* If the sizes of the types differ and this is an UNCHECKED_CONVERT_EXPR,
3344 show no longer constant. */
3345 if (TREE_CODE (expr) == UNCHECKED_CONVERT_EXPR
3346 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3347 TREE_CONSTANT (expr) = 0;