/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                          *
 *                                                                          *
 *          Copyright (C) 1992-2012, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can redistribute it and/or modify it under   *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation;  either version 3, or (at your option) any later ver-   *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the implied warranty of MERCHANTABILITY  *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "tree-inline.h"
/* The remaining GCC and GNAT front-end headers of the original file
   (up to "gigi.h") are omitted from this excerpt.  */
/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
	 && (TREE_CODE (type) == INTEGER_TYPE
	     || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
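
/* Illustrative note (an addition to this excerpt, not original code): for
   an Ada declaration such as "subtype S is Integer range 1 .. 10", the
   INTEGER_TYPE built for S keeps a TREE_TYPE link to its parent type, so
   get_base_type applied to it walks the chain up to the node for Integer
   itself.  */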
/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */

static unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
	 of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
	 minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
	 of the base.  */
      if (rhs == 0)
	this_alignment = lhs;
      else
	this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smallest one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
	/* The first part of this represents the lowest bit in the constant,
	   but it is originally in bytes, not bits.  */
	this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
	 use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
	this_alignment = rhs;
      else if (rhs == 0)
	this_alignment = lhs;
      else
	this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
	 operands.  We typically get here for a complex lhs and a constant
	 negative power of two on the rhs to force an explicit alignment, so
	 don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
	tree t = maybe_inline_call_in_expr (exp);
	if (t)
	  return known_alignment (t);
      }

      /* Fall through... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
	 is at least as aligned as the pointed-to type.  Beware that we can
	 have a dummy type here (e.g. a Taft Amendment type), for which the
	 alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
	  && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
	this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
	this_alignment = 0;
      break;
    }

  return this_alignment;
}
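
/* Worked example (an addition to this excerpt): for the address computation
   P + 4 represented as a POINTER_PLUS_EXPR where P is known to be 64-bit
   aligned, the INTEGER_CST case gives the offset an alignment of
   BITS_PER_UNIT * (4 & -4) = 32 bits, so the result is MIN (64, 32) = 32
   bits, the strictest alignment that is still guaranteed for the sum.  */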
/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both constants (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must avoid writing more than what the target can hold if this is for
     an assignment and the case of tagged types is handled in build_binary_op
     so we use the lhs type if it is known to be smaller or of constant size
     and the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of an assignment, except for the case of record types with a variant
     part on the lhs but not on the rhs to make the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
	  || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
	  || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
	      && !(TREE_CODE (t1) == RECORD_TYPE
		   && TREE_CODE (t2) == RECORD_TYPE
		   && get_variant_part (t1) != NULL_TREE
		   && get_variant_part (t2) == NULL_TREE))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
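
/* Usage note (an addition to this excerpt): for an assignment where the LHS
   type has a constant size and the RHS type has a self-referential one, the
   first test above selects the LHS type, which both operands of the
   MODIFY_EXPR are then converted to by build_binary_op below.  */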
/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
				 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
				 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
	 unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
	{
	  tree tem;
	  bool btem;

	  tem = a1, a1 = a2, a2 = tem;
	  tem = t1, t1 = t2, t2 = tem;
	  tem = lb1, lb1 = lb2, lb2 = tem;
	  tem = ub1, ub1 = ub2, ub2 = tem;
	  tem = length1, length1 = length2, length2 = tem;
	  tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
	  btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
	  a2_side_effects_p = btem;
	}

      /* If the length of the second array is the constant zero, we can just
	 use the original stored bounds for the first array and see whether
	 last < first holds.  */
      if (integer_zerop (length2))
	{
	  length_zero_p = true;

	  ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
	  lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

	  comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
	  comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
	  if (EXPR_P (comparison))
	    SET_EXPR_LOCATION (comparison, loc);

	  this_a1_is_null = comparison;
	  this_a2_is_null = convert (result_type, boolean_true_node);
	}

      /* Otherwise, if the length is some other constant value, we know that
	 this dimension in the second array cannot be superflat, so we can
	 just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
	{
	  tree bt;

	  ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
	  lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
	  /* Note that we know that UB2 and LB2 are constant and hence
	     cannot contain a PLACEHOLDER_EXPR.  */
	  ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
	  lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
	  bt = get_base_type (TREE_TYPE (ub1));

	  comparison
	    = fold_build2_loc (loc, EQ_EXPR, result_type,
			       build_binary_op (MINUS_EXPR, bt, ub1, lb1),
			       build_binary_op (MINUS_EXPR, bt, ub2, lb2));
	  comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
	  if (EXPR_P (comparison))
	    SET_EXPR_LOCATION (comparison, loc);

	  this_a1_is_null
	    = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

	  this_a2_is_null = convert (result_type, boolean_false_node);
	}

      /* Otherwise, compare the computed lengths.  */
      else
	{
	  length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
	  length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

	  comparison
	    = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);

	  /* If the length expression is of the form (cond ? val : 0), assume
	     that cond is equivalent to (length != 0).  That's guaranteed by
	     construction of the array types in gnat_to_gnu_entity.  */
	  if (TREE_CODE (length1) == COND_EXPR
	      && integer_zerop (TREE_OPERAND (length1, 2)))
	    this_a1_is_null
	      = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
	  else
	    this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
					       length1, size_zero_node);

	  /* Likewise for the second array.  */
	  if (TREE_CODE (length2) == COND_EXPR
	      && integer_zerop (TREE_OPERAND (length2, 2)))
	    this_a2_is_null
	      = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
	  else
	    this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
					       length2, size_zero_node);
	}

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
				result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
				    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
				    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
	{
	  a1 = convert (type, a1),
	  a2 = convert (type, a2);
	}

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
	= build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
			    build_binary_op (TRUTH_ANDIF_EXPR, result_type,
					     a1_is_null, a2_is_null),
			    result);

  /* If either operand has side-effects, they have to be evaluated before
     starting the comparison above since the place they would be otherwise
     evaluated could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
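
/* Illustrative sketch (an addition to this excerpt): for one-dimensional
   arrays, the expression built above is roughly

       (length1 == length2 && a1 == a2) || (a1_is_null && a2_is_null)

   with the per-dimension length tests accumulated in RESULT and the
   zero-length tests accumulated in A1_IS_NULL and A2_IS_NULL.  */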
/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same couple of pointers.
   We perform the comparison in as efficient a manner as possible.  */

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 0)->value;
  else
    p1_array = build_component_ref (p1, NULL_TREE,
				    TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
		       fold_convert_loc (loc, TREE_TYPE (p1_array),
					 null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 0)->value;
  else
    p2_array = build_component_ref (p2, NULL_TREE,
				    TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
		       fold_convert_loc (loc, TREE_TYPE (p2_array),
					 null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 1)->value;
  else
    p1_bounds
      = build_component_ref (p1, NULL_TREE,
			     DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 1)->value;
  else
    p2_bounds
      = build_component_ref (p2, NULL_TREE,
			     DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
			  build_binary_op (TRUTH_ORIF_EXPR, result_type,
					   p1_array_is_null, same_bounds));
}
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
			     tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
			fold_build2 (TRUNC_MOD_EXPR, div_type,
				     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
			    fold_build2 (LT_EXPR, boolean_type_node, result,
					 convert (op_type, integer_zero_node)),
			    fold_build2 (PLUS_EXPR, op_type, result, modulus),
			    result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
			    fold_build2 (GE_EXPR, boolean_type_node,
					 result, modulus),
			    fold_build2 (MINUS_EXPR, op_type,
					 result, modulus),
			    result);
    }

  return convert (type, result);
}
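
/* Worked example (an addition to this excerpt): for a modular type with
   modulus 7, the addition LHS + 3 is first rewritten above as the
   subtraction LHS - 4 (since 7 - 3 = 4), carried out in a signed type at
   least one bit wider than the 3 bits needed for the modulus, and the final
   COND_EXPR adds 7 back whenever the intermediate result is negative.  */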
/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic object.
   So, memory barriers both before and after the atomic update are needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed before
   the atomic read.  Finally, to avoid reordering later reads or updates of
   volatile objects to before the atomic read, a barrier is needed after the
   atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */
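
/* Illustrative sketch (an addition to this excerpt, with a hypothetical
   4-byte atomic object X): with MEMMODEL_SEQ_CST, the routines below
   essentially expand an atomic read and update of X into calls to the GCC
   atomic built-ins, roughly

       val = __atomic_load_4 (&x, __ATOMIC_SEQ_CST);
       __atomic_store_4 (&x, val, __ATOMIC_SEQ_CST);

   which the middle-end implements with the required barriers.  */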
/* Return the size of TYPE, which must be a positive power of 2.  */

static unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}
/* Build an atomic load for the underlying atomic object in SRC.  */

tree
build_atomic_load (tree src)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree orig_src = src;
  tree type = TREE_TYPE (src);
  tree t, val;
  unsigned int size;
  int fncode;

  src = remove_conversions (src, false);
  size = resolve_atomic_size (TREE_TYPE (src));
  if (size == 0)
    return orig_src;

  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  src = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, src, mem_model);

  return unchecked_convert (type, val, true);
}
/* Build an atomic store from SRC to the underlying atomic object in DEST.  */

tree
build_atomic_store (tree dest, tree src)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree orig_dest = dest;
  tree t, int_type;
  unsigned int size;
  int fncode;

  dest = remove_conversions (dest, false);
  size = resolve_atomic_size (TREE_TYPE (dest));
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  dest = build_unary_op (ADDR_EXPR, ptr_type, dest);
  src = unchecked_convert (int_type, src, true);

  return build_call_expr (t, 3, dest, src, mem_model);
}
/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
		 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
	     && TREE_CODE (operation_type) == INTEGER_TYPE
	     && TYPE_MODULAR_P (operation_type)
	     ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (result_type == NULL_TREE);
#endif
      /* If there were integral or pointer conversions on the LHS, remove
	 them; we'll be putting them back below if needed.  Likewise for
	 conversions between array and record types, except for justified
	 modular types.  But don't do this if the right operand is not
	 BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
	      || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
	     && (((INTEGRAL_TYPE_P (left_type)
		   || POINTER_TYPE_P (left_type))
		  && (INTEGRAL_TYPE_P (TREE_TYPE
				       (TREE_OPERAND (left_operand, 0)))
		      || POINTER_TYPE_P (TREE_TYPE
					 (TREE_OPERAND (left_operand, 0)))))
		 || (((TREE_CODE (left_type) == RECORD_TYPE
		       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
		      || TREE_CODE (left_type) == ARRAY_TYPE)
		     && ((TREE_CODE (TREE_TYPE
				     (TREE_OPERAND (left_operand, 0)))
			  == RECORD_TYPE)
			 || (TREE_CODE (TREE_TYPE
					(TREE_OPERAND (left_operand, 0)))
			     == ARRAY_TYPE))
		     && (TYPE_MODE (right_type) == BLKmode
			 || (TYPE_MODE (left_type)
			     == TYPE_MODE (TREE_TYPE
					   (TREE_OPERAND
					    (left_operand, 0))))))))
	{
	  left_operand = TREE_OPERAND (left_operand, 0);
	  left_type = TREE_TYPE (left_operand);
	}

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
	   || TREE_CODE (right_type) == UNION_TYPE)
	  && TYPE_ALIGN_OK (right_type))
	operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
	 the padded view of the objects, this is very likely more efficient.
	 Likewise for a padded object that is assigned a constructor, if we
	 can convert the constructor to the inner type, to avoid putting a
	 VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
	 actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
	       && TREE_CONSTANT (TYPE_SIZE (left_type))
	       && ((TREE_CODE (right_operand) == COMPONENT_REF
		    && TYPE_MAIN_VARIANT (left_type)
		       == TYPE_MAIN_VARIANT
			  (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
		   || (TREE_CODE (right_operand) == CONSTRUCTOR
		       && !CONTAINS_PLACEHOLDER_P
			   (DECL_SIZE (TYPE_FIELDS (left_type)))))
	       && !integer_zerop (TYPE_SIZE (right_type)))
	{
	  /* We make an exception for a BLKmode type padding a non-BLKmode
	     inner type and do the conversion of the LHS right away, since
	     unchecked_convert wouldn't do it properly.  */
	  if (TYPE_MODE (left_type) == BLKmode
	      && TYPE_MODE (right_type) != BLKmode
	      && TREE_CODE (right_operand) != CONSTRUCTOR)
	    {
	      operation_type = right_type;
	      left_operand = convert (operation_type, left_operand);
	      left_type = operation_type;
	    }
	  else
	    operation_type = left_type;
	}

      /* If we have a call to a function that returns an unconstrained type
	 with default discriminant on the RHS, use the RHS type (which is
	 padded) as we cannot compute the size of the actual assignment.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
	       && TYPE_IS_PADDING_P (right_type)
	       && CONTAINS_PLACEHOLDER_P
		  (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
	operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
		 && TREE_CODE (right_type) == ARRAY_TYPE)
		|| (TREE_CODE (left_type) == RECORD_TYPE
		    && TREE_CODE (right_type) == RECORD_TYPE))
	       && (best_type = find_common_type (left_type, right_type)))
	operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
	operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
	 strip anything that get_inner_reference can handle.  Then remove any
	 conversions between types having the same code and mode.  And mark
	 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
	 either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
	{
	  tree restype = TREE_TYPE (result);

	  if (TREE_CODE (result) == COMPONENT_REF
	      || TREE_CODE (result) == ARRAY_REF
	      || TREE_CODE (result) == ARRAY_RANGE_REF)
	    while (handled_component_p (result))
	      result = TREE_OPERAND (result, 0);
	  else if (TREE_CODE (result) == REALPART_EXPR
		   || TREE_CODE (result) == IMAGPART_EXPR
		   || (CONVERT_EXPR_P (result)
		       && (((TREE_CODE (restype)
			     == TREE_CODE (TREE_TYPE
					   (TREE_OPERAND (result, 0))))
			    && (TYPE_MODE (TREE_TYPE
					   (TREE_OPERAND (result, 0)))
				== TYPE_MODE (restype)))
			   || TYPE_ALIGN_OK (restype))))
	    result = TREE_OPERAND (result, 0);
	  else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
	    {
	      TREE_ADDRESSABLE (result) = 1;
	      result = TREE_OPERAND (result, 0);
	    }
	  else
	    break;
	}

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
		  || TREE_CODE (result) == NULL_EXPR
		  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
	 either already of the correct type or if the type involves a
	 placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
	  && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
	{
	  right_operand = convert (operation_type, right_operand);
	  right_type = operation_type;
	}

      /* If the left operand is not of the same type as the operation
	 type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
	left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;
    case ARRAY_REF:
      if (!operation_type)
	operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
	 this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
	  && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
	     == TYPE_MAIN_VARIANT (left_type))
	{
	  left_operand = TREE_OPERAND (left_operand, 0);
	  left_type = TREE_TYPE (left_operand);
	}

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
	  && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
	operation_type = build_array_type (TREE_TYPE (left_type),
					   TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
	 unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
	return build2 (op_code, result_type,
		       build1 (NULL_EXPR, integer_type_node,
			       TREE_OPERAND (left_operand, 0)),
		       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
	return build2 (op_code, result_type,
		       build1 (NULL_EXPR, integer_type_node,
			       TREE_OPERAND (right_operand, 0)),
		       integer_zero_node);

      /* If either object is a justified modular type, get the
	 fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
	  && TYPE_JUSTIFIED_MODULAR_P (left_type))
	{
	  left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
				  left_operand);
	  left_type = TREE_TYPE (left_operand);
	  left_base_type = get_base_type (left_type);
	}

      if (TREE_CODE (right_type) == RECORD_TYPE
	  && TYPE_JUSTIFIED_MODULAR_P (right_type))
	{
	  right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
				   right_operand);
	  right_type = TREE_TYPE (right_operand);
	  right_base_type = get_base_type (right_type);
	}

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
	   || (TREE_CODE (left_type) == INTEGER_TYPE
	       && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
	  && (TREE_CODE (right_type) == ARRAY_TYPE
	      || (TREE_CODE (right_type) == INTEGER_TYPE
		  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
	{
	  result = compare_arrays (input_location,
				   result_type, left_operand, right_operand);
	  if (op_code == NE_EXPR)
	    result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
	  else
	    gcc_assert (op_code == EQ_EXPR);

	  return result;
	}

      /* Otherwise, the base types must be the same, unless they are both fat
	 pointer types or record types.  In the latter case, use the best type
	 and convert both operands to that type.  */
      if (left_base_type != right_base_type)
	{
	  if (TYPE_IS_FAT_POINTER_P (left_base_type)
	      && TYPE_IS_FAT_POINTER_P (right_base_type))
	    {
	      gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
			  == TYPE_MAIN_VARIANT (right_base_type));
	      best_type = left_base_type;
	    }

	  else if (TREE_CODE (left_base_type) == RECORD_TYPE
		   && TREE_CODE (right_base_type) == RECORD_TYPE)
	    {
	      /* The only way this is permitted is if both types have the same
		 name.  In that case, one of them must not be self-referential.
		 Use it as the best type.  Even better with a fixed size.  */
	      gcc_assert (TYPE_NAME (left_base_type)
			  && TYPE_NAME (left_base_type)
			     == TYPE_NAME (right_base_type));

	      if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
		best_type = left_base_type;
	      else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
		best_type = right_base_type;
	      else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
		best_type = left_base_type;
	      else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
		best_type = right_base_type;
	      else
		gcc_unreachable ();
	    }

	  else
	    gcc_unreachable ();

	  left_operand = convert (best_type, left_operand);
	  right_operand = convert (best_type, right_operand);
	}
      else
	{
	  left_operand = convert (left_base_type, left_operand);
	  right_operand = convert (right_base_type, right_operand);
	}

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
	{
	  result
	    = compare_fat_pointers (input_location,
				    result_type, left_operand, right_operand);
	  if (op_code == NE_EXPR)
	    result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
	  else
	    gcc_assert (op_code == EQ_EXPR);

	  return result;
	}

      modulus = NULL_TREE;
      break;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
	 (we used to abort, but this is needed for unchecked conversion
	 to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
	 outputs.  */
      if (modulus && integer_pow2p (modulus))
	modulus = NULL_TREE;
      break;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
		  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
		  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
	op_code = PLUS_EXPR;
      else
	op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
	 other compilers.  Contrary to C, Ada doesn't allow arithmetic in
	 these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
	  && (TREE_CODE (operation_type) == ENUMERAL_TYPE
	      || TREE_CODE (operation_type) == BOOLEAN_TYPE))
	operation_type = left_base_type = right_base_type
	  = gnat_type_for_mode (TYPE_MODE (operation_type),
				TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of the
	 both operands (and they should be the same).  Convert
	 everything to the result type.  */
      gcc_assert (operation_type == left_base_type
		  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;
    }
  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
					    left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
			   right_operand, NULL_TREE, NULL_TREE));
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      TREE_THIS_NOTRAP (result) = 1;
      if (TYPE_VOLATILE (operation_type))
	TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
			  convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
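
/* Usage note (an addition to this excerpt, with hypothetical GNU trees):
   a typical call from gigi is

       gnu_result = build_binary_op (PLUS_EXPR, gnu_result_type,
				     gnu_lhs, gnu_rhs);

   where the operands have already been type-matched by the caller, so that
   only base-type adjustments and modular reduction remain to be done.  */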
/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
	result_type = operation_type = TREE_TYPE (type);
      else
	gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
	 doesn't fold the result of comparisons.  This is intended to undo
	 the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
	result = fold (result);
      break;
    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
	{
	case INDIRECT_REF:
	case UNCONSTRAINED_ARRAY_REF:
	  result = TREE_OPERAND (operand, 0);

	  /* Make sure the type here is a pointer, not a reference.
	     GCC wants pointer types for function addresses.  */
	  if (!result_type)
	    result_type = build_pointer_type (type);

	  /* If the underlying object can alias everything, propagate the
	     property since we are effectively retrieving the object.  */
	  if (POINTER_TYPE_P (TREE_TYPE (result))
	      && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
	    {
	      if (TREE_CODE (result_type) == POINTER_TYPE
		  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
		result_type
		  = build_pointer_type_for_mode (TREE_TYPE (result_type),
						 TYPE_MODE (result_type),
						 true);
	      else if (TREE_CODE (result_type) == REFERENCE_TYPE
		       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
		result_type
		  = build_reference_type_for_mode (TREE_TYPE (result_type),
						   TYPE_MODE (result_type),
						   true);
	    }
	  break;

	case NULL_EXPR:
	  result = operand;
	  TREE_TYPE (result) = type = build_pointer_type (type);
	  break;

	case COMPOUND_EXPR:
	  /* Fold a compound expression if it has unconstrained array type
	     since the middle-end cannot handle it.  But we don't do it in the
	     general case because it may introduce aliasing issues if the
	     first operand is an indirect assignment and the second operand
	     the corresponding address, e.g. for an allocator.  */
	  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
	    {
	      result = build_unary_op (ADDR_EXPR, result_type,
				       TREE_OPERAND (operand, 1));
	      result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
			       TREE_OPERAND (operand, 0), result);
	      break;
	    }
	  goto common;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case COMPONENT_REF:
	case BIT_FIELD_REF:
	  /* If this is for 'Address, find the address of the prefix and add
	     the offset to the field.  Otherwise, do this the normal way.  */
	  if (op_code == ATTR_ADDR_EXPR)
	    {
	      HOST_WIDE_INT bitsize;
	      HOST_WIDE_INT bitpos;
	      tree offset, inner;
	      enum machine_mode mode;
	      int unsignedp, volatilep;

	      inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
					   &mode, &unsignedp, &volatilep,
					   false);

	      /* If INNER is a padding type whose field has a self-referential
		 size, convert to that inner type.  We know the offset is zero
		 and we need to have that type visible.  */
	      if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
		  && CONTAINS_PLACEHOLDER_P
		     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
					    (TREE_TYPE (inner))))))
		inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
				 inner);

	      /* Compute the offset as a byte offset from INNER.  */
	      if (!offset)
		offset = size_zero_node;

	      offset = size_binop (PLUS_EXPR, offset,
				   size_int (bitpos / BITS_PER_UNIT));

	      /* Take the address of INNER, convert the offset to void *, and
		 add them.  It will later be converted to the desired result
		 type, if any.  */
	      inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
	      inner = convert (ptr_void_type_node, inner);
	      result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
					inner, offset);
	      result = convert (build_pointer_type (TREE_TYPE (operand)),
				result);
	      break;
	    }
	  goto common;

	case CONSTRUCTOR:
	  /* If this is just a constructor for a padded record, we can
	     just take the address of the single field and convert it to
	     a pointer to our type.  */
	  if (TYPE_IS_PADDING_P (type))
	    {
	      result = VEC_index (constructor_elt,
				  CONSTRUCTOR_ELTS (operand),
				  0)->value;
	      result = convert (build_pointer_type (TREE_TYPE (operand)),
				build_unary_op (ADDR_EXPR, NULL_TREE, result));
	      break;
	    }
	  goto common;

	case NOP_EXPR:
	  if (AGGREGATE_TYPE_P (type)
	      && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
	    return build_unary_op (ADDR_EXPR, result_type,
				   TREE_OPERAND (operand, 0));

	  /* ... fallthru ... */

	case VIEW_CONVERT_EXPR:
	  /* If this is just a variant conversion or if the conversion doesn't
	     change the mode, get the result type from this type and go down.
	     This is needed for conversions of CONST_DECLs, to eventually get
	     to the address of their CORRESPONDING_VARs.  */
	  if ((TYPE_MAIN_VARIANT (type)
	       == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
	      || (TYPE_MODE (type) != BLKmode
		  && (TYPE_MODE (type)
		      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
	    return build_unary_op (ADDR_EXPR,
				   (result_type ? result_type
				    : build_pointer_type (type)),
				   TREE_OPERAND (operand, 0));
	  goto common;

	case CONST_DECL:
	  operand = DECL_CONST_CORRESPONDING_VAR (operand);

	  /* ... fall through ... */

	default:
	common:
	  /* If we are taking the address of a padded record whose field
	     contains a template, take the address of the field.  */
	  if (TYPE_IS_PADDING_P (type)
	      && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	    {
	      type = TREE_TYPE (TYPE_FIELDS (type));
	      operand = convert (type, operand);
	    }

	  gnat_mark_addressable (operand);
	  result = build_fold_addr_expr (operand);
	}

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;
    case INDIRECT_REF:
      {
	tree t = remove_conversions (operand, false);
	bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);

	/* If TYPE is a thin pointer, either first retrieve the base if this
	   is an expression with an offset built for the initialization of an
	   object with an unconstrained nominal subtype, or else convert to
	   the fat pointer.  */
	if (TYPE_IS_THIN_POINTER_P (type))
	  {
	    tree rec_type = TREE_TYPE (type);

	    if (TREE_CODE (operand) == POINTER_PLUS_EXPR
		&& TREE_OPERAND (operand, 1)
		   == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
		&& TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
	      {
		operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
		type = TREE_TYPE (operand);
	      }
	    else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
	      {
		operand
		  = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
			     operand);
		type = TREE_TYPE (operand);
	      }
	  }

	/* If we want to refer to an unconstrained array, use the appropriate
	   expression.  But this will never survive down to the back-end.  */
	if (TYPE_IS_FAT_POINTER_P (type))
	  {
	    result = build1 (UNCONSTRAINED_ARRAY_REF,
			     TYPE_UNCONSTRAINED_ARRAY (type), operand);
	    TREE_READONLY (result)
	      = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
	  }

	/* If we are dereferencing an ADDR_EXPR, return its operand.  */
	else if (TREE_CODE (operand) == ADDR_EXPR)
	  result = TREE_OPERAND (operand, 0);

	/* Otherwise, build and fold the indirect reference.  */
	else
	  {
	    result = build_fold_indirect_ref (operand);
	    TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
	  }

	if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
	  {
	    TREE_SIDE_EFFECTS (result) = 1;
	    if (TREE_CODE (result) == INDIRECT_REF)
	      TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
	  }

	if ((TREE_CODE (result) == INDIRECT_REF
	     || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
	    && can_never_be_null)
	  TREE_THIS_NOTRAP (result) = 1;

	break;
      }
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
	tree modulus = ((operation_type
			 && TREE_CODE (operation_type) == INTEGER_TYPE
			 && TYPE_MODULAR_P (operation_type))
			? TYPE_MODULUS (operation_type) : NULL_TREE);
	int mod_pow2 = modulus && integer_pow2p (modulus);

	/* If this is a modular type, there are various possibilities
	   depending on the operation and whether the modulus is a
	   power of two or not.  */

	if (modulus)
	  {
	    gcc_assert (operation_type == base_type);
	    operand = convert (operation_type, operand);

	    /* The fastest in the negate case for binary modulus is
	       the straightforward code; the TRUNC_MOD_EXPR below
	       is an AND operation.  */
	    if (op_code == NEGATE_EXPR && mod_pow2)
	      result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
				    fold_build1 (NEGATE_EXPR, operation_type,
						 operand),
				    modulus);

	    /* For the nonbinary negate case, return zero for zero operand,
	       else return the modulus minus the operand.  If the modulus
	       is a power of two minus one, we can do the subtraction
	       as an XOR since it is equivalent and faster on most machines.  */
	    else if (op_code == NEGATE_EXPR && !mod_pow2)
	      {
		if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
						modulus,
						convert (operation_type,
							 integer_one_node))))
		  result = fold_build2 (BIT_XOR_EXPR, operation_type,
					operand, modulus);
		else
		  result = fold_build2 (MINUS_EXPR, operation_type,
					modulus, operand);

		result = fold_build3 (COND_EXPR, operation_type,
				      fold_build2 (NE_EXPR,
						   boolean_type_node,
						   operand,
						   convert
						   (operation_type,
						    integer_zero_node)),
				      result, operand);
	      }
	    else
	      {
		/* For the NOT cases, we need a constant equal to
		   the modulus minus one.  For a binary modulus, we
		   XOR against the constant and subtract the operand from
		   that constant for nonbinary modulus.  */
		tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
					 convert (operation_type,
						  integer_one_node));

		if (mod_pow2)
		  result = fold_build2 (BIT_XOR_EXPR, operation_type,
					operand, cnst);
		else
		  result = fold_build2 (MINUS_EXPR, operation_type,
					cnst, operand);
	      }

	    break;
	  }
      }

      /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
			    convert (operation_type, operand));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}
/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
		 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that result, true and false operands have
     same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands and
     then dereference the result.  Likewise if the result type is passed by
     reference, because creating a temporary of this type is not allowed.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || TYPE_IS_BY_REFERENCE_P (result_type)
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
			true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}
/* Similar, but for COMPOUND_EXPR.  */

tree
build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
{
  bool addr_p = false;
  tree result;

  /* If the result type is unconstrained, take the address of the operand and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
      addr_p = true;
    }

  result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
			expr_operand);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

tree
build_call_n_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  return fn;
}
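
/* Usage note (an addition to this excerpt): build_call_raise below uses
   this routine to build the actual runtime call, e.g.

       build_call_n_expr (fndecl, 2, filename_address, line_number_cst);

   keeping the call unfolded so a CALL_EXPR is always returned.  */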
/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled on
   the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */

tree
build_call_raise (int msg, Node_Id gnat_node, char kind)
{
  tree fndecl = gnat_raise_decls[msg];
  tree label = get_exception_label (kind);
  tree filename;
  int line_number;
  const char *str;
  int len;

  /* If this is to be done as a goto, handle that case.  */
  if (label)
    {
      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);

      /* If Local_Raise is present, generate
	 Local_Raise (exception'Identity);  */
      if (Present (local_raise))
	{
	  tree gnu_local_raise
	    = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
	  tree gnu_exception_entity
	    = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
	  tree gnu_call
	    = build_call_n_expr (gnu_local_raise, 1,
				 build_unary_op (ADDR_EXPR, NULL_TREE,
						 gnu_exception_entity));

	  gnu_result = build2 (COMPOUND_EXPR, void_type_node,
			       gnu_call, gnu_result);
	}

      return gnu_result;
    }

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
	? IDENTIFIER_POINTER
	  (get_identifier (Get_Name_String
			   (Debug_Source_Name
			    (Get_Source_File_Index (Sloc (gnat_node))))))
	: ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  line_number
    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
      ? Get_Logical_Line_Number (Sloc (gnat_node)) : input_line;

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
					   build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 2,
		       build1 (ADDR_EXPR,
			       build_pointer_type (unsigned_char_type_node),
			       filename),
		       build_int_cst (NULL_TREE, line_number));
}
/* Similar to build_call_raise, for an index or range check exception as
   determined by MSG, with extra information generated of the form
   "INDEX out of range FIRST..LAST".  */

tree
build_call_raise_range (int msg, Node_Id gnat_node,
			tree index, tree first, tree last)
{
  tree fndecl = gnat_raise_decls_ext[msg];
  tree filename;
  int line_number, column_number;
  const char *str;
  int len;

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
	? IDENTIFIER_POINTER
	  (get_identifier (Get_Name_String
			   (Debug_Source_Name
			    (Get_Source_File_Index (Sloc (gnat_node))))))
	: ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
    {
      line_number = Get_Logical_Line_Number (Sloc (gnat_node));
      column_number = Get_Column_Number (Sloc (gnat_node));
    }
  else
    {
      line_number = input_line;
      column_number = 0;
    }

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
					   build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 6,
		       build1 (ADDR_EXPR,
			       build_pointer_type (unsigned_char_type_node),
			       filename),
		       build_int_cst (NULL_TREE, line_number),
		       build_int_cst (NULL_TREE, column_number),
		       convert (integer_type_node, index),
		       convert (integer_type_node, first),
		       convert (integer_type_node, last));
}
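
/* Usage note (an addition to this excerpt): the six arguments passed above
   let the runtime format a message of the form "INDEX out of range
   FIRST..LAST" in addition to the usual file name and line/column pair.  */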
/* Similar to build_call_raise, with extra information about the column
   where the check failed.  */

tree
build_call_raise_column (int msg, Node_Id gnat_node)
{
  tree fndecl = gnat_raise_decls_ext[msg];
  tree filename;
  int line_number, column_number;
  const char *str;
  int len;

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
	? IDENTIFIER_POINTER
	  (get_identifier (Get_Name_String
			   (Debug_Source_Name
			    (Get_Source_File_Index (Sloc (gnat_node))))))
	: ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
    {
      line_number = Get_Logical_Line_Number (Sloc (gnat_node));
      column_number = Get_Column_Number (Sloc (gnat_node));
    }
  else
    {
      line_number = input_line;
      column_number = 0;
    }

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
					   build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 3,
		       build1 (ADDR_EXPR,
			       build_pointer_type (unsigned_char_type_node),
			       filename),
		       build_int_cst (NULL_TREE, line_number),
		       build_int_cst (NULL_TREE, column_number));
}
/* qsort comparer for the bit positions of two constructor elements
   for record components.  */

static int
compare_elmt_bitpos (const PTR rt1, const PTR rt2)
{
  const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
  const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
  const_tree const field1 = elmt1->index;
  const_tree const field2 = elmt2->index;
  const int ret
    = tree_int_cst_compare (bit_position (field1), bit_position (field2));

  return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
}
/* Return a CONSTRUCTOR of TYPE whose elements are V.  */

tree
gnat_build_constructor (tree type, VEC(constructor_elt,gc) *v)
{
  bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
  bool side_effects = false;
  tree result, obj, val;
  unsigned int n_elmts;

  /* Scan the elements to see if they are all constant or if any has side
     effects, to let us set global flags on the resulting constructor.  Count
     the elements along the way for possible sorting purposes below.  */
  FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
    {
      /* The predicate must be in keeping with output_constructor.  */
      if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
          || (TREE_CODE (type) == RECORD_TYPE
              && CONSTRUCTOR_BITFIELD_P (obj)
              && !initializer_constant_valid_for_bitfield_p (val))
          || !initializer_constant_valid_p (val, TREE_TYPE (val)))
        allconstant = false;

      if (TREE_SIDE_EFFECTS (val))
        side_effects = true;
    }

  /* For record types with constant components only, sort the field list
     by increasing bit position.  This is necessary to ensure the
     constructor can be output as static data.  */
  if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
    VEC_qsort (constructor_elt, v, compare_elmt_bitpos);

  result = build_constructor (type, v);
  TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
  TREE_SIDE_EFFECTS (result) = side_effects;
  TREE_READONLY (result) = TYPE_READONLY (type) || allconstant;

  return result;
}
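
/* For illustration -- not part of this unit: typical use of
   gnat_build_constructor, as done in build_allocator below.  Append
   (field, value) pairs to a vector, then build the constructor; the field
   and value names here are hypothetical.  */
#if 0
VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
CONSTRUCTOR_APPEND_ELT (v, bounds_field, bounds_value);
CONSTRUCTOR_APPEND_ELT (v, data_field, data_value);
tree init = gnat_build_constructor (record_type, v);
#endif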
/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
   an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
   for the field.  Don't fold the result if NO_FOLD_P is true.

   We also handle the fact that we might have been passed a pointer to the
   actual record and know how to look for fields in variant parts.  */

static tree
build_simple_component_ref (tree record_variable, tree component,
                            tree field, bool no_fold_p)
{
  tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
  tree ref, inner_variable;

  gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
              && COMPLETE_TYPE_P (record_type)
              && (component == NULL_TREE) != (field == NULL_TREE));

  /* If no field was specified, look for a field with the specified name in
     the current record only.  */
  if (!field)
    for (field = TYPE_FIELDS (record_type);
         field;
         field = DECL_CHAIN (field))
      if (DECL_NAME (field) == component)
        break;

  if (!field)
    return NULL_TREE;

  /* If this field is not in the specified record, see if we can find a field
     in the specified record whose original field is the same as this one.  */
  if (DECL_CONTEXT (field) != record_type)
    {
      tree new_field;

      /* First loop through normal components.  */
      for (new_field = TYPE_FIELDS (record_type);
           new_field;
           new_field = DECL_CHAIN (new_field))
        if (SAME_FIELD_P (field, new_field))
          break;

      /* Next, see if we're looking for an inherited component in an extension.
         If so, look through the extension directly.  */
      if (!new_field
          && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
          && TYPE_ALIGN_OK (record_type)
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
             == RECORD_TYPE
          && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
        {
          ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
                                            NULL_TREE, field, no_fold_p);
          if (ref)
            return ref;
        }

      /* Next, loop through DECL_INTERNAL_P components if we haven't found the
         component in the first search.  Doing this search in two steps is
         required to avoid hidden homonymous fields in the _Parent field.  */
      if (!new_field)
        for (new_field = TYPE_FIELDS (record_type);
             new_field;
             new_field = DECL_CHAIN (new_field))
          if (DECL_INTERNAL_P (new_field))
            {
              tree field_ref
                = build_simple_component_ref (record_variable,
                                              NULL_TREE, new_field, no_fold_p);
              ref = build_simple_component_ref (field_ref, NULL_TREE, field,
                                                no_fold_p);
              if (ref)
                return ref;
            }

      field = new_field;
    }

  if (!field)
    return NULL_TREE;

  /* If the field's offset has overflowed, do not try to access it, as doing
     so may trigger sanity checks deeper in the back-end.  Note that we don't
     need to warn since this will be done on trying to declare the object.  */
  if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
      && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
    return NULL_TREE;

  /* Look through conversions between type variants.  This is transparent as
     far as the field is concerned.  */
  if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
         == record_type)
    inner_variable = TREE_OPERAND (record_variable, 0);
  else
    inner_variable = record_variable;

  ref = build3 (COMPONENT_REF, TREE_TYPE (field), inner_variable, field,
                NULL_TREE);

  if (TREE_READONLY (record_variable)
      || TREE_READONLY (field)
      || TYPE_READONLY (record_type))
    TREE_READONLY (ref) = 1;

  if (TREE_THIS_VOLATILE (record_variable)
      || TREE_THIS_VOLATILE (field)
      || TYPE_VOLATILE (record_type))
    TREE_THIS_VOLATILE (ref) = 1;

  if (no_fold_p)
    return ref;

  /* The generic folder may punt in this case because the inner array type
     can be self-referential, but folding is in fact not problematic.  */
  if (TREE_CODE (record_variable) == CONSTRUCTOR
      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
    {
      VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
      unsigned HOST_WIDE_INT idx;
      tree index, value;
      FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
        if (index == field)
          return value;
      return ref;
    }

  return fold (ref);
}
/* Like build_simple_component_ref, except that we give an error if the
   reference could not be found.  */

tree
build_component_ref (tree record_variable, tree component,
                     tree field, bool no_fold_p)
{
  tree ref = build_simple_component_ref (record_variable, component, field,
                                         no_fold_p);
  if (ref)
    return ref;

  /* If FIELD was specified, assume this is an invalid user field so raise
     Constraint_Error.  Otherwise, we have no type to return so abort.  */
  gcc_assert (field);
  return build1 (NULL_EXPR, TREE_TYPE (field),
                 build_call_raise (CE_Discriminant_Check_Failed, Empty,
                                   N_Raise_Constraint_Error));
}
/* Helper for build_call_alloc_dealloc, with arguments to be interpreted
   identically.  Process the case where a GNAT_PROC to call is provided.  */

static inline tree
build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
                               Entity_Id gnat_proc, Entity_Id gnat_pool)
{
  tree gnu_proc = gnat_to_gnu (gnat_proc);
  tree gnu_call;

  /* A storage pool's underlying type is a record type (for both predefined
     storage pools and GNAT simple storage pools).  The secondary stack uses
     the same mechanism, but its pool object (SS_Pool) is an integer.  */
  if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
    {
      /* The size is the third parameter; the alignment is the fourth and
         has the same type.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      tree gnu_pool = gnat_to_gnu (gnat_pool);
      tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
      tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);

      gnu_size = convert (gnu_size_type, gnu_size);
      gnu_align = convert (gnu_size_type, gnu_align);

      /* The first arg is always the address of the storage pool; next
         comes the address of the object, for a deallocator, then the
         size and alignment.  */
      if (gnu_obj)
        gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
                                      gnu_size, gnu_align);
      else
        gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
                                      gnu_size, gnu_align);
    }

  /* Secondary stack case.  */
  else
    {
      /* The size is the second parameter.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (First_Formal (gnat_proc)));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      gnu_size = convert (gnu_size_type, gnu_size);

      /* The first arg is the address of the object, for a deallocator,
         then the size.  */
      if (gnu_obj)
        gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
      else
        gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
    }

  return gnu_call;
}
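
/* For illustration -- not part of this unit: the C-level shape of the calls
   built above, with hypothetical names.  For a record-typed pool, allocation
   passes the pool address, then size and alignment; deallocation additionally
   passes the object address.  On the secondary stack there is no pool
   argument.  */
#if 0
typedef unsigned long storage_count;

extern void *pool_allocate (void *pool, storage_count size,
                            storage_count align);
extern void pool_deallocate (void *pool, void *obj, storage_count size,
                             storage_count align);
extern void *ss_allocate (storage_count size);
#endif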
/* Helper for build_call_alloc_dealloc, to build and return an allocator for
   DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
   __gnat_malloc allocator.  Honor DATA_TYPE alignments greater than what the
   latter offers.  */

static inline tree
maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
{
  /* When the DATA_TYPE alignment is stricter than what malloc offers
     (super-aligned case), we allocate an "aligning" wrapper type and return
     the address of its single data field with the malloc's return value
     stored just in front.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int system_allocator_alignment
    = get_target_system_allocator_alignment () * BITS_PER_UNIT;

  tree aligning_type
    = ((data_align > system_allocator_alignment)
       ? make_aligning_type (data_type, data_align, data_size,
                             system_allocator_alignment,
                             POINTER_SIZE / BITS_PER_UNIT)
       : NULL_TREE);

  tree size_to_malloc
    = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;

  tree malloc_ptr;

  /* On VMS, if pointers are 64-bit and the allocator size is 32-bit or
     Convention C, allocate 32-bit memory.  */
  if (TARGET_ABI_OPEN_VMS
      && POINTER_SIZE == 64
      && Nkind (gnat_node) == N_Allocator
      && (UI_To_Int (Esize (Etype (gnat_node))) == 32
          || Convention (Etype (gnat_node)) == Convention_C))
    malloc_ptr = build_call_n_expr (malloc32_decl, 1, size_to_malloc);
  else
    malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);

  if (aligning_type)
    {
      /* Latch malloc's return value and get a pointer to the aligning field
         first.  */
      tree storage_ptr = gnat_protect_expr (malloc_ptr);

      tree aligning_record_addr
        = convert (build_pointer_type (aligning_type), storage_ptr);

      tree aligning_record
        = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);

      tree aligning_field
        = build_component_ref (aligning_record, NULL_TREE,
                               TYPE_FIELDS (aligning_type), false);

      tree aligning_field_addr
        = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);

      /* Then arrange to store the allocator's return value ahead
         and return.  */
      tree storage_ptr_slot_addr
        = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                           convert (ptr_void_type_node, aligning_field_addr),
                           size_int (-(HOST_WIDE_INT) POINTER_SIZE
                                     / BITS_PER_UNIT));

      tree storage_ptr_slot
        = build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (build_pointer_type (ptr_void_type_node),
                                   storage_ptr_slot_addr));

      return
        build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
                build_binary_op (INIT_EXPR, NULL_TREE,
                                 storage_ptr_slot, storage_ptr),
                aligning_field_addr);
    }
  else
    return malloc_ptr;
}
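
/* For illustration -- not part of this unit: the super-aligned allocation
   scheme above, written in plain C under the assumption that ALIGN is a
   power of two.  We over-allocate, round the data pointer up while keeping
   room for a pointer-sized slot, and stash malloc's return value just in
   front of the data so the matching free can recover it.  */
#if 0
#include <stdlib.h>
#include <stdint.h>

static void *
aligned_malloc (size_t size, size_t align)
{
  void *raw = malloc (size + align + sizeof (void *));
  uintptr_t data;

  if (!raw)
    return NULL;

  /* Lowest aligned address leaving a pointer-sized slot in front.  */
  data = ((uintptr_t) raw + sizeof (void *) + align - 1)
         & ~(uintptr_t) (align - 1);

  /* Store the allocator's return value ahead of the data block.  */
  ((void **) data)[-1] = raw;
  return (void *) data;
}
#endif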
/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
   designated by DATA_PTR using the __gnat_free entry point.  */

static inline tree
maybe_wrap_free (tree data_ptr, tree data_type)
{
  /* In the regular alignment case, we pass the data pointer straight to free.
     In the superaligned case, we need to retrieve the initial allocator
     return value, stored in front of the data block at allocation time.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int system_allocator_alignment
    = get_target_system_allocator_alignment () * BITS_PER_UNIT;

  tree free_ptr;

  if (data_align > system_allocator_alignment)
    {
      /* DATA_FRONT_PTR (void *)
         = (void *)DATA_PTR - sizeof (void *)  */
      tree data_front_ptr
        = build_binary_op
          (POINTER_PLUS_EXPR, ptr_void_type_node,
           convert (ptr_void_type_node, data_ptr),
           size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));

      /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR  */
      free_ptr
        = build_unary_op
          (INDIRECT_REF, NULL_TREE,
           convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
    }
  else
    free_ptr = data_ptr;

  return build_call_n_expr (free_decl, 1, free_ptr);
}
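
/* For illustration -- not part of this unit: the release operation matching
   the aligned_malloc sketch above.  The original malloc pointer is fetched
   from the slot stored just in front of the data block.  */
#if 0
#include <stdlib.h>

static void
aligned_free (void *data)
{
  if (data)
    free (((void **) data)[-1]);
}
#endif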
/* Build a GCC tree to call an allocation or deallocation function.
   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
   generate an allocator.

   GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
   object type, used to determine the to-be-honored address alignment.
   GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
   pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
   to provide an error location for restriction violation messages.  */

tree
build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
                          Entity_Id gnat_proc, Entity_Id gnat_pool,
                          Node_Id gnat_node)
{
  gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);

  /* Explicit proc to call?  This one is assumed to deal with the type
     alignment constraints.  */
  if (Present (gnat_proc))
    return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
                                          gnat_proc, gnat_pool);

  /* Otherwise, object to "free" or "malloc" with possible special processing
     for alignments stricter than what the default allocator honors.  */
  else if (gnu_obj)
    return maybe_wrap_free (gnu_obj, gnu_type);
  else
    {
      /* Assert that we no longer can be called with this special pool.  */
      gcc_assert (gnat_pool != -1);

      /* Check that we aren't violating the associated restriction.  */
      if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
        Check_No_Implicit_Heap_Alloc (gnat_node);

      return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
    }
}
/* Build a GCC tree that corresponds to allocating an object of TYPE whose
   initial value is INIT, if INIT is nonzero.  Convert the expression to
   RESULT_TYPE, which must be some pointer type, and return the result.

   GNAT_PROC and GNAT_POOL optionally give the procedure to call and
   the storage pool to use.  GNAT_NODE is used to provide an error
   location for restriction violation messages.  If IGNORE_INIT_TYPE is
   true, ignore the type of INIT for the purpose of determining the size;
   this will cause the maximum size to be allocated if TYPE is of
   self-referential size.  */

tree
build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
                 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
{
  tree size, storage, storage_deref, storage_init;

  /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
  if (init && TREE_CODE (init) == NULL_EXPR)
    return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));

  /* If the initializer, if present, is a COND_EXPR, deal with each branch.  */
  else if (init && TREE_CODE (init) == COND_EXPR)
    return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
                   build_allocator (type, TREE_OPERAND (init, 1), result_type,
                                    gnat_proc, gnat_pool, gnat_node,
                                    ignore_init_type),
                   build_allocator (type, TREE_OPERAND (init, 2), result_type,
                                    gnat_proc, gnat_pool, gnat_node,
                                    ignore_init_type));

  /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
     sizes of the object and its template.  Allocate the whole thing and
     fill in the parts that are known.  */
  else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
    {
      tree storage_type
        = build_unc_object_type_from_ptr (result_type, type,
                                          get_identifier ("ALLOC"), false);
      tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
      tree storage_ptr_type = build_pointer_type (storage_type);

      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
                                             init);

      /* If the size overflows, pass -1 so Storage_Error will be raised.  */
      if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
        size = size_int (-1);

      storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
                                          gnat_proc, gnat_pool, gnat_node);
      storage = convert (storage_ptr_type, gnat_protect_expr (storage));
      storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
      TREE_THIS_NOTRAP (storage_deref) = 1;

      /* If there is an initializing expression, then make a constructor for
         the entire object including the bounds and copy it into the object.
         If there is no initializing expression, just set the bounds.  */
      if (init)
        {
          VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

          CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
                                  build_template (template_type, type, init));
          CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
                                  init);
          storage_init
            = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
                               gnat_build_constructor (storage_type, v));
        }
      else
        storage_init
          = build_binary_op (INIT_EXPR, NULL_TREE,
                             build_component_ref (storage_deref, NULL_TREE,
                                                  TYPE_FIELDS (storage_type),
                                                  false),
                             build_template (template_type, type, NULL_TREE));

      return build2 (COMPOUND_EXPR, result_type,
                     storage_init, convert (result_type, storage));
    }

  size = TYPE_SIZE_UNIT (type);

  /* If we have an initializing expression, see if its size is simpler
     than the size from the type.  */
  if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
      && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
          || CONTAINS_PLACEHOLDER_P (size)))
    size = TYPE_SIZE_UNIT (TREE_TYPE (init));

  /* If the size is still self-referential, reference the initializing
     expression, if it is present.  If not, this must have been a
     call to allocate a library-level object, in which case we use
     the maximum size.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    {
      if (!ignore_init_type && init)
        size = substitute_placeholder_in_expr (size, init);
      else
        size = max_size (size, true);
    }

  /* If the size overflows, pass -1 so Storage_Error will be raised.  */
  if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
    size = size_int (-1);

  storage = convert (result_type,
                     build_call_alloc_dealloc (NULL_TREE, size, type,
                                               gnat_proc, gnat_pool,
                                               gnat_node));

  /* If we have an initial value, protect the new address, assign the value
     and return the address with a COMPOUND_EXPR.  */
  if (init)
    {
      storage = gnat_protect_expr (storage);
      storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
      TREE_THIS_NOTRAP (storage_deref) = 1;
      storage_init
        = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
      return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
    }

  return storage;
}
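
/* For illustration -- not part of this unit: what the fat/thin pointer
   branch above builds, in plain C.  The bounds "template" and the data are
   co-allocated in a single block and the bounds are filled in first; the
   element type and bounds layout below are hypothetical.  */
#if 0
#include <stdlib.h>

struct bounds { int first, last; };
struct alloc  { struct bounds b; int data[]; };  /* the "ALLOC" storage type */

static struct alloc *
allocate_unconstrained (int first, int last)
{
  size_t n = (size_t) (last - first + 1);
  struct alloc *p = malloc (sizeof (struct alloc) + n * sizeof (int));

  if (p)
    {
      /* Without an initializer, just set the bounds; with one, the whole
         block would be assigned from a single constructor.  */
      p->b.first = first;
      p->b.last = last;
    }
  return p;
}
#endif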
/* Indicate that we need to take the address of T and that it therefore
   should not be allocated in a register.  Return true if successful.  */

bool
gnat_mark_addressable (tree t)
{
  while (true)
    switch (TREE_CODE (t))
      {
      case ADDR_EXPR:
      case COMPONENT_REF:
      case ARRAY_REF:
      case ARRAY_RANGE_REF:
      case REALPART_EXPR:
      case IMAGPART_EXPR:
      case VIEW_CONVERT_EXPR:
      case NON_LVALUE_EXPR:
      CASE_CONVERT:
        t = TREE_OPERAND (t, 0);
        break;

      case COMPOUND_EXPR:
        t = TREE_OPERAND (t, 1);
        break;

      case CONSTRUCTOR:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case FUNCTION_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case CONST_DECL:
        return DECL_CONST_CORRESPONDING_VAR (t)
               && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));

      default:
        return true;
      }
}
/* Save EXP for later use or reuse.  This is equivalent to save_expr in tree.c
   but we know how to handle our own nodes.  */

tree
gnat_save_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  if (code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  return save_expr (exp);
}
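
/* For illustration -- not part of this unit: the effect of a SAVE_EXPR at
   the C level.  When an expression with side effects is reused, it is
   evaluated once into a temporary and the temporary is referenced
   thereafter; f and the -1 failure value are hypothetical.  */
#if 0
extern int f (int);

static int
checked_value (int i, int first, int last)
{
  const int t = f (i);  /* SAVE_EXPR: evaluate the operand only once */

  return (t >= first && t <= last) ? t : -1;
}
#endif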
/* Protect EXP for immediate reuse.  This is a variant of gnat_save_expr that
   is optimized under the assumption that EXP's value doesn't change before
   its subsequent reuse(s) except through its potential reevaluation.  */

tree
gnat_protect_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
    return exp;

  /* If EXP has no side effects, we theoretically don't need to do anything.
     However, we may be recursively passed more and more complex expressions
     involving checks which will be reused multiple times and eventually be
     unshared for gimplification; in order to avoid a complexity explosion
     at that point, we protect any expressions more complex than a simple
     arithmetic expression.  */
  if (!TREE_SIDE_EFFECTS (exp))
    {
      tree inner = skip_simple_arithmetic (exp);
      if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
        return exp;
    }

  /* If this is a conversion, protect what's inside the conversion.  */
  if (code == NON_LVALUE_EXPR
      || CONVERT_EXPR_CODE_P (code)
      || code == VIEW_CONVERT_EXPR)
    return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));

  /* If we're indirectly referencing something, we only need to protect the
     address since the data itself can't change in these situations.  */
  if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  /* If this is a fat pointer or something that can be placed in a register,
     just make a SAVE_EXPR.  Likewise for a CALL_EXPR as large objects are
     returned via invisible reference in most ABIs so the temporary will
     directly be filled by the callee.  */
  if (TYPE_IS_FAT_POINTER_P (type)
      || TYPE_MODE (type) != BLKmode
      || code == CALL_EXPR)
    return save_expr (exp);

  /* Otherwise, protect the address of the object and dereference it.  */
  return
    build_unary_op (INDIRECT_REF, type,
                    save_expr (build_unary_op (ADDR_EXPR,
                                               build_reference_type (type),
                                               exp)));
}
/* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
   argument to force evaluation of everything.  */

static tree
gnat_stabilize_reference_1 (tree e, bool force)
{
  enum tree_code code = TREE_CODE (e);
  tree type = TREE_TYPE (e);
  tree result;

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side effects.  But we can
     ignore things that are actual constants or that have already been
     handled by this function.  */
  if (TREE_CONSTANT (e) || code == SAVE_EXPR)
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If this is a COMPONENT_REF of a fat pointer, save the entire
         fat pointer.  This may be more efficient, but will also allow
         us to more easily find the match for the PLACEHOLDER_EXPR.  */
      if (code == COMPONENT_REF
          && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
        result
          = build3 (code, type,
                    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                    TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
      /* If the expression has side effects, then encase it in a SAVE_EXPR
         so that it will only be evaluated once.  */
      /* The tcc_reference and tcc_comparison classes could be handled as
         below, but it is generally faster to only evaluate them once.  */
      else if (TREE_SIDE_EFFECTS (e) || force)
        return save_expr (e);
      else
        return e;
      break;

    case tcc_binary:
      /* Recursively stabilize each operand.  */
      result
        = build2 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
      break;

    case tcc_unary:
      /* Recursively stabilize the operand.  */
      result
        = build1 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
      break;

    default:
      gcc_unreachable ();
    }

  /* See similar handling in gnat_stabilize_reference.  */
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  if (code == INDIRECT_REF
      || code == UNCONSTRAINED_ARRAY_REF
      || code == ARRAY_REF
      || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);

  return result;
}
/* This is equivalent to stabilize_reference in tree.c but we know how to
   handle our own nodes and we take extra arguments.  FORCE says whether to
   force evaluation of everything.  We set SUCCESS to true unless we walk
   through something we don't know how to stabilize.  */

tree
gnat_stabilize_reference (tree ref, bool force, bool *success)
{
  tree type = TREE_TYPE (ref);
  enum tree_code code = TREE_CODE (ref);
  tree result;

  /* Assume we'll succeed unless proven otherwise.  */
  if (success)
    *success = true;

  switch (code)
    {
    case CONST_DECL:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    case ADDR_EXPR:
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case VIEW_CONVERT_EXPR:
      result
        = build1 (code, type,
                  gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                            success));
      break;

    case INDIRECT_REF:
    case UNCONSTRAINED_ARRAY_REF:
      result = build1 (code, type,
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
                                                   force));
      break;

    case COMPONENT_REF:
      result = build3 (COMPONENT_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build3 (BIT_FIELD_REF, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 2),
                                                   force));
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      result = build4 (code, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
                                                   force),
                       NULL_TREE, NULL_TREE);
      break;

    case CALL_EXPR:
      result = gnat_stabilize_reference_1 (ref, force);
      break;

    case COMPOUND_EXPR:
      result = build2 (COMPOUND_EXPR, type,
                       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
                                                 success),
                       gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
                                                 success));
      break;

    case CONSTRUCTOR:
      /* Constructors with 1 element are used extensively to formally
         convert objects to special wrapping types.  */
      if (TREE_CODE (type) == RECORD_TYPE
          && VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ref)) == 1)
        {
          tree index
            = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->index;
          tree value
            = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0)->value;
          result
            = build_constructor_single (type, index,
                                        gnat_stabilize_reference_1 (value,
                                                                    force));
        }
      else
        {
          if (success)
            *success = false;
          return ref;
        }
      break;

    case ERROR_MARK:
      ref = error_mark_node;

      /* ...  fall through to failure ...  */

      /* If REF isn't a kind of lvalue we recognize, make no change.
         The caller should recognize the error for an invalid lvalue.  */
    default:
      if (success)
        *success = false;
      return ref;
    }

  /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
     may not be sustained across some paths, such as the one via build1 for
     INDIRECT_REF.  We reset those flags here in the general case, which is
     consistent with the GCC version of this routine.

     Special care should be taken regarding TREE_SIDE_EFFECTS, because some
     paths introduce side effects where there were none initially (e.g. if a
     SAVE_EXPR is built) and we also want to keep track of that.  */
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  if (code == INDIRECT_REF
      || code == UNCONSTRAINED_ARRAY_REF
      || code == ARRAY_REF
      || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);

  return result;
}
/* If EXPR is an expression that is invariant in the current function, in the
   sense that it can be evaluated anywhere in the function and any number of
   times, return EXPR or an equivalent expression.  Otherwise return NULL.  */

tree
gnat_invariant_expr (tree expr)
{
  tree type = TREE_TYPE (expr), t;

  expr = remove_conversions (expr, false);

  while ((TREE_CODE (expr) == CONST_DECL
          || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
         && decl_function_context (expr) == current_function_decl
         && DECL_INITIAL (expr))
    expr = remove_conversions (DECL_INITIAL (expr), false);

  if (TREE_CONSTANT (expr))
    return fold_convert (type, expr);

  t = expr;

  while (true)
    {
      switch (TREE_CODE (t))
        {
        case COMPONENT_REF:
          if (TREE_OPERAND (t, 2) != NULL_TREE)
            return NULL_TREE;
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
              || TREE_OPERAND (t, 2) != NULL_TREE
              || TREE_OPERAND (t, 3) != NULL_TREE)
            return NULL_TREE;
          break;

        case BIT_FIELD_REF:
        case VIEW_CONVERT_EXPR:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          break;

        case INDIRECT_REF:
          if (!TREE_READONLY (t)
              || TREE_SIDE_EFFECTS (t)
              || !TREE_THIS_NOTRAP (t))
            return NULL_TREE;
          break;

        default:
          goto object;
        }

      t = TREE_OPERAND (t, 0);
    }

object:
  if (TREE_SIDE_EFFECTS (t))
    return NULL_TREE;

  if (TREE_CODE (t) == CONST_DECL
      && (DECL_EXTERNAL (t)
          || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  if (!TREE_READONLY (t))
    return NULL_TREE;

  if (TREE_CODE (t) == PARM_DECL)
    return fold_convert (type, expr);

  if (TREE_CODE (t) == VAR_DECL
      && (DECL_EXTERNAL (t)
          || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  return NULL_TREE;
}
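
/* For illustration -- not part of this unit: the kind of hoisting that
   gnat_invariant_expr enables.  A bound loaded through a read-only,
   non-trapping reference is invariant in the function, so it can be
   evaluated once before the loop; all names are hypothetical.  */
#if 0
struct bounds { int first, last; };

static int
sum (const struct bounds *b, const int *a)
{
  int s = 0;
  int i;
  const int last = b->last;  /* invariant: evaluated once, outside the loop */

  for (i = b->first; i <= last; i++)
    s += a[i - b->first];
  return s;
}
#endif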