1 /* Array translation routines
2 Copyright (C) 2002-2013 Free Software Foundation, Inc.
3 Contributed by Paul Brook <paul@nowt.org>
4 and Steven Bosscher <s.bosscher@student.tudelft.nl>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* trans-array.c-- Various array related code, including scalarization,
23 allocation, initialization and other support routines. */
25 /* How the scalarizer works.
26 In gfortran, array expressions use the same core routines as scalar
28 First, a Scalarization State (SS) chain is built. This is done by walking
29 the expression tree, and building a linear list of the terms in the
30 expression. As the tree is walked, scalar subexpressions are translated.
32 The scalarization parameters are stored in a gfc_loopinfo structure.
33 First the start and stride of each term is calculated by
34 gfc_conv_ss_startstride. During this process the expressions for the array
35 descriptors and data pointers are also translated.
37 If the expression is an assignment, we must then resolve any dependencies.
38 In Fortran all the rhs values of an assignment must be evaluated before
39 any assignments take place. This can require a temporary array to store the
40 values. We also require a temporary when we are passing array expressions
41 or vector subscripts as procedure parameters.
43 Array sections are passed without copying to a temporary. These use the
44 scalarizer to determine the shape of the section. The flag
45 loop->array_parameter tells the scalarizer that the actual values and loop
46 variables will not be required.
48 The function gfc_conv_loop_setup generates the scalarization setup code.
49 It determines the range of the scalarizing loop variables. If a temporary
50 is required, this is created and initialized. Code for scalar expressions
51 taken outside the loop is also generated at this time. Next the offset and
52 scaling required to translate from loop variables to array indices for each
55 A call to gfc_start_scalarized_body marks the start of the scalarized
56 expression. This creates a scope and declares the loop variables. Before
57 calling this gfc_make_ss_chain_used must be used to indicate which terms
58 will be used inside this loop.
60 The scalar gfc_conv_* functions are then used to build the main body of the
61 scalarization loop. Scalarization loop variables and precalculated scalar
62 values are automatically substituted. Note that gfc_advance_se_ss_chain
63 must be used, rather than changing the se->ss directly.
65 For assignment expressions requiring a temporary two sub loops are
66 generated. The first stores the result of the expression in the temporary,
67 the second copies it to the result. A call to
68 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
69 the start of the copying loop. The temporary may be less than full rank.
71 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
72 loops. The loops are added to the pre chain of the loopinfo. The post
73 chain may still contain cleanup code.
75 After the loop code has been added into its parent scope gfc_cleanup_loop
76 is called to free all the SS allocated by the scalarizer. */
80 #include "coretypes.h"
82 #include "gimple.h" /* For create_tmp_var_name. */
83 #include "diagnostic-core.h" /* For internal_error/fatal_error. */
86 #include "constructor.h"
88 #include "trans-stmt.h"
89 #include "trans-types.h"
90 #include "trans-array.h"
91 #include "trans-const.h"
92 #include "dependency.h"
94 static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);
96 /* The contents of this structure aren't actually used, just the address. */
97 static gfc_ss gfc_ss_terminator_var;
98 gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;
102 gfc_array_dataptr_type (tree desc)
104 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
108 /* Build expressions to access the members of an array descriptor.
109 It's surprisingly easy to mess up here, so never access
110 an array descriptor by "brute force", always use these
111 functions. This also avoids problems if we change the format
112 of an array descriptor.
114 To understand these magic numbers, look at the comments
115 before gfc_build_array_type() in trans-types.c.
117 The code within these defines should be the only code which knows the format
118 of an array descriptor.
120 Any code just needing to read obtain the bounds of an array should use
121 gfc_conv_array_* rather than the following functions as these will return
122 know constant values, and work with arrays which do not have descriptors.
124 Don't forget to #undef these! */
127 #define OFFSET_FIELD 1
128 #define DTYPE_FIELD 2
129 #define DIMENSION_FIELD 3
130 #define CAF_TOKEN_FIELD 4
132 #define STRIDE_SUBFIELD 0
133 #define LBOUND_SUBFIELD 1
134 #define UBOUND_SUBFIELD 2
136 /* This provides READ-ONLY access to the data field. The field itself
137 doesn't have the proper type. */
140 gfc_conv_descriptor_data_get (tree desc)
144 type = TREE_TYPE (desc);
145 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
147 field = TYPE_FIELDS (type);
148 gcc_assert (DATA_FIELD == 0);
150 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
152 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);
157 /* This provides WRITE access to the data field.
159 TUPLES_P is true if we are generating tuples.
161 This function gets called through the following macros:
162 gfc_conv_descriptor_data_set
163 gfc_conv_descriptor_data_set. */
166 gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
170 type = TREE_TYPE (desc);
171 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
173 field = TYPE_FIELDS (type);
174 gcc_assert (DATA_FIELD == 0);
176 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
178 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
182 /* This provides address access to the data field. This should only be
183 used by array allocation, passing this on to the runtime. */
186 gfc_conv_descriptor_data_addr (tree desc)
190 type = TREE_TYPE (desc);
191 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
193 field = TYPE_FIELDS (type);
194 gcc_assert (DATA_FIELD == 0);
196 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
198 return gfc_build_addr_expr (NULL_TREE, t);
202 gfc_conv_descriptor_offset (tree desc)
207 type = TREE_TYPE (desc);
208 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
210 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
211 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
213 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
214 desc, field, NULL_TREE);
218 gfc_conv_descriptor_offset_get (tree desc)
220 return gfc_conv_descriptor_offset (desc);
224 gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
227 tree t = gfc_conv_descriptor_offset (desc);
228 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
233 gfc_conv_descriptor_dtype (tree desc)
238 type = TREE_TYPE (desc);
239 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
241 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
242 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
244 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
245 desc, field, NULL_TREE);
250 gfc_conv_descriptor_rank (tree desc)
255 dtype = gfc_conv_descriptor_dtype (desc);
256 tmp = build_int_cst (TREE_TYPE (dtype), GFC_DTYPE_RANK_MASK);
257 tmp = fold_build2_loc (input_location, BIT_AND_EXPR, TREE_TYPE (dtype),
259 return fold_convert (gfc_get_int_type (gfc_default_integer_kind), tmp);
264 gfc_get_descriptor_dimension (tree desc)
268 type = TREE_TYPE (desc);
269 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
271 field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
272 gcc_assert (field != NULL_TREE
273 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
274 && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);
276 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
277 desc, field, NULL_TREE);
282 gfc_conv_descriptor_dimension (tree desc, tree dim)
286 tmp = gfc_get_descriptor_dimension (desc);
288 return gfc_build_array_ref (tmp, dim, NULL);
293 gfc_conv_descriptor_token (tree desc)
298 type = TREE_TYPE (desc);
299 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
300 gcc_assert (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE);
301 gcc_assert (gfc_option.coarray == GFC_FCOARRAY_LIB);
302 field = gfc_advance_chain (TYPE_FIELDS (type), CAF_TOKEN_FIELD);
303 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == prvoid_type_node);
305 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
306 desc, field, NULL_TREE);
311 gfc_conv_descriptor_stride (tree desc, tree dim)
316 tmp = gfc_conv_descriptor_dimension (desc, dim);
317 field = TYPE_FIELDS (TREE_TYPE (tmp));
318 field = gfc_advance_chain (field, STRIDE_SUBFIELD);
319 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
321 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
322 tmp, field, NULL_TREE);
327 gfc_conv_descriptor_stride_get (tree desc, tree dim)
329 tree type = TREE_TYPE (desc);
330 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
331 if (integer_zerop (dim)
332 && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
333 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
334 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_RANK_CONT
335 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
336 return gfc_index_one_node;
338 return gfc_conv_descriptor_stride (desc, dim);
342 gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
343 tree dim, tree value)
345 tree t = gfc_conv_descriptor_stride (desc, dim);
346 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
350 gfc_conv_descriptor_lbound (tree desc, tree dim)
355 tmp = gfc_conv_descriptor_dimension (desc, dim);
356 field = TYPE_FIELDS (TREE_TYPE (tmp));
357 field = gfc_advance_chain (field, LBOUND_SUBFIELD);
358 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
360 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
361 tmp, field, NULL_TREE);
366 gfc_conv_descriptor_lbound_get (tree desc, tree dim)
368 return gfc_conv_descriptor_lbound (desc, dim);
372 gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
373 tree dim, tree value)
375 tree t = gfc_conv_descriptor_lbound (desc, dim);
376 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
380 gfc_conv_descriptor_ubound (tree desc, tree dim)
385 tmp = gfc_conv_descriptor_dimension (desc, dim);
386 field = TYPE_FIELDS (TREE_TYPE (tmp));
387 field = gfc_advance_chain (field, UBOUND_SUBFIELD);
388 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
390 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
391 tmp, field, NULL_TREE);
396 gfc_conv_descriptor_ubound_get (tree desc, tree dim)
398 return gfc_conv_descriptor_ubound (desc, dim);
402 gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
403 tree dim, tree value)
405 tree t = gfc_conv_descriptor_ubound (desc, dim);
406 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
409 /* Build a null array descriptor constructor. */
412 gfc_build_null_descriptor (tree type)
417 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
418 gcc_assert (DATA_FIELD == 0);
419 field = TYPE_FIELDS (type);
421 /* Set a NULL data pointer. */
422 tmp = build_constructor_single (type, field, null_pointer_node);
423 TREE_CONSTANT (tmp) = 1;
424 /* All other fields are ignored. */
430 /* Modify a descriptor such that the lbound of a given dimension is the value
431 specified. This also updates ubound and offset accordingly. */
434 gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
435 int dim, tree new_lbound)
437 tree offs, ubound, lbound, stride;
438 tree diff, offs_diff;
440 new_lbound = fold_convert (gfc_array_index_type, new_lbound);
442 offs = gfc_conv_descriptor_offset_get (desc);
443 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
444 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
445 stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);
447 /* Get difference (new - old) by which to shift stuff. */
448 diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
451 /* Shift ubound and offset accordingly. This has to be done before
452 updating the lbound, as they depend on the lbound expression! */
453 ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
455 gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
456 offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
458 offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
460 gfc_conv_descriptor_offset_set (block, desc, offs);
462 /* Finally set lbound to value we want. */
463 gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
467 /* Cleanup those #defines. */
472 #undef DIMENSION_FIELD
473 #undef CAF_TOKEN_FIELD
474 #undef STRIDE_SUBFIELD
475 #undef LBOUND_SUBFIELD
476 #undef UBOUND_SUBFIELD
479 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
480 flags & 1 = Main loop body.
481 flags & 2 = temp copy loop. */
484 gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
486 for (; ss != gfc_ss_terminator; ss = ss->next)
487 ss->info->useflags = flags;
491 /* Free a gfc_ss chain. */
494 gfc_free_ss_chain (gfc_ss * ss)
498 while (ss != gfc_ss_terminator)
500 gcc_assert (ss != NULL);
509 free_ss_info (gfc_ss_info *ss_info)
514 if (ss_info->refcount > 0)
517 gcc_assert (ss_info->refcount == 0);
519 switch (ss_info->type)
522 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
523 if (ss_info->data.array.subscript[n])
524 gfc_free_ss_chain (ss_info->data.array.subscript[n]);
538 gfc_free_ss (gfc_ss * ss)
540 free_ss_info (ss->info);
545 /* Creates and initializes an array type gfc_ss struct. */
548 gfc_get_array_ss (gfc_ss *next, gfc_expr *expr, int dimen, gfc_ss_type type)
551 gfc_ss_info *ss_info;
554 ss_info = gfc_get_ss_info ();
556 ss_info->type = type;
557 ss_info->expr = expr;
563 for (i = 0; i < ss->dimen; i++)
570 /* Creates and initializes a temporary type gfc_ss struct. */
573 gfc_get_temp_ss (tree type, tree string_length, int dimen)
576 gfc_ss_info *ss_info;
579 ss_info = gfc_get_ss_info ();
581 ss_info->type = GFC_SS_TEMP;
582 ss_info->string_length = string_length;
583 ss_info->data.temp.type = type;
587 ss->next = gfc_ss_terminator;
589 for (i = 0; i < ss->dimen; i++)
596 /* Creates and initializes a scalar type gfc_ss struct. */
599 gfc_get_scalar_ss (gfc_ss *next, gfc_expr *expr)
602 gfc_ss_info *ss_info;
604 ss_info = gfc_get_ss_info ();
606 ss_info->type = GFC_SS_SCALAR;
607 ss_info->expr = expr;
617 /* Free all the SS associated with a loop. */
620 gfc_cleanup_loop (gfc_loopinfo * loop)
622 gfc_loopinfo *loop_next, **ploop;
627 while (ss != gfc_ss_terminator)
629 gcc_assert (ss != NULL);
630 next = ss->loop_chain;
635 /* Remove reference to self in the parent loop. */
637 for (ploop = &loop->parent->nested; *ploop; ploop = &(*ploop)->next)
644 /* Free non-freed nested loops. */
645 for (loop = loop->nested; loop; loop = loop_next)
647 loop_next = loop->next;
648 gfc_cleanup_loop (loop);
655 set_ss_loop (gfc_ss *ss, gfc_loopinfo *loop)
659 for (; ss != gfc_ss_terminator; ss = ss->next)
663 if (ss->info->type == GFC_SS_SCALAR
664 || ss->info->type == GFC_SS_REFERENCE
665 || ss->info->type == GFC_SS_TEMP)
668 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
669 if (ss->info->data.array.subscript[n] != NULL)
670 set_ss_loop (ss->info->data.array.subscript[n], loop);
675 /* Associate a SS chain with a loop. */
678 gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
681 gfc_loopinfo *nested_loop;
683 if (head == gfc_ss_terminator)
686 set_ss_loop (head, loop);
689 for (; ss && ss != gfc_ss_terminator; ss = ss->next)
693 nested_loop = ss->nested_ss->loop;
695 /* More than one ss can belong to the same loop. Hence, we add the
696 loop to the chain only if it is different from the previously
697 added one, to avoid duplicate nested loops. */
698 if (nested_loop != loop->nested)
700 gcc_assert (nested_loop->parent == NULL);
701 nested_loop->parent = loop;
703 gcc_assert (nested_loop->next == NULL);
704 nested_loop->next = loop->nested;
705 loop->nested = nested_loop;
708 gcc_assert (nested_loop->parent == loop);
711 if (ss->next == gfc_ss_terminator)
712 ss->loop_chain = loop->ss;
714 ss->loop_chain = ss->next;
716 gcc_assert (ss == gfc_ss_terminator);
721 /* Generate an initializer for a static pointer or allocatable array. */
724 gfc_trans_static_array_pointer (gfc_symbol * sym)
728 gcc_assert (TREE_STATIC (sym->backend_decl));
729 /* Just zero the data member. */
730 type = TREE_TYPE (sym->backend_decl);
731 DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
735 /* If the bounds of SE's loop have not yet been set, see if they can be
736 determined from array spec AS, which is the array spec of a called
737 function. MAPPING maps the callee's dummy arguments to the values
738 that the caller is passing. Add any initialization and finalization
742 gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
743 gfc_se * se, gfc_array_spec * as)
745 int n, dim, total_dim;
754 if (!as || as->type != AS_EXPLICIT)
757 for (ss = se->ss; ss; ss = ss->parent)
759 total_dim += ss->loop->dimen;
760 for (n = 0; n < ss->loop->dimen; n++)
762 /* The bound is known, nothing to do. */
763 if (ss->loop->to[n] != NULL_TREE)
767 gcc_assert (dim < as->rank);
768 gcc_assert (ss->loop->dimen <= as->rank);
770 /* Evaluate the lower bound. */
771 gfc_init_se (&tmpse, NULL);
772 gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
773 gfc_add_block_to_block (&se->pre, &tmpse.pre);
774 gfc_add_block_to_block (&se->post, &tmpse.post);
775 lower = fold_convert (gfc_array_index_type, tmpse.expr);
777 /* ...and the upper bound. */
778 gfc_init_se (&tmpse, NULL);
779 gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
780 gfc_add_block_to_block (&se->pre, &tmpse.pre);
781 gfc_add_block_to_block (&se->post, &tmpse.post);
782 upper = fold_convert (gfc_array_index_type, tmpse.expr);
784 /* Set the upper bound of the loop to UPPER - LOWER. */
785 tmp = fold_build2_loc (input_location, MINUS_EXPR,
786 gfc_array_index_type, upper, lower);
787 tmp = gfc_evaluate_now (tmp, &se->pre);
788 ss->loop->to[n] = tmp;
792 gcc_assert (total_dim == as->rank);
796 /* Generate code to allocate an array temporary, or create a variable to
797 hold the data. If size is NULL, zero the descriptor so that the
798 callee will allocate the array. If DEALLOC is true, also generate code to
799 free the array afterwards.
801 If INITIAL is not NULL, it is packed using internal_pack and the result used
802 as data instead of allocating a fresh, unitialized area of memory.
804 Initialization code is added to PRE and finalization code to POST.
805 DYNAMIC is true if the caller may want to extend the array later
806 using realloc. This prevents us from putting the array on the stack. */
809 gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
810 gfc_array_info * info, tree size, tree nelem,
811 tree initial, bool dynamic, bool dealloc)
817 desc = info->descriptor;
818 info->offset = gfc_index_zero_node;
819 if (size == NULL_TREE || integer_zerop (size))
821 /* A callee allocated array. */
822 gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
827 /* Allocate the temporary. */
828 onstack = !dynamic && initial == NULL_TREE
829 && (gfc_option.flag_stack_arrays
830 || gfc_can_put_var_on_stack (size));
834 /* Make a temporary variable to hold the data. */
835 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
836 nelem, gfc_index_one_node);
837 tmp = gfc_evaluate_now (tmp, pre);
838 tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
840 tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
842 tmp = gfc_create_var (tmp, "A");
843 /* If we're here only because of -fstack-arrays we have to
844 emit a DECL_EXPR to make the gimplifier emit alloca calls. */
845 if (!gfc_can_put_var_on_stack (size))
846 gfc_add_expr_to_block (pre,
847 fold_build1_loc (input_location,
848 DECL_EXPR, TREE_TYPE (tmp),
850 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
851 gfc_conv_descriptor_data_set (pre, desc, tmp);
855 /* Allocate memory to hold the data or call internal_pack. */
856 if (initial == NULL_TREE)
858 tmp = gfc_call_malloc (pre, NULL, size);
859 tmp = gfc_evaluate_now (tmp, pre);
866 stmtblock_t do_copying;
868 tmp = TREE_TYPE (initial); /* Pointer to descriptor. */
869 gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
870 tmp = TREE_TYPE (tmp); /* The descriptor itself. */
871 tmp = gfc_get_element_type (tmp);
872 gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
873 packed = gfc_create_var (build_pointer_type (tmp), "data");
875 tmp = build_call_expr_loc (input_location,
876 gfor_fndecl_in_pack, 1, initial);
877 tmp = fold_convert (TREE_TYPE (packed), tmp);
878 gfc_add_modify (pre, packed, tmp);
880 tmp = build_fold_indirect_ref_loc (input_location,
882 source_data = gfc_conv_descriptor_data_get (tmp);
884 /* internal_pack may return source->data without any allocation
885 or copying if it is already packed. If that's the case, we
886 need to allocate and copy manually. */
888 gfc_start_block (&do_copying);
889 tmp = gfc_call_malloc (&do_copying, NULL, size);
890 tmp = fold_convert (TREE_TYPE (packed), tmp);
891 gfc_add_modify (&do_copying, packed, tmp);
892 tmp = gfc_build_memcpy_call (packed, source_data, size);
893 gfc_add_expr_to_block (&do_copying, tmp);
895 was_packed = fold_build2_loc (input_location, EQ_EXPR,
896 boolean_type_node, packed,
898 tmp = gfc_finish_block (&do_copying);
899 tmp = build3_v (COND_EXPR, was_packed, tmp,
900 build_empty_stmt (input_location));
901 gfc_add_expr_to_block (pre, tmp);
903 tmp = fold_convert (pvoid_type_node, packed);
906 gfc_conv_descriptor_data_set (pre, desc, tmp);
909 info->data = gfc_conv_descriptor_data_get (desc);
911 /* The offset is zero because we create temporaries with a zero
913 gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);
915 if (dealloc && !onstack)
917 /* Free the temporary. */
918 tmp = gfc_conv_descriptor_data_get (desc);
919 tmp = gfc_call_free (fold_convert (pvoid_type_node, tmp));
920 gfc_add_expr_to_block (post, tmp);
925 /* Get the scalarizer array dimension corresponding to actual array dimension
928 For example, if SS represents the array ref a(1,:,:,1), it is a
929 bidimensional scalarizer array, and the result would be 0 for ARRAY_DIM=1,
930 and 1 for ARRAY_DIM=2.
931 If SS represents transpose(a(:,1,1,:)), it is again a bidimensional
932 scalarizer array, and the result would be 1 for ARRAY_DIM=0 and 0 for
934 If SS represents sum(a(:,:,:,1), dim=1), it is a 2+1-dimensional scalarizer
935 array. If called on the inner ss, the result would be respectively 0,1,2 for
936 ARRAY_DIM=0,1,2. If called on the outer ss, the result would be 0,1
937 for ARRAY_DIM=1,2. */
940 get_scalarizer_dim_for_array_dim (gfc_ss *ss, int array_dim)
947 for (; ss; ss = ss->parent)
948 for (n = 0; n < ss->dimen; n++)
949 if (ss->dim[n] < array_dim)
952 return array_ref_dim;
957 innermost_ss (gfc_ss *ss)
959 while (ss->nested_ss != NULL)
967 /* Get the array reference dimension corresponding to the given loop dimension.
968 It is different from the true array dimension given by the dim array in
969 the case of a partial array reference (i.e. a(:,:,1,:) for example)
970 It is different from the loop dimension in the case of a transposed array.
974 get_array_ref_dim_for_loop_dim (gfc_ss *ss, int loop_dim)
976 return get_scalarizer_dim_for_array_dim (innermost_ss (ss),
981 /* Generate code to create and initialize the descriptor for a temporary
982 array. This is used for both temporaries needed by the scalarizer, and
983 functions returning arrays. Adjusts the loop variables to be
984 zero-based, and calculates the loop bounds for callee allocated arrays.
985 Allocate the array unless it's callee allocated (we have a callee
986 allocated array if 'callee_alloc' is true, or if loop->to[n] is
987 NULL_TREE for any n). Also fills in the descriptor, data and offset
988 fields of info if known. Returns the size of the array, or NULL for a
989 callee allocated array.
991 'eltype' == NULL signals that the temporary should be a class object.
992 The 'initial' expression is used to obtain the size of the dynamic
993 type; otherwise the allocation and initialisation proceeds as for any
996 PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
997 gfc_trans_allocate_array_storage. */
1000 gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post, gfc_ss * ss,
1001 tree eltype, tree initial, bool dynamic,
1002 bool dealloc, bool callee_alloc, locus * where)
1006 gfc_array_info *info;
1007 tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
1015 tree class_expr = NULL_TREE;
1016 int n, dim, tmp_dim;
1019 /* This signals a class array for which we need the size of the
1020 dynamic type. Generate an eltype and then the class expression. */
1021 if (eltype == NULL_TREE && initial)
1023 gcc_assert (POINTER_TYPE_P (TREE_TYPE (initial)));
1024 class_expr = build_fold_indirect_ref_loc (input_location, initial);
1025 eltype = TREE_TYPE (class_expr);
1026 eltype = gfc_get_element_type (eltype);
1027 /* Obtain the structure (class) expression. */
1028 class_expr = TREE_OPERAND (class_expr, 0);
1029 gcc_assert (class_expr);
1032 memset (from, 0, sizeof (from));
1033 memset (to, 0, sizeof (to));
1035 info = &ss->info->data.array;
1037 gcc_assert (ss->dimen > 0);
1038 gcc_assert (ss->loop->dimen == ss->dimen);
1040 if (gfc_option.warn_array_temp && where)
1041 gfc_warning ("Creating array temporary at %L", where);
1043 /* Set the lower bound to zero. */
1044 for (s = ss; s; s = s->parent)
1048 total_dim += loop->dimen;
1049 for (n = 0; n < loop->dimen; n++)
1053 /* Callee allocated arrays may not have a known bound yet. */
1055 loop->to[n] = gfc_evaluate_now (
1056 fold_build2_loc (input_location, MINUS_EXPR,
1057 gfc_array_index_type,
1058 loop->to[n], loop->from[n]),
1060 loop->from[n] = gfc_index_zero_node;
1062 /* We have just changed the loop bounds, we must clear the
1063 corresponding specloop, so that delta calculation is not skipped
1064 later in gfc_set_delta. */
1065 loop->specloop[n] = NULL;
1067 /* We are constructing the temporary's descriptor based on the loop
1068 dimensions. As the dimensions may be accessed in arbitrary order
1069 (think of transpose) the size taken from the n'th loop may not map
1070 to the n'th dimension of the array. We need to reconstruct loop
1071 infos in the right order before using it to set the descriptor
1073 tmp_dim = get_scalarizer_dim_for_array_dim (ss, dim);
1074 from[tmp_dim] = loop->from[n];
1075 to[tmp_dim] = loop->to[n];
1077 info->delta[dim] = gfc_index_zero_node;
1078 info->start[dim] = gfc_index_zero_node;
1079 info->end[dim] = gfc_index_zero_node;
1080 info->stride[dim] = gfc_index_one_node;
1084 /* Initialize the descriptor. */
1086 gfc_get_array_type_bounds (eltype, total_dim, 0, from, to, 1,
1087 GFC_ARRAY_UNKNOWN, true);
1088 desc = gfc_create_var (type, "atmp");
1089 GFC_DECL_PACKED_ARRAY (desc) = 1;
1091 info->descriptor = desc;
1092 size = gfc_index_one_node;
1094 /* Fill in the array dtype. */
1095 tmp = gfc_conv_descriptor_dtype (desc);
1096 gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
1099 Fill in the bounds and stride. This is a packed array, so:
1102 for (n = 0; n < rank; n++)
1105 delta = ubound[n] + 1 - lbound[n];
1106 size = size * delta;
1108 size = size * sizeof(element);
1111 or_expr = NULL_TREE;
1113 /* If there is at least one null loop->to[n], it is a callee allocated
1115 for (n = 0; n < total_dim; n++)
1116 if (to[n] == NULL_TREE)
1122 if (size == NULL_TREE)
1123 for (s = ss; s; s = s->parent)
1124 for (n = 0; n < s->loop->dimen; n++)
1126 dim = get_scalarizer_dim_for_array_dim (ss, s->dim[n]);
1128 /* For a callee allocated array express the loop bounds in terms
1129 of the descriptor fields. */
1130 tmp = fold_build2_loc (input_location,
1131 MINUS_EXPR, gfc_array_index_type,
1132 gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
1133 gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
1134 s->loop->to[n] = tmp;
1138 for (n = 0; n < total_dim; n++)
1140 /* Store the stride and bound components in the descriptor. */
1141 gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);
1143 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
1144 gfc_index_zero_node);
1146 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n], to[n]);
1148 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1149 gfc_array_index_type,
1150 to[n], gfc_index_one_node);
1152 /* Check whether the size for this dimension is negative. */
1153 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
1154 tmp, gfc_index_zero_node);
1155 cond = gfc_evaluate_now (cond, pre);
1160 or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
1161 boolean_type_node, or_expr, cond);
1163 size = fold_build2_loc (input_location, MULT_EXPR,
1164 gfc_array_index_type, size, tmp);
1165 size = gfc_evaluate_now (size, pre);
1169 /* Get the size of the array. */
1170 if (size && !callee_alloc)
1173 /* If or_expr is true, then the extent in at least one
1174 dimension is zero and the size is set to zero. */
1175 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
1176 or_expr, gfc_index_zero_node, size);
1179 if (class_expr == NULL_TREE)
1180 elemsize = fold_convert (gfc_array_index_type,
1181 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
1183 elemsize = gfc_vtable_size_get (class_expr);
1185 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1194 gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
1200 if (ss->dimen > ss->loop->temp_dim)
1201 ss->loop->temp_dim = ss->dimen;
1207 /* Return the number of iterations in a loop that starts at START,
1208 ends at END, and has step STEP. */
1211 gfc_get_iteration_count (tree start, tree end, tree step)
1216 type = TREE_TYPE (step);
1217 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
1218 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
1219 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
1220 build_int_cst (type, 1));
1221 tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
1222 build_int_cst (type, 0));
1223 return fold_convert (gfc_array_index_type, tmp);
1227 /* Extend the data in array DESC by EXTRA elements. */
1230 gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
1237 if (integer_zerop (extra))
1240 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);
1242 /* Add EXTRA to the upper bound. */
1243 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1245 gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);
1247 /* Get the value of the current data pointer. */
1248 arg0 = gfc_conv_descriptor_data_get (desc);
1250 /* Calculate the new array size. */
1251 size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
1252 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1253 ubound, gfc_index_one_node);
1254 arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
1255 fold_convert (size_type_node, tmp),
1256 fold_convert (size_type_node, size));
1258 /* Call the realloc() function. */
1259 tmp = gfc_call_realloc (pblock, arg0, arg1);
1260 gfc_conv_descriptor_data_set (pblock, desc, tmp);
1264 /* Return true if the bounds of iterator I can only be determined
1268 gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
1270 return (i->start->expr_type != EXPR_CONSTANT
1271 || i->end->expr_type != EXPR_CONSTANT
1272 || i->step->expr_type != EXPR_CONSTANT);
1276 /* Split the size of constructor element EXPR into the sum of two terms,
1277 one of which can be determined at compile time and one of which must
1278 be calculated at run time. Set *SIZE to the former and return true
1279 if the latter might be nonzero. */
1282 gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
1284 if (expr->expr_type == EXPR_ARRAY)
1285 return gfc_get_array_constructor_size (size, expr->value.constructor);
1286 else if (expr->rank > 0)
1288 /* Calculate everything at run time. */
1289 mpz_set_ui (*size, 0);
1294 /* A single element. */
1295 mpz_set_ui (*size, 1);
1301 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1302 of array constructor C. */
1305 gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
1313 mpz_set_ui (*size, 0);
1318 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1321 if (i && gfc_iterator_has_dynamic_bounds (i))
1325 dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
1328 /* Multiply the static part of the element size by the
1329 number of iterations. */
1330 mpz_sub (val, i->end->value.integer, i->start->value.integer);
1331 mpz_fdiv_q (val, val, i->step->value.integer);
1332 mpz_add_ui (val, val, 1);
1333 if (mpz_sgn (val) > 0)
1334 mpz_mul (len, len, val);
1336 mpz_set_ui (len, 0);
1338 mpz_add (*size, *size, len);
1347 /* Make sure offset is a variable. */
1350 gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
1353 /* We should have already created the offset variable. We cannot
1354 create it here because we may be in an inner scope. */
1355 gcc_assert (*offsetvar != NULL_TREE);
1356 gfc_add_modify (pblock, *offsetvar, *poffset);
1357 *poffset = *offsetvar;
1358 TREE_USED (*offsetvar) = 1;
1362 /* Variables needed for bounds-checking. */
1363 static bool first_len;
1364 static tree first_len_val;
1365 static bool typespec_chararray_ctor;
1368 gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
1369 tree offset, gfc_se * se, gfc_expr * expr)
1373 gfc_conv_expr (se, expr);
1375 /* Store the value. */
1376 tmp = build_fold_indirect_ref_loc (input_location,
1377 gfc_conv_descriptor_data_get (desc));
1378 tmp = gfc_build_array_ref (tmp, offset, NULL);
1380 if (expr->ts.type == BT_CHARACTER)
1382 int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
1385 esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
1386 esize = fold_convert (gfc_charlen_type_node, esize);
1387 esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
1388 gfc_charlen_type_node, esize,
1389 build_int_cst (gfc_charlen_type_node,
1390 gfc_character_kinds[i].bit_size / 8));
1392 gfc_conv_string_parameter (se);
1393 if (POINTER_TYPE_P (TREE_TYPE (tmp)))
1395 /* The temporary is an array of pointers. */
1396 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1397 gfc_add_modify (&se->pre, tmp, se->expr);
1401 /* The temporary is an array of string values. */
1402 tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
1403 /* We know the temporary and the value will be the same length,
1404 so can use memcpy. */
1405 gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
1406 se->string_length, se->expr, expr->ts.kind);
1408 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
1412 gfc_add_modify (&se->pre, first_len_val,
1418 /* Verify that all constructor elements are of the same
1420 tree cond = fold_build2_loc (input_location, NE_EXPR,
1421 boolean_type_node, first_len_val,
1423 gfc_trans_runtime_check
1424 (true, false, cond, &se->pre, &expr->where,
1425 "Different CHARACTER lengths (%ld/%ld) in array constructor",
1426 fold_convert (long_integer_type_node, first_len_val),
1427 fold_convert (long_integer_type_node, se->string_length));
1433 /* TODO: Should the frontend already have done this conversion? */
1434 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1435 gfc_add_modify (&se->pre, tmp, se->expr);
1438 gfc_add_block_to_block (pblock, &se->pre);
1439 gfc_add_block_to_block (pblock, &se->post);
1443 /* Add the contents of an array to the constructor. DYNAMIC is as for
1444 gfc_trans_array_constructor_value. */
1447 gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
1448 tree type ATTRIBUTE_UNUSED,
1449 tree desc, gfc_expr * expr,
1450 tree * poffset, tree * offsetvar,
1461 /* We need this to be a variable so we can increment it. */
1462 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1464 gfc_init_se (&se, NULL);
1466 /* Walk the array expression. */
1467 ss = gfc_walk_expr (expr);
1468 gcc_assert (ss != gfc_ss_terminator);
1470 /* Initialize the scalarizer. */
1471 gfc_init_loopinfo (&loop);
1472 gfc_add_ss_to_loop (&loop, ss);
1474 /* Initialize the loop. */
1475 gfc_conv_ss_startstride (&loop);
1476 gfc_conv_loop_setup (&loop, &expr->where);
1478 /* Make sure the constructed array has room for the new data. */
1481 /* Set SIZE to the total number of elements in the subarray. */
1482 size = gfc_index_one_node;
1483 for (n = 0; n < loop.dimen; n++)
1485 tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
1486 gfc_index_one_node);
1487 size = fold_build2_loc (input_location, MULT_EXPR,
1488 gfc_array_index_type, size, tmp);
1491 /* Grow the constructed array by SIZE elements. */
1492 gfc_grow_array (&loop.pre, desc, size);
1495 /* Make the loop body. */
1496 gfc_mark_ss_chain_used (ss, 1);
1497 gfc_start_scalarized_body (&loop, &body);
1498 gfc_copy_loopinfo_to_se (&se, &loop);
1501 gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
1502 gcc_assert (se.ss == gfc_ss_terminator);
1504 /* Increment the offset. */
1505 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1506 *poffset, gfc_index_one_node);
1507 gfc_add_modify (&body, *poffset, tmp);
1509 /* Finish the loop. */
1510 gfc_trans_scalarizing_loops (&loop, &body);
1511 gfc_add_block_to_block (&loop.pre, &loop.post);
1512 tmp = gfc_finish_block (&loop.pre);
1513 gfc_add_expr_to_block (pblock, tmp);
1515 gfc_cleanup_loop (&loop);
1519 /* Assign the values to the elements of an array constructor. DYNAMIC
1520 is true if descriptor DESC only contains enough data for the static
1521 size calculated by gfc_get_array_constructor_size. When true, memory
1522 for the dynamic parts must be allocated using realloc. */
1525 gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
1526 tree desc, gfc_constructor_base base,
1527 tree * poffset, tree * offsetvar,
1531 tree start = NULL_TREE;
1532 tree end = NULL_TREE;
1533 tree step = NULL_TREE;
1539 tree shadow_loopvar = NULL_TREE;
1540 gfc_saved_var saved_loopvar;
1543 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1545 /* If this is an iterator or an array, the offset must be a variable. */
1546 if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
1547 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1549 /* Shadowing the iterator avoids changing its value and saves us from
1550 keeping track of it. Further, it makes sure that there's always a
1551 backend-decl for the symbol, even if there wasn't one before,
1552 e.g. in the case of an iterator that appears in a specification
1553 expression in an interface mapping. */
1559 /* Evaluate loop bounds before substituting the loop variable
1560 in case they depend on it. Such a case is invalid, but it is
1561 not more expensive to do the right thing here.
1563 gfc_init_se (&se, NULL);
1564 gfc_conv_expr_val (&se, c->iterator->start);
1565 gfc_add_block_to_block (pblock, &se.pre);
1566 start = gfc_evaluate_now (se.expr, pblock);
1568 gfc_init_se (&se, NULL);
1569 gfc_conv_expr_val (&se, c->iterator->end);
1570 gfc_add_block_to_block (pblock, &se.pre);
1571 end = gfc_evaluate_now (se.expr, pblock);
1573 gfc_init_se (&se, NULL);
1574 gfc_conv_expr_val (&se, c->iterator->step);
1575 gfc_add_block_to_block (pblock, &se.pre);
1576 step = gfc_evaluate_now (se.expr, pblock);
1578 sym = c->iterator->var->symtree->n.sym;
1579 type = gfc_typenode_for_spec (&sym->ts);
1581 shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
1582 gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
1585 gfc_start_block (&body);
1587 if (c->expr->expr_type == EXPR_ARRAY)
1589 /* Array constructors can be nested. */
1590 gfc_trans_array_constructor_value (&body, type, desc,
1591 c->expr->value.constructor,
1592 poffset, offsetvar, dynamic);
1594 else if (c->expr->rank > 0)
1596 gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
1597 poffset, offsetvar, dynamic);
1601 /* This code really upsets the gimplifier so don't bother for now. */
1608 while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
1610 p = gfc_constructor_next (p);
1615 /* Scalar values. */
1616 gfc_init_se (&se, NULL);
1617 gfc_trans_array_ctor_element (&body, desc, *poffset,
1620 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1621 gfc_array_index_type,
1622 *poffset, gfc_index_one_node);
1626 /* Collect multiple scalar constants into a constructor. */
1627 vec<constructor_elt, va_gc> *v = NULL;
1631 HOST_WIDE_INT idx = 0;
1634 /* Count the number of consecutive scalar constants. */
1635 while (p && !(p->iterator
1636 || p->expr->expr_type != EXPR_CONSTANT))
1638 gfc_init_se (&se, NULL);
1639 gfc_conv_constant (&se, p->expr);
1641 if (c->expr->ts.type != BT_CHARACTER)
1642 se.expr = fold_convert (type, se.expr);
1643 /* For constant character array constructors we build
1644 an array of pointers. */
1645 else if (POINTER_TYPE_P (type))
1646 se.expr = gfc_build_addr_expr
1647 (gfc_get_pchar_type (p->expr->ts.kind),
1650 CONSTRUCTOR_APPEND_ELT (v,
1651 build_int_cst (gfc_array_index_type,
1655 p = gfc_constructor_next (p);
1658 bound = size_int (n - 1);
1659 /* Create an array type to hold them. */
1660 tmptype = build_range_type (gfc_array_index_type,
1661 gfc_index_zero_node, bound);
1662 tmptype = build_array_type (type, tmptype);
1664 init = build_constructor (tmptype, v);
1665 TREE_CONSTANT (init) = 1;
1666 TREE_STATIC (init) = 1;
1667 /* Create a static variable to hold the data. */
1668 tmp = gfc_create_var (tmptype, "data");
1669 TREE_STATIC (tmp) = 1;
1670 TREE_CONSTANT (tmp) = 1;
1671 TREE_READONLY (tmp) = 1;
1672 DECL_INITIAL (tmp) = init;
1675 /* Use BUILTIN_MEMCPY to assign the values. */
1676 tmp = gfc_conv_descriptor_data_get (desc);
1677 tmp = build_fold_indirect_ref_loc (input_location,
1679 tmp = gfc_build_array_ref (tmp, *poffset, NULL);
1680 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
1681 init = gfc_build_addr_expr (NULL_TREE, init);
1683 size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
1684 bound = build_int_cst (size_type_node, n * size);
1685 tmp = build_call_expr_loc (input_location,
1686 builtin_decl_explicit (BUILT_IN_MEMCPY),
1687 3, tmp, init, bound);
1688 gfc_add_expr_to_block (&body, tmp);
1690 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1691 gfc_array_index_type, *poffset,
1692 build_int_cst (gfc_array_index_type, n));
1694 if (!INTEGER_CST_P (*poffset))
1696 gfc_add_modify (&body, *offsetvar, *poffset);
1697 *poffset = *offsetvar;
1701 /* The frontend should already have done any expansions
1705 /* Pass the code as is. */
1706 tmp = gfc_finish_block (&body);
1707 gfc_add_expr_to_block (pblock, tmp);
1711 /* Build the implied do-loop. */
1712 stmtblock_t implied_do_block;
1718 loopbody = gfc_finish_block (&body);
1720 /* Create a new block that holds the implied-do loop. A temporary
1721 loop-variable is used. */
1722 gfc_start_block(&implied_do_block);
1724 /* Initialize the loop. */
1725 gfc_add_modify (&implied_do_block, shadow_loopvar, start);
1727 /* If this array expands dynamically, and the number of iterations
1728 is not constant, we won't have allocated space for the static
1729 part of C->EXPR's size. Do that now. */
1730 if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
1732 /* Get the number of iterations. */
1733 tmp = gfc_get_iteration_count (shadow_loopvar, end, step);
1735 /* Get the static part of C->EXPR's size. */
1736 gfc_get_array_constructor_element_size (&size, c->expr);
1737 tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1739 /* Grow the array by TMP * TMP2 elements. */
1740 tmp = fold_build2_loc (input_location, MULT_EXPR,
1741 gfc_array_index_type, tmp, tmp2);
1742 gfc_grow_array (&implied_do_block, desc, tmp);
1745 /* Generate the loop body. */
1746 exit_label = gfc_build_label_decl (NULL_TREE);
1747 gfc_start_block (&body);
1749 /* Generate the exit condition. Depending on the sign of
1750 the step variable we have to generate the correct
1752 tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
1753 step, build_int_cst (TREE_TYPE (step), 0));
1754 cond = fold_build3_loc (input_location, COND_EXPR,
1755 boolean_type_node, tmp,
1756 fold_build2_loc (input_location, GT_EXPR,
1757 boolean_type_node, shadow_loopvar, end),
1758 fold_build2_loc (input_location, LT_EXPR,
1759 boolean_type_node, shadow_loopvar, end));
1760 tmp = build1_v (GOTO_EXPR, exit_label);
1761 TREE_USED (exit_label) = 1;
1762 tmp = build3_v (COND_EXPR, cond, tmp,
1763 build_empty_stmt (input_location));
1764 gfc_add_expr_to_block (&body, tmp);
1766 /* The main loop body. */
1767 gfc_add_expr_to_block (&body, loopbody);
1769 /* Increase loop variable by step. */
1770 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1771 TREE_TYPE (shadow_loopvar), shadow_loopvar,
1773 gfc_add_modify (&body, shadow_loopvar, tmp);
1775 /* Finish the loop. */
1776 tmp = gfc_finish_block (&body);
1777 tmp = build1_v (LOOP_EXPR, tmp);
1778 gfc_add_expr_to_block (&implied_do_block, tmp);
1780 /* Add the exit label. */
1781 tmp = build1_v (LABEL_EXPR, exit_label);
1782 gfc_add_expr_to_block (&implied_do_block, tmp);
1784 /* Finish the implied-do loop. */
1785 tmp = gfc_finish_block(&implied_do_block);
1786 gfc_add_expr_to_block(pblock, tmp);
1788 gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
1795 /* A catch-all to obtain the string length for anything that is not
1796 a substring of non-constant length, a constant, array or variable. */
1799 get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
1803 /* Don't bother if we already know the length is a constant. */
1804 if (*len && INTEGER_CST_P (*len))
1807 if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
1808 && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
1811 gfc_conv_const_charlen (e->ts.u.cl);
1812 *len = e->ts.u.cl->backend_decl;
1816 /* Otherwise, be brutal even if inefficient. */
1817 gfc_init_se (&se, NULL);
1819 /* No function call, in case of side effects. */
1820 se.no_function_call = 1;
1822 gfc_conv_expr (&se, e);
1824 gfc_conv_expr_descriptor (&se, e);
1826 /* Fix the value. */
1827 *len = gfc_evaluate_now (se.string_length, &se.pre);
1829 gfc_add_block_to_block (block, &se.pre);
1830 gfc_add_block_to_block (block, &se.post);
1832 e->ts.u.cl->backend_decl = *len;
1837 /* Figure out the string length of a variable reference expression.
1838 Used by get_array_ctor_strlen. */
1841 get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
1847 /* Don't bother if we already know the length is a constant. */
1848 if (*len && INTEGER_CST_P (*len))
1851 ts = &expr->symtree->n.sym->ts;
1852 for (ref = expr->ref; ref; ref = ref->next)
1857 /* Array references don't change the string length. */
1861 /* Use the length of the component. */
1862 ts = &ref->u.c.component->ts;
1866 if (ref->u.ss.start->expr_type != EXPR_CONSTANT
1867 || ref->u.ss.end->expr_type != EXPR_CONSTANT)
1869 /* Note that this might evaluate expr. */
1870 get_array_ctor_all_strlen (block, expr, len);
1873 mpz_init_set_ui (char_len, 1);
1874 mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
1875 mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
1876 *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
1877 *len = convert (gfc_charlen_type_node, *len);
1878 mpz_clear (char_len);
1886 *len = ts->u.cl->backend_decl;
1890 /* Figure out the string length of a character array constructor.
1891 If len is NULL, don't calculate the length; this happens for recursive calls
1892 when a sub-array-constructor is an element but not at the first position,
1893 so when we're not interested in the length.
1894 Returns TRUE if all elements are character constants. */
1897 get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
1904 if (gfc_constructor_first (base) == NULL)
1907 *len = build_int_cstu (gfc_charlen_type_node, 0);
1911 /* Loop over all constructor elements to find out is_const, but in len we
1912 want to store the length of the first, not the last, element. We can
1913 of course exit the loop as soon as is_const is found to be false. */
1914 for (c = gfc_constructor_first (base);
1915 c && is_const; c = gfc_constructor_next (c))
1917 switch (c->expr->expr_type)
1920 if (len && !(*len && INTEGER_CST_P (*len)))
1921 *len = build_int_cstu (gfc_charlen_type_node,
1922 c->expr->value.character.length);
1926 if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
1933 get_array_ctor_var_strlen (block, c->expr, len);
1939 get_array_ctor_all_strlen (block, c->expr, len);
1943 /* After the first iteration, we don't want the length modified. */
1950 /* Check whether the array constructor C consists entirely of constant
1951 elements, and if so returns the number of those elements, otherwise
1952 return zero. Note, an empty or NULL array constructor returns zero. */
1954 unsigned HOST_WIDE_INT
1955 gfc_constant_array_constructor_p (gfc_constructor_base base)
1957 unsigned HOST_WIDE_INT nelem = 0;
1959 gfc_constructor *c = gfc_constructor_first (base);
1963 || c->expr->rank > 0
1964 || c->expr->expr_type != EXPR_CONSTANT)
1966 c = gfc_constructor_next (c);
1973 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
1974 and the tree type of it's elements, TYPE, return a static constant
1975 variable that is compile-time initialized. */
1978 gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
1980 tree tmptype, init, tmp;
1981 HOST_WIDE_INT nelem;
1986 vec<constructor_elt, va_gc> *v = NULL;
1988 /* First traverse the constructor list, converting the constants
1989 to tree to build an initializer. */
1991 c = gfc_constructor_first (expr->value.constructor);
1994 gfc_init_se (&se, NULL);
1995 gfc_conv_constant (&se, c->expr);
1996 if (c->expr->ts.type != BT_CHARACTER)
1997 se.expr = fold_convert (type, se.expr);
1998 else if (POINTER_TYPE_P (type))
1999 se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
2001 CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
2003 c = gfc_constructor_next (c);
2007 /* Next determine the tree type for the array. We use the gfortran
2008 front-end's gfc_get_nodesc_array_type in order to create a suitable
2009 GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
2011 memset (&as, 0, sizeof (gfc_array_spec));
2013 as.rank = expr->rank;
2014 as.type = AS_EXPLICIT;
2017 as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
2018 as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
2022 for (i = 0; i < expr->rank; i++)
2024 int tmp = (int) mpz_get_si (expr->shape[i]);
2025 as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
2026 as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
2030 tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
2032 /* as is not needed anymore. */
2033 for (i = 0; i < as.rank + as.corank; i++)
2035 gfc_free_expr (as.lower[i]);
2036 gfc_free_expr (as.upper[i]);
2039 init = build_constructor (tmptype, v);
2041 TREE_CONSTANT (init) = 1;
2042 TREE_STATIC (init) = 1;
2044 tmp = gfc_create_var (tmptype, "A");
2045 TREE_STATIC (tmp) = 1;
2046 TREE_CONSTANT (tmp) = 1;
2047 TREE_READONLY (tmp) = 1;
2048 DECL_INITIAL (tmp) = init;
2054 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
2055 This mostly initializes the scalarizer state info structure with the
2056 appropriate values to directly use the array created by the function
2057 gfc_build_constant_array_constructor. */
2060 trans_constant_array_constructor (gfc_ss * ss, tree type)
2062 gfc_array_info *info;
2066 tmp = gfc_build_constant_array_constructor (ss->info->expr, type);
2068 info = &ss->info->data.array;
2070 info->descriptor = tmp;
2071 info->data = gfc_build_addr_expr (NULL_TREE, tmp);
2072 info->offset = gfc_index_zero_node;
2074 for (i = 0; i < ss->dimen; i++)
2076 info->delta[i] = gfc_index_zero_node;
2077 info->start[i] = gfc_index_zero_node;
2078 info->end[i] = gfc_index_zero_node;
2079 info->stride[i] = gfc_index_one_node;
2085 get_rank (gfc_loopinfo *loop)
2090 for (; loop; loop = loop->parent)
2091 rank += loop->dimen;
2097 /* Helper routine of gfc_trans_array_constructor to determine if the
2098 bounds of the loop specified by LOOP are constant and simple enough
2099 to use with trans_constant_array_constructor. Returns the
2100 iteration count of the loop if suitable, and NULL_TREE otherwise. */
2103 constant_array_constructor_loop_size (gfc_loopinfo * l)
2106 tree size = gfc_index_one_node;
2110 total_dim = get_rank (l);
2112 for (loop = l; loop; loop = loop->parent)
2114 for (i = 0; i < loop->dimen; i++)
2116 /* If the bounds aren't constant, return NULL_TREE. */
2117 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
2119 if (!integer_zerop (loop->from[i]))
2121 /* Only allow nonzero "from" in one-dimensional arrays. */
2124 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2125 gfc_array_index_type,
2126 loop->to[i], loop->from[i]);
2130 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2131 gfc_array_index_type, tmp, gfc_index_one_node);
2132 size = fold_build2_loc (input_location, MULT_EXPR,
2133 gfc_array_index_type, size, tmp);
2142 get_loop_upper_bound_for_array (gfc_ss *array, int array_dim)
2147 gcc_assert (array->nested_ss == NULL);
2149 for (ss = array; ss; ss = ss->parent)
2150 for (n = 0; n < ss->loop->dimen; n++)
2151 if (array_dim == get_array_ref_dim_for_loop_dim (ss, n))
2152 return &(ss->loop->to[n]);
2158 static gfc_loopinfo *
2159 outermost_loop (gfc_loopinfo * loop)
2161 while (loop->parent != NULL)
2162 loop = loop->parent;
2168 /* Array constructors are handled by constructing a temporary, then using that
2169 within the scalarization loop. This is not optimal, but seems by far the
2173 trans_array_constructor (gfc_ss * ss, locus * where)
2175 gfc_constructor_base c;
2183 bool old_first_len, old_typespec_chararray_ctor;
2184 tree old_first_len_val;
2185 gfc_loopinfo *loop, *outer_loop;
2186 gfc_ss_info *ss_info;
2190 /* Save the old values for nested checking. */
2191 old_first_len = first_len;
2192 old_first_len_val = first_len_val;
2193 old_typespec_chararray_ctor = typespec_chararray_ctor;
2196 outer_loop = outermost_loop (loop);
2198 expr = ss_info->expr;
2200 /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
2201 typespec was given for the array constructor. */
2202 typespec_chararray_ctor = (expr->ts.u.cl
2203 && expr->ts.u.cl->length_from_typespec);
2205 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2206 && expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
2208 first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
2212 gcc_assert (ss->dimen == ss->loop->dimen);
2214 c = expr->value.constructor;
2215 if (expr->ts.type == BT_CHARACTER)
2219 /* get_array_ctor_strlen walks the elements of the constructor, if a
2220 typespec was given, we already know the string length and want the one
2222 if (typespec_chararray_ctor && expr->ts.u.cl->length
2223 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
2227 const_string = false;
2228 gfc_init_se (&length_se, NULL);
2229 gfc_conv_expr_type (&length_se, expr->ts.u.cl->length,
2230 gfc_charlen_type_node);
2231 ss_info->string_length = length_se.expr;
2232 gfc_add_block_to_block (&outer_loop->pre, &length_se.pre);
2233 gfc_add_block_to_block (&outer_loop->post, &length_se.post);
2236 const_string = get_array_ctor_strlen (&outer_loop->pre, c,
2237 &ss_info->string_length);
2239 /* Complex character array constructors should have been taken care of
2240 and not end up here. */
2241 gcc_assert (ss_info->string_length);
2243 expr->ts.u.cl->backend_decl = ss_info->string_length;
2245 type = gfc_get_character_type_len (expr->ts.kind, ss_info->string_length);
2247 type = build_pointer_type (type);
2250 type = gfc_typenode_for_spec (&expr->ts);
2252 /* See if the constructor determines the loop bounds. */
2255 loop_ubound0 = get_loop_upper_bound_for_array (ss, 0);
2257 if (expr->shape && get_rank (loop) > 1 && *loop_ubound0 == NULL_TREE)
2259 /* We have a multidimensional parameter. */
2260 for (s = ss; s; s = s->parent)
2263 for (n = 0; n < s->loop->dimen; n++)
2265 s->loop->from[n] = gfc_index_zero_node;
2266 s->loop->to[n] = gfc_conv_mpz_to_tree (expr->shape[s->dim[n]],
2267 gfc_index_integer_kind);
2268 s->loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
2269 gfc_array_index_type,
2271 gfc_index_one_node);
2276 if (*loop_ubound0 == NULL_TREE)
2280 /* We should have a 1-dimensional, zero-based loop. */
2281 gcc_assert (loop->parent == NULL && loop->nested == NULL);
2282 gcc_assert (loop->dimen == 1);
2283 gcc_assert (integer_zerop (loop->from[0]));
2285 /* Split the constructor size into a static part and a dynamic part.
2286 Allocate the static size up-front and record whether the dynamic
2287 size might be nonzero. */
2289 dynamic = gfc_get_array_constructor_size (&size, c);
2290 mpz_sub_ui (size, size, 1);
2291 loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
2295 /* Special case constant array constructors. */
2298 unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
2301 tree size = constant_array_constructor_loop_size (loop);
2302 if (size && compare_tree_int (size, nelem) == 0)
2304 trans_constant_array_constructor (ss, type);
2310 gfc_trans_create_temp_array (&outer_loop->pre, &outer_loop->post, ss, type,
2311 NULL_TREE, dynamic, true, false, where);
2313 desc = ss_info->data.array.descriptor;
2314 offset = gfc_index_zero_node;
2315 offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
2316 TREE_NO_WARNING (offsetvar) = 1;
2317 TREE_USED (offsetvar) = 0;
2318 gfc_trans_array_constructor_value (&outer_loop->pre, type, desc, c,
2319 &offset, &offsetvar, dynamic);
2321 /* If the array grows dynamically, the upper bound of the loop variable
2322 is determined by the array's final upper bound. */
2325 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2326 gfc_array_index_type,
2327 offsetvar, gfc_index_one_node);
2328 tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
2329 gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
2330 if (*loop_ubound0 && TREE_CODE (*loop_ubound0) == VAR_DECL)
2331 gfc_add_modify (&outer_loop->pre, *loop_ubound0, tmp);
2333 *loop_ubound0 = tmp;
2336 if (TREE_USED (offsetvar))
2337 pushdecl (offsetvar);
2339 gcc_assert (INTEGER_CST_P (offset));
2342 /* Disable bound checking for now because it's probably broken. */
2343 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2350 /* Restore old values of globals. */
2351 first_len = old_first_len;
2352 first_len_val = old_first_len_val;
2353 typespec_chararray_ctor = old_typespec_chararray_ctor;
2357 /* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
2358 called after evaluating all of INFO's vector dimensions. Go through
2359 each such vector dimension and see if we can now fill in any missing
2363 set_vector_loop_bounds (gfc_ss * ss)
2365 gfc_loopinfo *loop, *outer_loop;
2366 gfc_array_info *info;
2374 outer_loop = outermost_loop (ss->loop);
2376 info = &ss->info->data.array;
2378 for (; ss; ss = ss->parent)
2382 for (n = 0; n < loop->dimen; n++)
2385 if (info->ref->u.ar.dimen_type[dim] != DIMEN_VECTOR
2386 || loop->to[n] != NULL)
2389 /* Loop variable N indexes vector dimension DIM, and we don't
2390 yet know the upper bound of loop variable N. Set it to the
2391 difference between the vector's upper and lower bounds. */
2392 gcc_assert (loop->from[n] == gfc_index_zero_node);
2393 gcc_assert (info->subscript[dim]
2394 && info->subscript[dim]->info->type == GFC_SS_VECTOR);
2396 gfc_init_se (&se, NULL);
2397 desc = info->subscript[dim]->info->data.array.descriptor;
2398 zero = gfc_rank_cst[0];
2399 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2400 gfc_array_index_type,
2401 gfc_conv_descriptor_ubound_get (desc, zero),
2402 gfc_conv_descriptor_lbound_get (desc, zero));
2403 tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
2410 /* Add the pre and post chains for all the scalar expressions in a SS chain
2411 to loop. This is called after the loop parameters have been calculated,
2412 but before the actual scalarizing loops. */
2415 gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
2418 gfc_loopinfo *nested_loop, *outer_loop;
2420 gfc_ss_info *ss_info;
2421 gfc_array_info *info;
2425 /* Don't evaluate the arguments for realloc_lhs_loop_for_fcn_call; otherwise,
2426 arguments could get evaluated multiple times. */
2427 if (ss->is_alloc_lhs)
2430 outer_loop = outermost_loop (loop);
2432 /* TODO: This can generate bad code if there are ordering dependencies,
2433 e.g., a callee allocated function and an unknown size constructor. */
2434 gcc_assert (ss != NULL);
2436 for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
2440 /* Cross loop arrays are handled from within the most nested loop. */
2441 if (ss->nested_ss != NULL)
2445 expr = ss_info->expr;
2446 info = &ss_info->data.array;
2448 switch (ss_info->type)
2451 /* Scalar expression. Evaluate this now. This includes elemental
2452 dimension indices, but not array section bounds. */
2453 gfc_init_se (&se, NULL);
2454 gfc_conv_expr (&se, expr);
2455 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2457 if (expr->ts.type != BT_CHARACTER)
2459 /* Move the evaluation of scalar expressions outside the
2460 scalarization loop, except for WHERE assignments. */
2462 se.expr = convert(gfc_array_index_type, se.expr);
2463 if (!ss_info->where)
2464 se.expr = gfc_evaluate_now (se.expr, &outer_loop->pre);
2465 gfc_add_block_to_block (&outer_loop->pre, &se.post);
2468 gfc_add_block_to_block (&outer_loop->post, &se.post);
2470 ss_info->data.scalar.value = se.expr;
2471 ss_info->string_length = se.string_length;
2474 case GFC_SS_REFERENCE:
2475 /* Scalar argument to elemental procedure. */
2476 gfc_init_se (&se, NULL);
2477 if (ss_info->can_be_null_ref)
2479 /* If the actual argument can be absent (in other words, it can
2480 be a NULL reference), don't try to evaluate it; pass instead
2481 the reference directly. */
2482 gfc_conv_expr_reference (&se, expr);
2486 /* Otherwise, evaluate the argument outside the loop and pass
2487 a reference to the value. */
2488 gfc_conv_expr (&se, expr);
2490 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2491 gfc_add_block_to_block (&outer_loop->post, &se.post);
2492 if (gfc_is_class_scalar_expr (expr))
2493 /* This is necessary because the dynamic type will always be
2494 large than the declared type. In consequence, assigning
2495 the value to a temporary could segfault.
2496 OOP-TODO: see if this is generally correct or is the value
2497 has to be written to an allocated temporary, whose address
2498 is passed via ss_info. */
2499 ss_info->data.scalar.value = se.expr;
2501 ss_info->data.scalar.value = gfc_evaluate_now (se.expr,
2504 ss_info->string_length = se.string_length;
2507 case GFC_SS_SECTION:
2508 /* Add the expressions for scalar and vector subscripts. */
2509 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2510 if (info->subscript[n])
2511 gfc_add_loop_ss_code (loop, info->subscript[n], true, where);
2513 set_vector_loop_bounds (ss);
2517 /* Get the vector's descriptor and store it in SS. */
2518 gfc_init_se (&se, NULL);
2519 gfc_conv_expr_descriptor (&se, expr);
2520 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2521 gfc_add_block_to_block (&outer_loop->post, &se.post);
2522 info->descriptor = se.expr;
2525 case GFC_SS_INTRINSIC:
2526 gfc_add_intrinsic_ss_code (loop, ss);
2529 case GFC_SS_FUNCTION:
2530 /* Array function return value. We call the function and save its
2531 result in a temporary for use inside the loop. */
2532 gfc_init_se (&se, NULL);
2535 gfc_conv_expr (&se, expr);
2536 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2537 gfc_add_block_to_block (&outer_loop->post, &se.post);
2538 ss_info->string_length = se.string_length;
2541 case GFC_SS_CONSTRUCTOR:
2542 if (expr->ts.type == BT_CHARACTER
2543 && ss_info->string_length == NULL
2545 && expr->ts.u.cl->length)
2547 gfc_init_se (&se, NULL);
2548 gfc_conv_expr_type (&se, expr->ts.u.cl->length,
2549 gfc_charlen_type_node);
2550 ss_info->string_length = se.expr;
2551 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2552 gfc_add_block_to_block (&outer_loop->post, &se.post);
2554 trans_array_constructor (ss, where);
2558 case GFC_SS_COMPONENT:
2559 /* Do nothing. These are handled elsewhere. */
2568 for (nested_loop = loop->nested; nested_loop;
2569 nested_loop = nested_loop->next)
2570 gfc_add_loop_ss_code (nested_loop, nested_loop->ss, subscript, where);
2574 /* Translate expressions for the descriptor and data pointer of a SS. */
2578 gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
2581 gfc_ss_info *ss_info;
2582 gfc_array_info *info;
2586 info = &ss_info->data.array;
2588 /* Get the descriptor for the array to be scalarized. */
2589 gcc_assert (ss_info->expr->expr_type == EXPR_VARIABLE);
2590 gfc_init_se (&se, NULL);
2591 se.descriptor_only = 1;
2592 gfc_conv_expr_lhs (&se, ss_info->expr);
2593 gfc_add_block_to_block (block, &se.pre);
2594 info->descriptor = se.expr;
2595 ss_info->string_length = se.string_length;
2599 /* Also the data pointer. */
2600 tmp = gfc_conv_array_data (se.expr);
2601 /* If this is a variable or address of a variable we use it directly.
2602 Otherwise we must evaluate it now to avoid breaking dependency
2603 analysis by pulling the expressions for elemental array indices
2606 || (TREE_CODE (tmp) == ADDR_EXPR
2607 && DECL_P (TREE_OPERAND (tmp, 0)))))
2608 tmp = gfc_evaluate_now (tmp, block);
2611 tmp = gfc_conv_array_offset (se.expr);
2612 info->offset = gfc_evaluate_now (tmp, block);
2614 /* Make absolutely sure that the saved_offset is indeed saved
2615 so that the variable is still accessible after the loops
2617 info->saved_offset = info->offset;
2622 /* Initialize a gfc_loopinfo structure. */
2625 gfc_init_loopinfo (gfc_loopinfo * loop)
2629 memset (loop, 0, sizeof (gfc_loopinfo));
2630 gfc_init_block (&loop->pre);
2631 gfc_init_block (&loop->post);
2633 /* Initially scalarize in order and default to no loop reversal. */
2634 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2637 loop->reverse[n] = GFC_INHIBIT_REVERSE;
2640 loop->ss = gfc_ss_terminator;
2644 /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
2648 gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
2654 /* Return an expression for the data pointer of an array. */
2657 gfc_conv_array_data (tree descriptor)
2661 type = TREE_TYPE (descriptor);
2662 if (GFC_ARRAY_TYPE_P (type))
2664 if (TREE_CODE (type) == POINTER_TYPE)
2668 /* Descriptorless arrays. */
2669 return gfc_build_addr_expr (NULL_TREE, descriptor);
2673 return gfc_conv_descriptor_data_get (descriptor);
2677 /* Return an expression for the base offset of an array. */
2680 gfc_conv_array_offset (tree descriptor)
2684 type = TREE_TYPE (descriptor);
2685 if (GFC_ARRAY_TYPE_P (type))
2686 return GFC_TYPE_ARRAY_OFFSET (type);
2688 return gfc_conv_descriptor_offset_get (descriptor);
2692 /* Get an expression for the array stride. */
2695 gfc_conv_array_stride (tree descriptor, int dim)
2700 type = TREE_TYPE (descriptor);
2702 /* For descriptorless arrays use the array size. */
2703 tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
2704 if (tmp != NULL_TREE)
2707 tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
2712 /* Like gfc_conv_array_stride, but for the lower bound. */
2715 gfc_conv_array_lbound (tree descriptor, int dim)
2720 type = TREE_TYPE (descriptor);
2722 tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
2723 if (tmp != NULL_TREE)
2726 tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
2731 /* Like gfc_conv_array_stride, but for the upper bound. */
2734 gfc_conv_array_ubound (tree descriptor, int dim)
2739 type = TREE_TYPE (descriptor);
2741 tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
2742 if (tmp != NULL_TREE)
2745 /* This should only ever happen when passing an assumed shape array
2746 as an actual parameter. The value will never be used. */
2747 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
2748 return gfc_index_zero_node;
2750 tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
2755 /* Generate code to perform an array index bound check. */
2758 trans_array_bound_check (gfc_se * se, gfc_ss *ss, tree index, int n,
2759 locus * where, bool check_upper)
2762 tree tmp_lo, tmp_up;
2765 const char * name = NULL;
2767 if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
2770 descriptor = ss->info->data.array.descriptor;
2772 index = gfc_evaluate_now (index, &se->pre);
2774 /* We find a name for the error message. */
2775 name = ss->info->expr->symtree->n.sym->name;
2776 gcc_assert (name != NULL);
2778 if (TREE_CODE (descriptor) == VAR_DECL)
2779 name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
2781 /* If upper bound is present, include both bounds in the error message. */
2784 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2785 tmp_up = gfc_conv_array_ubound (descriptor, n);
2788 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2789 "outside of expected range (%%ld:%%ld)", n+1, name);
2791 asprintf (&msg, "Index '%%ld' of dimension %d "
2792 "outside of expected range (%%ld:%%ld)", n+1);
2794 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2796 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2797 fold_convert (long_integer_type_node, index),
2798 fold_convert (long_integer_type_node, tmp_lo),
2799 fold_convert (long_integer_type_node, tmp_up));
2800 fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
2802 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2803 fold_convert (long_integer_type_node, index),
2804 fold_convert (long_integer_type_node, tmp_lo),
2805 fold_convert (long_integer_type_node, tmp_up));
2810 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2813 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2814 "below lower bound of %%ld", n+1, name);
2816 asprintf (&msg, "Index '%%ld' of dimension %d "
2817 "below lower bound of %%ld", n+1);
2819 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2821 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2822 fold_convert (long_integer_type_node, index),
2823 fold_convert (long_integer_type_node, tmp_lo));
2831 /* Return the offset for an index. Performs bound checking for elemental
2832 dimensions. Single element references are processed separately.
2833 DIM is the array dimension, I is the loop dimension. */
2836 conv_array_index_offset (gfc_se * se, gfc_ss * ss, int dim, int i,
2837 gfc_array_ref * ar, tree stride)
2839 gfc_array_info *info;
2844 info = &ss->info->data.array;
2846 /* Get the index into the array for this dimension. */
2849 gcc_assert (ar->type != AR_ELEMENT);
2850 switch (ar->dimen_type[dim])
2852 case DIMEN_THIS_IMAGE:
2856 /* Elemental dimension. */
2857 gcc_assert (info->subscript[dim]
2858 && info->subscript[dim]->info->type == GFC_SS_SCALAR);
2859 /* We've already translated this value outside the loop. */
2860 index = info->subscript[dim]->info->data.scalar.value;
2862 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2863 ar->as->type != AS_ASSUMED_SIZE
2864 || dim < ar->dimen - 1);
2868 gcc_assert (info && se->loop);
2869 gcc_assert (info->subscript[dim]
2870 && info->subscript[dim]->info->type == GFC_SS_VECTOR);
2871 desc = info->subscript[dim]->info->data.array.descriptor;
2873 /* Get a zero-based index into the vector. */
2874 index = fold_build2_loc (input_location, MINUS_EXPR,
2875 gfc_array_index_type,
2876 se->loop->loopvar[i], se->loop->from[i]);
2878 /* Multiply the index by the stride. */
2879 index = fold_build2_loc (input_location, MULT_EXPR,
2880 gfc_array_index_type,
2881 index, gfc_conv_array_stride (desc, 0));
2883 /* Read the vector to get an index into info->descriptor. */
2884 data = build_fold_indirect_ref_loc (input_location,
2885 gfc_conv_array_data (desc));
2886 index = gfc_build_array_ref (data, index, NULL);
2887 index = gfc_evaluate_now (index, &se->pre);
2888 index = fold_convert (gfc_array_index_type, index);
2890 /* Do any bounds checking on the final info->descriptor index. */
2891 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2892 ar->as->type != AS_ASSUMED_SIZE
2893 || dim < ar->dimen - 1);
2897 /* Scalarized dimension. */
2898 gcc_assert (info && se->loop);
2900 /* Multiply the loop variable by the stride and delta. */
2901 index = se->loop->loopvar[i];
2902 if (!integer_onep (info->stride[dim]))
2903 index = fold_build2_loc (input_location, MULT_EXPR,
2904 gfc_array_index_type, index,
2906 if (!integer_zerop (info->delta[dim]))
2907 index = fold_build2_loc (input_location, PLUS_EXPR,
2908 gfc_array_index_type, index,
2918 /* Temporary array or derived type component. */
2919 gcc_assert (se->loop);
2920 index = se->loop->loopvar[se->loop->order[i]];
2922 /* Pointer functions can have stride[0] different from unity.
2923 Use the stride returned by the function call and stored in
2924 the descriptor for the temporary. */
2925 if (se->ss && se->ss->info->type == GFC_SS_FUNCTION
2926 && se->ss->info->expr
2927 && se->ss->info->expr->symtree
2928 && se->ss->info->expr->symtree->n.sym->result
2929 && se->ss->info->expr->symtree->n.sym->result->attr.pointer)
2930 stride = gfc_conv_descriptor_stride_get (info->descriptor,
2933 if (!integer_zerop (info->delta[dim]))
2934 index = fold_build2_loc (input_location, PLUS_EXPR,
2935 gfc_array_index_type, index, info->delta[dim]);
2938 /* Multiply by the stride. */
2939 if (!integer_onep (stride))
2940 index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2947 /* Build a scalarized array reference using the vptr 'size'. */
2950 build_class_array_ref (gfc_se *se, tree base, tree index)
2957 gfc_expr *expr = se->ss->info->expr;
2962 if (expr == NULL || expr->ts.type != BT_CLASS)
2965 if (expr->symtree && expr->symtree->n.sym->ts.type == BT_CLASS)
2966 ts = &expr->symtree->n.sym->ts;
2971 for (ref = expr->ref; ref; ref = ref->next)
2973 if (ref->type == REF_COMPONENT
2974 && ref->u.c.component->ts.type == BT_CLASS
2975 && ref->next && ref->next->type == REF_COMPONENT
2976 && strcmp (ref->next->u.c.component->name, "_data") == 0
2978 && ref->next->next->type == REF_ARRAY
2979 && ref->next->next->u.ar.type != AR_ELEMENT)
2981 ts = &ref->u.c.component->ts;
2990 if (class_ref == NULL)
2991 decl = expr->symtree->n.sym->backend_decl;
2994 /* Remove everything after the last class reference, convert the
2995 expression and then recover its tailend once more. */
2997 ref = class_ref->next;
2998 class_ref->next = NULL;
2999 gfc_init_se (&tmpse, NULL);
3000 gfc_conv_expr (&tmpse, expr);
3002 class_ref->next = ref;
3005 size = gfc_vtable_size_get (decl);
3007 /* Build the address of the element. */
3008 type = TREE_TYPE (TREE_TYPE (base));
3009 size = fold_convert (TREE_TYPE (index), size);
3010 offset = fold_build2_loc (input_location, MULT_EXPR,
3011 gfc_array_index_type,
3013 tmp = gfc_build_addr_expr (pvoid_type_node, base);
3014 tmp = fold_build_pointer_plus_loc (input_location, tmp, offset);
3015 tmp = fold_convert (build_pointer_type (type), tmp);
3017 /* Return the element in the se expression. */
3018 se->expr = build_fold_indirect_ref_loc (input_location, tmp);
3023 /* Build a scalarized reference to an array. */
3026 gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
3028 gfc_array_info *info;
3029 tree decl = NULL_TREE;
3037 expr = ss->info->expr;
3038 info = &ss->info->data.array;
3040 n = se->loop->order[0];
3044 index = conv_array_index_offset (se, ss, ss->dim[n], n, ar, info->stride0);
3045 /* Add the offset for this dimension to the stored offset for all other
3047 if (!integer_zerop (info->offset))
3048 index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3049 index, info->offset);
3051 if (expr && is_subref_array (expr))
3052 decl = expr->symtree->n.sym->backend_decl;
3054 tmp = build_fold_indirect_ref_loc (input_location, info->data);
3056 /* Use the vptr 'size' field to access a class the element of a class
3058 if (build_class_array_ref (se, tmp, index))
3061 se->expr = gfc_build_array_ref (tmp, index, decl);
3065 /* Translate access of temporary array. */
3068 gfc_conv_tmp_array_ref (gfc_se * se)
3070 se->string_length = se->ss->info->string_length;
3071 gfc_conv_scalarized_array_ref (se, NULL);
3072 gfc_advance_se_ss_chain (se);
3075 /* Add T to the offset pair *OFFSET, *CST_OFFSET. */
3078 add_to_offset (tree *cst_offset, tree *offset, tree t)
3080 if (TREE_CODE (t) == INTEGER_CST)
3081 *cst_offset = int_const_binop (PLUS_EXPR, *cst_offset, t);
3084 if (!integer_zerop (*offset))
3085 *offset = fold_build2_loc (input_location, PLUS_EXPR,
3086 gfc_array_index_type, *offset, t);
3094 build_array_ref (tree desc, tree offset, tree decl)
3099 /* Class container types do not always have the GFC_CLASS_TYPE_P
3100 but the canonical type does. */
3101 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc))
3102 && TREE_CODE (desc) == COMPONENT_REF)
3104 type = TREE_TYPE (TREE_OPERAND (desc, 0));
3105 if (TYPE_CANONICAL (type)
3106 && GFC_CLASS_TYPE_P (TYPE_CANONICAL (type)))
3107 type = TYPE_CANONICAL (type);
3112 /* Class array references need special treatment because the assigned
3113 type size needs to be used to point to the element. */
3114 if (type && GFC_CLASS_TYPE_P (type))
3116 type = gfc_get_element_type (TREE_TYPE (desc));
3117 tmp = TREE_OPERAND (desc, 0);
3118 tmp = gfc_get_class_array_ref (offset, tmp);
3119 tmp = fold_convert (build_pointer_type (type), tmp);
3120 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3124 tmp = gfc_conv_array_data (desc);
3125 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3126 tmp = gfc_build_array_ref (tmp, offset, decl);
3131 /* Build an array reference. se->expr already holds the array descriptor.
3132 This should be either a variable, indirect variable reference or component
3133 reference. For arrays which do not have a descriptor, se->expr will be
3135 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/
3138 gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_symbol * sym,
3142 tree offset, cst_offset;
3150 gcc_assert (ar->codimen);
3152 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr)))
3153 se->expr = build_fold_indirect_ref (gfc_conv_array_data (se->expr));
3156 if (GFC_ARRAY_TYPE_P (TREE_TYPE (se->expr))
3157 && TREE_CODE (TREE_TYPE (se->expr)) == POINTER_TYPE)
3158 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
3160 /* Use the actual tree type and not the wrapped coarray. */
3161 if (!se->want_pointer)
3162 se->expr = fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se->expr)),
3169 /* Handle scalarized references separately. */
3170 if (ar->type != AR_ELEMENT)
3172 gfc_conv_scalarized_array_ref (se, ar);
3173 gfc_advance_se_ss_chain (se);
3177 cst_offset = offset = gfc_index_zero_node;
3178 add_to_offset (&cst_offset, &offset, gfc_conv_array_offset (se->expr));
3180 /* Calculate the offsets from all the dimensions. Make sure to associate
3181 the final offset so that we form a chain of loop invariant summands. */
3182 for (n = ar->dimen - 1; n >= 0; n--)
3184 /* Calculate the index for this dimension. */
3185 gfc_init_se (&indexse, se);
3186 gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
3187 gfc_add_block_to_block (&se->pre, &indexse.pre);
3189 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3191 /* Check array bounds. */
3195 /* Evaluate the indexse.expr only once. */
3196 indexse.expr = save_expr (indexse.expr);
3199 tmp = gfc_conv_array_lbound (se->expr, n);
3200 if (sym->attr.temporary)
3202 gfc_init_se (&tmpse, se);
3203 gfc_conv_expr_type (&tmpse, ar->as->lower[n],
3204 gfc_array_index_type);
3205 gfc_add_block_to_block (&se->pre, &tmpse.pre);
3209 cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
3211 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3212 "below lower bound of %%ld", n+1, sym->name);
3213 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
3214 fold_convert (long_integer_type_node,
3216 fold_convert (long_integer_type_node, tmp));
3219 /* Upper bound, but not for the last dimension of assumed-size
3221 if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
3223 tmp = gfc_conv_array_ubound (se->expr, n);
3224 if (sym->attr.temporary)
3226 gfc_init_se (&tmpse, se);
3227 gfc_conv_expr_type (&tmpse, ar->as->upper[n],
3228 gfc_array_index_type);
3229 gfc_add_block_to_block (&se->pre, &tmpse.pre);
3233 cond = fold_build2_loc (input_location, GT_EXPR,
3234 boolean_type_node, indexse.expr, tmp);
3235 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3236 "above upper bound of %%ld", n+1, sym->name);
3237 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
3238 fold_convert (long_integer_type_node,
3240 fold_convert (long_integer_type_node, tmp));
3245 /* Multiply the index by the stride. */
3246 stride = gfc_conv_array_stride (se->expr, n);
3247 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3248 indexse.expr, stride);
3250 /* And add it to the total. */
3251 add_to_offset (&cst_offset, &offset, tmp);
3254 if (!integer_zerop (cst_offset))
3255 offset = fold_build2_loc (input_location, PLUS_EXPR,
3256 gfc_array_index_type, offset, cst_offset);
3258 se->expr = build_array_ref (se->expr, offset, sym->backend_decl);
3262 /* Add the offset corresponding to array's ARRAY_DIM dimension and loop's
3263 LOOP_DIM dimension (if any) to array's offset. */
3266 add_array_offset (stmtblock_t *pblock, gfc_loopinfo *loop, gfc_ss *ss,
3267 gfc_array_ref *ar, int array_dim, int loop_dim)
3270 gfc_array_info *info;
3273 info = &ss->info->data.array;
3275 gfc_init_se (&se, NULL);
3277 se.expr = info->descriptor;
3278 stride = gfc_conv_array_stride (info->descriptor, array_dim);
3279 index = conv_array_index_offset (&se, ss, array_dim, loop_dim, ar, stride);
3280 gfc_add_block_to_block (pblock, &se.pre);
3282 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
3283 gfc_array_index_type,
3284 info->offset, index);
3285 info->offset = gfc_evaluate_now (info->offset, pblock);
3289 /* Generate the code to be executed immediately before entering a
3290 scalarization loop. */
3293 gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
3294 stmtblock_t * pblock)
3297 gfc_ss_info *ss_info;
3298 gfc_array_info *info;
3299 gfc_ss_type ss_type;
3301 gfc_loopinfo *ploop;
3305 /* This code will be executed before entering the scalarization loop
3306 for this dimension. */
3307 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3311 if ((ss_info->useflags & flag) == 0)
3314 ss_type = ss_info->type;
3315 if (ss_type != GFC_SS_SECTION
3316 && ss_type != GFC_SS_FUNCTION
3317 && ss_type != GFC_SS_CONSTRUCTOR
3318 && ss_type != GFC_SS_COMPONENT)
3321 info = &ss_info->data.array;
3323 gcc_assert (dim < ss->dimen);
3324 gcc_assert (ss->dimen == loop->dimen);
3327 ar = &info->ref->u.ar;
3331 if (dim == loop->dimen - 1 && loop->parent != NULL)
3333 /* If we are in the outermost dimension of this loop, the previous
3334 dimension shall be in the parent loop. */
3335 gcc_assert (ss->parent != NULL);
3338 ploop = loop->parent;
3340 /* ss and ss->parent are about the same array. */
3341 gcc_assert (ss_info == pss->info);
3349 if (dim == loop->dimen - 1)
3354 /* For the time being, there is no loop reordering. */
3355 gcc_assert (i == ploop->order[i]);
3356 i = ploop->order[i];
3358 if (dim == loop->dimen - 1 && loop->parent == NULL)
3360 stride = gfc_conv_array_stride (info->descriptor,
3361 innermost_ss (ss)->dim[i]);
3363 /* Calculate the stride of the innermost loop. Hopefully this will
3364 allow the backend optimizers to do their stuff more effectively.
3366 info->stride0 = gfc_evaluate_now (stride, pblock);
3368 /* For the outermost loop calculate the offset due to any
3369 elemental dimensions. It will have been initialized with the
3370 base offset of the array. */
3373 for (i = 0; i < ar->dimen; i++)
3375 if (ar->dimen_type[i] != DIMEN_ELEMENT)
3378 add_array_offset (pblock, loop, ss, ar, i, /* unused */ -1);
3383 /* Add the offset for the previous loop dimension. */
3384 add_array_offset (pblock, ploop, ss, ar, pss->dim[i], i);
3386 /* Remember this offset for the second loop. */
3387 if (dim == loop->temp_dim - 1 && loop->parent == NULL)
3388 info->saved_offset = info->offset;
3393 /* Start a scalarized expression. Creates a scope and declares loop
3397 gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
3403 gcc_assert (!loop->array_parameter);
3405 for (dim = loop->dimen - 1; dim >= 0; dim--)
3407 n = loop->order[dim];
3409 gfc_start_block (&loop->code[n]);
3411 /* Create the loop variable. */
3412 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
3414 if (dim < loop->temp_dim)
3418 /* Calculate values that will be constant within this loop. */
3419 gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
3421 gfc_start_block (pbody);
3425 /* Generates the actual loop code for a scalarization loop. */
3428 gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
3429 stmtblock_t * pbody)
3440 if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS))
3441 == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
3442 && n == loop->dimen - 1)
3444 /* We create an OMP_FOR construct for the outermost scalarized loop. */
3445 init = make_tree_vec (1);
3446 cond = make_tree_vec (1);
3447 incr = make_tree_vec (1);
3449 /* Cycle statement is implemented with a goto. Exit statement must not
3450 be present for this loop. */
3451 exit_label = gfc_build_label_decl (NULL_TREE);
3452 TREE_USED (exit_label) = 1;
3454 /* Label for cycle statements (if needed). */
3455 tmp = build1_v (LABEL_EXPR, exit_label);
3456 gfc_add_expr_to_block (pbody, tmp);
3458 stmt = make_node (OMP_FOR);
3460 TREE_TYPE (stmt) = void_type_node;
3461 OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
3463 OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
3464 OMP_CLAUSE_SCHEDULE);
3465 OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
3466 = OMP_CLAUSE_SCHEDULE_STATIC;
3467 if (ompws_flags & OMPWS_NOWAIT)
3468 OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
3469 = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
3471 /* Initialize the loopvar. */
3472 TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
3474 OMP_FOR_INIT (stmt) = init;
3475 /* The exit condition. */
3476 TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
3478 loop->loopvar[n], loop->to[n]);
3479 SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
3480 OMP_FOR_COND (stmt) = cond;
3481 /* Increment the loopvar. */
3482 tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3483 loop->loopvar[n], gfc_index_one_node);
3484 TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
3485 void_type_node, loop->loopvar[n], tmp);
3486 OMP_FOR_INCR (stmt) = incr;
3488 ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
3489 gfc_add_expr_to_block (&loop->code[n], stmt);
3493 bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
3494 && (loop->temp_ss == NULL);
3496 loopbody = gfc_finish_block (pbody);
3500 tmp = loop->from[n];
3501 loop->from[n] = loop->to[n];
3505 /* Initialize the loopvar. */
3506 if (loop->loopvar[n] != loop->from[n])
3507 gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
3509 exit_label = gfc_build_label_decl (NULL_TREE);
3511 /* Generate the loop body. */
3512 gfc_init_block (&block);
3514 /* The exit condition. */
3515 cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
3516 boolean_type_node, loop->loopvar[n], loop->to[n]);
3517 tmp = build1_v (GOTO_EXPR, exit_label);
3518 TREE_USED (exit_label) = 1;
3519 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
3520 gfc_add_expr_to_block (&block, tmp);
3522 /* The main body. */
3523 gfc_add_expr_to_block (&block, loopbody);
3525 /* Increment the loopvar. */
3526 tmp = fold_build2_loc (input_location,
3527 reverse_loop ? MINUS_EXPR : PLUS_EXPR,
3528 gfc_array_index_type, loop->loopvar[n],
3529 gfc_index_one_node);
3531 gfc_add_modify (&block, loop->loopvar[n], tmp);
3533 /* Build the loop. */
3534 tmp = gfc_finish_block (&block);
3535 tmp = build1_v (LOOP_EXPR, tmp);
3536 gfc_add_expr_to_block (&loop->code[n], tmp);
3538 /* Add the exit label. */
3539 tmp = build1_v (LABEL_EXPR, exit_label);
3540 gfc_add_expr_to_block (&loop->code[n], tmp);
3546 /* Finishes and generates the loops for a scalarized expression. */
3549 gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
3554 stmtblock_t *pblock;
3558 /* Generate the loops. */
3559 for (dim = 0; dim < loop->dimen; dim++)
3561 n = loop->order[dim];
3562 gfc_trans_scalarized_loop_end (loop, n, pblock);
3563 loop->loopvar[n] = NULL_TREE;
3564 pblock = &loop->code[n];
3567 tmp = gfc_finish_block (pblock);
3568 gfc_add_expr_to_block (&loop->pre, tmp);
3570 /* Clear all the used flags. */
3571 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3572 if (ss->parent == NULL)
3573 ss->info->useflags = 0;
3577 /* Finish the main body of a scalarized expression, and start the secondary
3581 gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
3585 stmtblock_t *pblock;
3589 /* We finish as many loops as are used by the temporary. */
3590 for (dim = 0; dim < loop->temp_dim - 1; dim++)
3592 n = loop->order[dim];
3593 gfc_trans_scalarized_loop_end (loop, n, pblock);
3594 loop->loopvar[n] = NULL_TREE;
3595 pblock = &loop->code[n];
3598 /* We don't want to finish the outermost loop entirely. */
3599 n = loop->order[loop->temp_dim - 1];
3600 gfc_trans_scalarized_loop_end (loop, n, pblock);
3602 /* Restore the initial offsets. */
3603 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3605 gfc_ss_type ss_type;
3606 gfc_ss_info *ss_info;
3610 if ((ss_info->useflags & 2) == 0)
3613 ss_type = ss_info->type;
3614 if (ss_type != GFC_SS_SECTION
3615 && ss_type != GFC_SS_FUNCTION
3616 && ss_type != GFC_SS_CONSTRUCTOR
3617 && ss_type != GFC_SS_COMPONENT)
3620 ss_info->data.array.offset = ss_info->data.array.saved_offset;
3623 /* Restart all the inner loops we just finished. */
3624 for (dim = loop->temp_dim - 2; dim >= 0; dim--)
3626 n = loop->order[dim];
3628 gfc_start_block (&loop->code[n]);
3630 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
3632 gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
3635 /* Start a block for the secondary copying code. */
3636 gfc_start_block (body);
3640 /* Precalculate (either lower or upper) bound of an array section.
3641 BLOCK: Block in which the (pre)calculation code will go.
3642 BOUNDS[DIM]: Where the bound value will be stored once evaluated.
3643 VALUES[DIM]: Specified bound (NULL <=> unspecified).
3644 DESC: Array descriptor from which the bound will be picked if unspecified
3645 (either lower or upper bound according to LBOUND). */
3648 evaluate_bound (stmtblock_t *block, tree *bounds, gfc_expr ** values,
3649 tree desc, int dim, bool lbound)
3652 gfc_expr * input_val = values[dim];
3653 tree *output = &bounds[dim];
3658 /* Specified section bound. */
3659 gfc_init_se (&se, NULL);
3660 gfc_conv_expr_type (&se, input_val, gfc_array_index_type);
3661 gfc_add_block_to_block (block, &se.pre);
3666 /* No specific bound specified so use the bound of the array. */
3667 *output = lbound ? gfc_conv_array_lbound (desc, dim) :
3668 gfc_conv_array_ubound (desc, dim);
3670 *output = gfc_evaluate_now (*output, block);
3674 /* Calculate the lower bound of an array section. */
3677 gfc_conv_section_startstride (stmtblock_t * block, gfc_ss * ss, int dim)
3679 gfc_expr *stride = NULL;
3682 gfc_array_info *info;
3685 gcc_assert (ss->info->type == GFC_SS_SECTION);
3687 info = &ss->info->data.array;
3688 ar = &info->ref->u.ar;
3690 if (ar->dimen_type[dim] == DIMEN_VECTOR)
3692 /* We use a zero-based index to access the vector. */
3693 info->start[dim] = gfc_index_zero_node;
3694 info->end[dim] = NULL;
3695 info->stride[dim] = gfc_index_one_node;
3699 gcc_assert (ar->dimen_type[dim] == DIMEN_RANGE
3700 || ar->dimen_type[dim] == DIMEN_THIS_IMAGE);
3701 desc = info->descriptor;
3702 stride = ar->stride[dim];
3704 /* Calculate the start of the range. For vector subscripts this will
3705 be the range of the vector. */
3706 evaluate_bound (block, info->start, ar->start, desc, dim, true);
3708 /* Similarly calculate the end. Although this is not used in the
3709 scalarizer, it is needed when checking bounds and where the end
3710 is an expression with side-effects. */
3711 evaluate_bound (block, info->end, ar->end, desc, dim, false);
3713 /* Calculate the stride. */
3715 info->stride[dim] = gfc_index_one_node;
3718 gfc_init_se (&se, NULL);
3719 gfc_conv_expr_type (&se, stride, gfc_array_index_type);
3720 gfc_add_block_to_block (block, &se.pre);
3721 info->stride[dim] = gfc_evaluate_now (se.expr, block);
3726 /* Calculates the range start and stride for a SS chain. Also gets the
3727 descriptor and data pointer. The range of vector subscripts is the size
3728 of the vector. Array bounds are also checked. */
3731 gfc_conv_ss_startstride (gfc_loopinfo * loop)
3738 gfc_loopinfo * const outer_loop = outermost_loop (loop);
3741 /* Determine the rank of the loop. */
3742 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3744 switch (ss->info->type)
3746 case GFC_SS_SECTION:
3747 case GFC_SS_CONSTRUCTOR:
3748 case GFC_SS_FUNCTION:
3749 case GFC_SS_COMPONENT:
3750 loop->dimen = ss->dimen;
3753 /* As usual, lbound and ubound are exceptions!. */
3754 case GFC_SS_INTRINSIC:
3755 switch (ss->info->expr->value.function.isym->id)
3757 case GFC_ISYM_LBOUND:
3758 case GFC_ISYM_UBOUND:
3759 case GFC_ISYM_LCOBOUND:
3760 case GFC_ISYM_UCOBOUND:
3761 case GFC_ISYM_THIS_IMAGE:
3762 loop->dimen = ss->dimen;
3774 /* We should have determined the rank of the expression by now. If
3775 not, that's bad news. */
3779 /* Loop over all the SS in the chain. */
3780 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3782 gfc_ss_info *ss_info;
3783 gfc_array_info *info;
3787 expr = ss_info->expr;
3788 info = &ss_info->data.array;
3790 if (expr && expr->shape && !info->shape)
3791 info->shape = expr->shape;
3793 switch (ss_info->type)
3795 case GFC_SS_SECTION:
3796 /* Get the descriptor for the array. If it is a cross loops array,
3797 we got the descriptor already in the outermost loop. */
3798 if (ss->parent == NULL)
3799 gfc_conv_ss_descriptor (&outer_loop->pre, ss,
3800 !loop->array_parameter);
3802 for (n = 0; n < ss->dimen; n++)
3803 gfc_conv_section_startstride (&outer_loop->pre, ss, ss->dim[n]);
3806 case GFC_SS_INTRINSIC:
3807 switch (expr->value.function.isym->id)
3809 /* Fall through to supply start and stride. */
3810 case GFC_ISYM_LBOUND:
3811 case GFC_ISYM_UBOUND:
3815 /* This is the variant without DIM=... */
3816 gcc_assert (expr->value.function.actual->next->expr == NULL);
3818 arg = expr->value.function.actual->expr;
3819 if (arg->rank == -1)
3824 /* The rank (hence the return value's shape) is unknown,
3825 we have to retrieve it. */
3826 gfc_init_se (&se, NULL);
3827 se.descriptor_only = 1;
3828 gfc_conv_expr (&se, arg);
3829 /* This is a bare variable, so there is no preliminary
3831 gcc_assert (se.pre.head == NULL_TREE
3832 && se.post.head == NULL_TREE);
3833 rank = gfc_conv_descriptor_rank (se.expr);
3834 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3835 gfc_array_index_type,
3836 fold_convert (gfc_array_index_type,
3838 gfc_index_one_node);
3839 info->end[0] = gfc_evaluate_now (tmp, &outer_loop->pre);
3840 info->start[0] = gfc_index_zero_node;
3841 info->stride[0] = gfc_index_one_node;
3844 /* Otherwise fall through GFC_SS_FUNCTION. */
3846 case GFC_ISYM_LCOBOUND:
3847 case GFC_ISYM_UCOBOUND:
3848 case GFC_ISYM_THIS_IMAGE:
3855 case GFC_SS_CONSTRUCTOR:
3856 case GFC_SS_FUNCTION:
3857 for (n = 0; n < ss->dimen; n++)
3859 int dim = ss->dim[n];
3861 info->start[dim] = gfc_index_zero_node;
3862 info->end[dim] = gfc_index_zero_node;
3863 info->stride[dim] = gfc_index_one_node;
3872 /* The rest is just runtime bound checking. */
3873 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3876 tree lbound, ubound;
3878 tree size[GFC_MAX_DIMENSIONS];
3879 tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
3880 gfc_array_info *info;
3884 gfc_start_block (&block);
3886 for (n = 0; n < loop->dimen; n++)
3887 size[n] = NULL_TREE;
3889 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3892 gfc_ss_info *ss_info;
3895 const char *expr_name;
3898 if (ss_info->type != GFC_SS_SECTION)
3901 /* Catch allocatable lhs in f2003. */
3902 if (gfc_option.flag_realloc_lhs && ss->is_alloc_lhs)
3905 expr = ss_info->expr;
3906 expr_loc = &expr->where;
3907 expr_name = expr->symtree->name;
3909 gfc_start_block (&inner);
3911 /* TODO: range checking for mapped dimensions. */
3912 info = &ss_info->data.array;
3914 /* This code only checks ranges. Elemental and vector
3915 dimensions are checked later. */
3916 for (n = 0; n < loop->dimen; n++)
3921 if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
3924 if (dim == info->ref->u.ar.dimen - 1
3925 && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
3926 check_upper = false;
3930 /* Zero stride is not allowed. */
3931 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
3932 info->stride[dim], gfc_index_zero_node);
3933 asprintf (&msg, "Zero stride is not allowed, for dimension %d "
3934 "of array '%s'", dim + 1, expr_name);
3935 gfc_trans_runtime_check (true, false, tmp, &inner,
3939 desc = info->descriptor;
3941 /* This is the run-time equivalent of resolve.c's
3942 check_dimension(). The logical is more readable there
3943 than it is here, with all the trees. */
3944 lbound = gfc_conv_array_lbound (desc, dim);
3945 end = info->end[dim];
3947 ubound = gfc_conv_array_ubound (desc, dim);
3951 /* non_zerosized is true when the selected range is not
3953 stride_pos = fold_build2_loc (input_location, GT_EXPR,
3954 boolean_type_node, info->stride[dim],
3955 gfc_index_zero_node);
3956 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
3957 info->start[dim], end);
3958 stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3959 boolean_type_node, stride_pos, tmp);
3961 stride_neg = fold_build2_loc (input_location, LT_EXPR,
3963 info->stride[dim], gfc_index_zero_node);
3964 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
3965 info->start[dim], end);
3966 stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3969 non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
3971 stride_pos, stride_neg);
3973 /* Check the start of the range against the lower and upper
3974 bounds of the array, if the range is not empty.
3975 If upper bound is present, include both bounds in the
3979 tmp = fold_build2_loc (input_location, LT_EXPR,
3981 info->start[dim], lbound);
3982 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3984 non_zerosized, tmp);
3985 tmp2 = fold_build2_loc (input_location, GT_EXPR,
3987 info->start[dim], ubound);
3988 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3990 non_zerosized, tmp2);
3991 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3992 "outside of expected range (%%ld:%%ld)",
3993 dim + 1, expr_name);
3994 gfc_trans_runtime_check (true, false, tmp, &inner,
3996 fold_convert (long_integer_type_node, info->start[dim]),
3997 fold_convert (long_integer_type_node, lbound),
3998 fold_convert (long_integer_type_node, ubound));
3999 gfc_trans_runtime_check (true, false, tmp2, &inner,
4001 fold_convert (long_integer_type_node, info->start[dim]),
4002 fold_convert (long_integer_type_node, lbound),
4003 fold_convert (long_integer_type_node, ubound));
4008 tmp = fold_build2_loc (input_location, LT_EXPR,
4010 info->start[dim], lbound);
4011 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4012 boolean_type_node, non_zerosized, tmp);
4013 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
4014 "below lower bound of %%ld",
4015 dim + 1, expr_name);
4016 gfc_trans_runtime_check (true, false, tmp, &inner,
4018 fold_convert (long_integer_type_node, info->start[dim]),
4019 fold_convert (long_integer_type_node, lbound));
4023 /* Compute the last element of the range, which is not
4024 necessarily "end" (think 0:5:3, which doesn't contain 5)
4025 and check it against both lower and upper bounds. */
4027 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4028 gfc_array_index_type, end,
4030 tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
4031 gfc_array_index_type, tmp,
4033 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4034 gfc_array_index_type, end, tmp);
4035 tmp2 = fold_build2_loc (input_location, LT_EXPR,
4036 boolean_type_node, tmp, lbound);
4037 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4038 boolean_type_node, non_zerosized, tmp2);
4041 tmp3 = fold_build2_loc (input_location, GT_EXPR,
4042 boolean_type_node, tmp, ubound);
4043 tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4044 boolean_type_node, non_zerosized, tmp3);
4045 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
4046 "outside of expected range (%%ld:%%ld)",
4047 dim + 1, expr_name);
4048 gfc_trans_runtime_check (true, false, tmp2, &inner,
4050 fold_convert (long_integer_type_node, tmp),
4051 fold_convert (long_integer_type_node, ubound),
4052 fold_convert (long_integer_type_node, lbound));
4053 gfc_trans_runtime_check (true, false, tmp3, &inner,
4055 fold_convert (long_integer_type_node, tmp),
4056 fold_convert (long_integer_type_node, ubound),
4057 fold_convert (long_integer_type_node, lbound));
4062 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
4063 "below lower bound of %%ld",
4064 dim + 1, expr_name);
4065 gfc_trans_runtime_check (true, false, tmp2, &inner,
4067 fold_convert (long_integer_type_node, tmp),
4068 fold_convert (long_integer_type_node, lbound));
4072 /* Check the section sizes match. */
4073 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4074 gfc_array_index_type, end,
4076 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4077 gfc_array_index_type, tmp,
4079 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4080 gfc_array_index_type,
4081 gfc_index_one_node, tmp);
4082 tmp = fold_build2_loc (input_location, MAX_EXPR,
4083 gfc_array_index_type, tmp,
4084 build_int_cst (gfc_array_index_type, 0));
4085 /* We remember the size of the first section, and check all the
4086 others against this. */
4089 tmp3 = fold_build2_loc (input_location, NE_EXPR,
4090 boolean_type_node, tmp, size[n]);
4091 asprintf (&msg, "Array bound mismatch for dimension %d "
4092 "of array '%s' (%%ld/%%ld)",
4093 dim + 1, expr_name);
4095 gfc_trans_runtime_check (true, false, tmp3, &inner,
4097 fold_convert (long_integer_type_node, tmp),
4098 fold_convert (long_integer_type_node, size[n]));
4103 size[n] = gfc_evaluate_now (tmp, &inner);
4106 tmp = gfc_finish_block (&inner);
4108 /* For optional arguments, only check bounds if the argument is
4110 if (expr->symtree->n.sym->attr.optional
4111 || expr->symtree->n.sym->attr.not_always_present)
4112 tmp = build3_v (COND_EXPR,
4113 gfc_conv_expr_present (expr->symtree->n.sym),
4114 tmp, build_empty_stmt (input_location));
4116 gfc_add_expr_to_block (&block, tmp);
4120 tmp = gfc_finish_block (&block);
4121 gfc_add_expr_to_block (&outer_loop->pre, tmp);
4124 for (loop = loop->nested; loop; loop = loop->next)
4125 gfc_conv_ss_startstride (loop);
4128 /* Return true if both symbols could refer to the same data object. Does
4129 not take account of aliasing due to equivalence statements. */
4132 symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
4133 bool lsym_target, bool rsym_pointer, bool rsym_target)
4135 /* Aliasing isn't possible if the symbols have different base types. */
4136 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
4139 /* Pointers can point to other pointers and target objects. */
4141 if ((lsym_pointer && (rsym_pointer || rsym_target))
4142 || (rsym_pointer && (lsym_pointer || lsym_target)))
4145 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
4146 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
4148 if (lsym_target && rsym_target
4149 && ((lsym->attr.dummy && !lsym->attr.contiguous
4150 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
4151 || (rsym->attr.dummy && !rsym->attr.contiguous
4152 && (!rsym->attr.dimension
4153 || rsym->as->type == AS_ASSUMED_SHAPE))))
4160 /* Return true if the two SS could be aliased, i.e. both point to the same data
4162 /* TODO: resolve aliases based on frontend expressions. */
4165 gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
4169 gfc_expr *lexpr, *rexpr;
4172 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
4174 lexpr = lss->info->expr;
4175 rexpr = rss->info->expr;
4177 lsym = lexpr->symtree->n.sym;
4178 rsym = rexpr->symtree->n.sym;
4180 lsym_pointer = lsym->attr.pointer;
4181 lsym_target = lsym->attr.target;
4182 rsym_pointer = rsym->attr.pointer;
4183 rsym_target = rsym->attr.target;
4185 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
4186 rsym_pointer, rsym_target))
4189 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
4190 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
4193 /* For derived types we must check all the component types. We can ignore
4194 array references as these will have the same base type as the previous
4196 for (lref = lexpr->ref; lref != lss->info->data.array.ref; lref = lref->next)
4198 if (lref->type != REF_COMPONENT)
4201 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
4202 lsym_target = lsym_target || lref->u.c.sym->attr.target;
4204 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
4205 rsym_pointer, rsym_target))
4208 if ((lsym_pointer && (rsym_pointer || rsym_target))
4209 || (rsym_pointer && (lsym_pointer || lsym_target)))
4211 if (gfc_compare_types (&lref->u.c.component->ts,
4216 for (rref = rexpr->ref; rref != rss->info->data.array.ref;
4219 if (rref->type != REF_COMPONENT)
4222 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
4223 rsym_target = lsym_target || rref->u.c.sym->attr.target;
4225 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
4226 lsym_pointer, lsym_target,
4227 rsym_pointer, rsym_target))
4230 if ((lsym_pointer && (rsym_pointer || rsym_target))
4231 || (rsym_pointer && (lsym_pointer || lsym_target)))
4233 if (gfc_compare_types (&lref->u.c.component->ts,
4234 &rref->u.c.sym->ts))
4236 if (gfc_compare_types (&lref->u.c.sym->ts,
4237 &rref->u.c.component->ts))
4239 if (gfc_compare_types (&lref->u.c.component->ts,
4240 &rref->u.c.component->ts))
4246 lsym_pointer = lsym->attr.pointer;
4247 lsym_target = lsym->attr.target;
4248 lsym_pointer = lsym->attr.pointer;
4249 lsym_target = lsym->attr.target;
4251 for (rref = rexpr->ref; rref != rss->info->data.array.ref; rref = rref->next)
4253 if (rref->type != REF_COMPONENT)
4256 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
4257 rsym_target = lsym_target || rref->u.c.sym->attr.target;
4259 if (symbols_could_alias (rref->u.c.sym, lsym,
4260 lsym_pointer, lsym_target,
4261 rsym_pointer, rsym_target))
4264 if ((lsym_pointer && (rsym_pointer || rsym_target))
4265 || (rsym_pointer && (lsym_pointer || lsym_target)))
4267 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
4276 /* Resolve array data dependencies. Creates a temporary if required. */
4277 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
4281 gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
4287 gfc_expr *dest_expr;
4292 loop->temp_ss = NULL;
4293 dest_expr = dest->info->expr;
4295 for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
4297 if (ss->info->type != GFC_SS_SECTION)
4300 ss_expr = ss->info->expr;
4302 if (dest_expr->symtree->n.sym != ss_expr->symtree->n.sym)
4304 if (gfc_could_be_alias (dest, ss)
4305 || gfc_are_equivalenced_arrays (dest_expr, ss_expr))
4313 lref = dest_expr->ref;
4314 rref = ss_expr->ref;
4316 nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);
4321 for (i = 0; i < dest->dimen; i++)
4322 for (j = 0; j < ss->dimen; j++)
4324 && dest->dim[i] == ss->dim[j])
4326 /* If we don't access array elements in the same order,
4327 there is a dependency. */
4332 /* TODO : loop shifting. */
4335 /* Mark the dimensions for LOOP SHIFTING */
4336 for (n = 0; n < loop->dimen; n++)
4338 int dim = dest->data.info.dim[n];
4340 if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
4342 else if (! gfc_is_same_range (&lref->u.ar,
4343 &rref->u.ar, dim, 0))
4347 /* Put all the dimensions with dependencies in the
4350 for (n = 0; n < loop->dimen; n++)
4352 gcc_assert (loop->order[n] == n);
4354 loop->order[dim++] = n;
4356 for (n = 0; n < loop->dimen; n++)
4359 loop->order[dim++] = n;
4362 gcc_assert (dim == loop->dimen);
4373 tree base_type = gfc_typenode_for_spec (&dest_expr->ts);
4374 if (GFC_ARRAY_TYPE_P (base_type)
4375 || GFC_DESCRIPTOR_TYPE_P (base_type))
4376 base_type = gfc_get_element_type (base_type);
4377 loop->temp_ss = gfc_get_temp_ss (base_type, dest->info->string_length,
4379 gfc_add_ss_to_loop (loop, loop->temp_ss);
4382 loop->temp_ss = NULL;
4386 /* Browse through each array's information from the scalarizer and set the loop
4387 bounds according to the "best" one (per dimension), i.e. the one which
4388 provides the most information (constant bounds, shape, etc.). */
4391 set_loop_bounds (gfc_loopinfo *loop)
4393 int n, dim, spec_dim;
4394 gfc_array_info *info;
4395 gfc_array_info *specinfo;
4399 bool dynamic[GFC_MAX_DIMENSIONS];
4402 bool nonoptional_arr;
4404 gfc_loopinfo * const outer_loop = outermost_loop (loop);
4406 loopspec = loop->specloop;
4409 for (n = 0; n < loop->dimen; n++)
4414 /* If there are both optional and nonoptional array arguments, scalarize
4415 over the nonoptional; otherwise, it does not matter as then all
4416 (optional) arrays have to be present per F2008, 125.2.12p3(6). */
4418 nonoptional_arr = false;
4420 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4421 if (ss->info->type != GFC_SS_SCALAR && ss->info->type != GFC_SS_TEMP
4422 && ss->info->type != GFC_SS_REFERENCE && !ss->info->can_be_null_ref)
4423 nonoptional_arr = true;
4425 /* We use one SS term, and use that to determine the bounds of the
4426 loop for this dimension. We try to pick the simplest term. */
4427 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4429 gfc_ss_type ss_type;
4431 ss_type = ss->info->type;
4432 if (ss_type == GFC_SS_SCALAR
4433 || ss_type == GFC_SS_TEMP
4434 || ss_type == GFC_SS_REFERENCE
4435 || (ss->info->can_be_null_ref && nonoptional_arr))
4438 info = &ss->info->data.array;
4441 if (loopspec[n] != NULL)
4443 specinfo = &loopspec[n]->info->data.array;
4444 spec_dim = loopspec[n]->dim[n];
4448 /* Silence uninitialized warnings. */
4455 gcc_assert (info->shape[dim]);
4456 /* The frontend has worked out the size for us. */
4459 || !integer_zerop (specinfo->start[spec_dim]))
4460 /* Prefer zero-based descriptors if possible. */
4465 if (ss_type == GFC_SS_CONSTRUCTOR)
4467 gfc_constructor_base base;
4468 /* An unknown size constructor will always be rank one.
4469 Higher rank constructors will either have known shape,
4470 or still be wrapped in a call to reshape. */
4471 gcc_assert (loop->dimen == 1);
4473 /* Always prefer to use the constructor bounds if the size
4474 can be determined at compile time. Prefer not to otherwise,
4475 since the general case involves realloc, and it's better to
4476 avoid that overhead if possible. */
4477 base = ss->info->expr->value.constructor;
4478 dynamic[n] = gfc_get_array_constructor_size (&i, base);
4479 if (!dynamic[n] || !loopspec[n])
4484 /* Avoid using an allocatable lhs in an assignment, since
4485 there might be a reallocation coming. */
4486 if (loopspec[n] && ss->is_alloc_lhs)
4491 /* Criteria for choosing a loop specifier (most important first):
4492 doesn't need realloc
4498 else if (loopspec[n]->info->type == GFC_SS_CONSTRUCTOR && dynamic[n])
4500 else if (integer_onep (info->stride[dim])
4501 && !integer_onep (specinfo->stride[spec_dim]))
4503 else if (INTEGER_CST_P (info->stride[dim])
4504 && !INTEGER_CST_P (specinfo->stride[spec_dim]))
4506 else if (INTEGER_CST_P (info->start[dim])
4507 && !INTEGER_CST_P (specinfo->start[spec_dim])
4508 && integer_onep (info->stride[dim])
4509 == integer_onep (specinfo->stride[spec_dim])
4510 && INTEGER_CST_P (info->stride[dim])
4511 == INTEGER_CST_P (specinfo->stride[spec_dim]))
4513 /* We don't work out the upper bound.
4514 else if (INTEGER_CST_P (info->finish[n])
4515 && ! INTEGER_CST_P (specinfo->finish[n]))
4516 loopspec[n] = ss; */
4519 /* We should have found the scalarization loop specifier. If not,
4521 gcc_assert (loopspec[n]);
4523 info = &loopspec[n]->info->data.array;
4524 dim = loopspec[n]->dim[n];
4526 /* Set the extents of this range. */
4527 cshape = info->shape;
4528 if (cshape && INTEGER_CST_P (info->start[dim])
4529 && INTEGER_CST_P (info->stride[dim]))
4531 loop->from[n] = info->start[dim];
4532 mpz_set (i, cshape[get_array_ref_dim_for_loop_dim (loopspec[n], n)]);
4533 mpz_sub_ui (i, i, 1);
4534 /* To = from + (size - 1) * stride. */
4535 tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
4536 if (!integer_onep (info->stride[dim]))
4537 tmp = fold_build2_loc (input_location, MULT_EXPR,
4538 gfc_array_index_type, tmp,
4540 loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
4541 gfc_array_index_type,
4542 loop->from[n], tmp);
4546 loop->from[n] = info->start[dim];
4547 switch (loopspec[n]->info->type)
4549 case GFC_SS_CONSTRUCTOR:
4550 /* The upper bound is calculated when we expand the
4552 gcc_assert (loop->to[n] == NULL_TREE);
4555 case GFC_SS_SECTION:
4556 /* Use the end expression if it exists and is not constant,
4557 so that it is only evaluated once. */
4558 loop->to[n] = info->end[dim];
4561 case GFC_SS_FUNCTION:
4562 /* The loop bound will be set when we generate the call. */
4563 gcc_assert (loop->to[n] == NULL_TREE);
4566 case GFC_SS_INTRINSIC:
4568 gfc_expr *expr = loopspec[n]->info->expr;
4570 /* The {l,u}bound of an assumed rank. */
4571 gcc_assert ((expr->value.function.isym->id == GFC_ISYM_LBOUND
4572 || expr->value.function.isym->id == GFC_ISYM_UBOUND)
4573 && expr->value.function.actual->next->expr == NULL
4574 && expr->value.function.actual->expr->rank == -1);
4576 loop->to[n] = info->end[dim];
4585 /* Transform everything so we have a simple incrementing variable. */
4586 if (integer_onep (info->stride[dim]))
4587 info->delta[dim] = gfc_index_zero_node;
4590 /* Set the delta for this section. */
4591 info->delta[dim] = gfc_evaluate_now (loop->from[n], &outer_loop->pre);
4592 /* Number of iterations is (end - start + step) / step.
4593 with start = 0, this simplifies to
4595 for (i = 0; i<=last; i++){...}; */
4596 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4597 gfc_array_index_type, loop->to[n],
4599 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4600 gfc_array_index_type, tmp, info->stride[dim]);
4601 tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
4602 tmp, build_int_cst (gfc_array_index_type, -1));
4603 loop->to[n] = gfc_evaluate_now (tmp, &outer_loop->pre);
4604 /* Make the loop variable start at 0. */
4605 loop->from[n] = gfc_index_zero_node;
4610 for (loop = loop->nested; loop; loop = loop->next)
4611 set_loop_bounds (loop);
4615 /* Initialize the scalarization loop. Creates the loop variables. Determines
4616 the range of the loop variables. Creates a temporary if required.
4617 Also generates code for scalar expressions which have been
4618 moved outside the loop. */
4621 gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
4626 set_loop_bounds (loop);
4628 /* Add all the scalar code that can be taken out of the loops.
4629 This may include calculating the loop bounds, so do it before
4630 allocating the temporary. */
4631 gfc_add_loop_ss_code (loop, loop->ss, false, where);
4633 tmp_ss = loop->temp_ss;
4634 /* If we want a temporary then create it. */
4637 gfc_ss_info *tmp_ss_info;
4639 tmp_ss_info = tmp_ss->info;
4640 gcc_assert (tmp_ss_info->type == GFC_SS_TEMP);
4641 gcc_assert (loop->parent == NULL);
4643 /* Make absolutely sure that this is a complete type. */
4644 if (tmp_ss_info->string_length)
4645 tmp_ss_info->data.temp.type
4646 = gfc_get_character_type_len_for_eltype
4647 (TREE_TYPE (tmp_ss_info->data.temp.type),
4648 tmp_ss_info->string_length);
4650 tmp = tmp_ss_info->data.temp.type;
4651 memset (&tmp_ss_info->data.array, 0, sizeof (gfc_array_info));
4652 tmp_ss_info->type = GFC_SS_SECTION;
4654 gcc_assert (tmp_ss->dimen != 0);
4656 gfc_trans_create_temp_array (&loop->pre, &loop->post, tmp_ss, tmp,
4657 NULL_TREE, false, true, false, where);
4660 /* For array parameters we don't have loop variables, so don't calculate the
4662 if (!loop->array_parameter)
4663 gfc_set_delta (loop);
4667 /* Calculates how to transform from loop variables to array indices for each
4668 array: once loop bounds are chosen, sets the difference (DELTA field) between
4669 loop bounds and array reference bounds, for each array info. */
4672 gfc_set_delta (gfc_loopinfo *loop)
4674 gfc_ss *ss, **loopspec;
4675 gfc_array_info *info;
4679 gfc_loopinfo * const outer_loop = outermost_loop (loop);
4681 loopspec = loop->specloop;
4683 /* Calculate the translation from loop variables to array indices. */
4684 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4686 gfc_ss_type ss_type;
4688 ss_type = ss->info->type;
4689 if (ss_type != GFC_SS_SECTION
4690 && ss_type != GFC_SS_COMPONENT
4691 && ss_type != GFC_SS_CONSTRUCTOR)
4694 info = &ss->info->data.array;
4696 for (n = 0; n < ss->dimen; n++)
4698 /* If we are specifying the range the delta is already set. */
4699 if (loopspec[n] != ss)
4703 /* Calculate the offset relative to the loop variable.
4704 First multiply by the stride. */
4705 tmp = loop->from[n];
4706 if (!integer_onep (info->stride[dim]))
4707 tmp = fold_build2_loc (input_location, MULT_EXPR,
4708 gfc_array_index_type,
4709 tmp, info->stride[dim]);
4711 /* Then subtract this from our starting value. */
4712 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4713 gfc_array_index_type,
4714 info->start[dim], tmp);
4716 info->delta[dim] = gfc_evaluate_now (tmp, &outer_loop->pre);
4721 for (loop = loop->nested; loop; loop = loop->next)
4722 gfc_set_delta (loop);
4726 /* Calculate the size of a given array dimension from the bounds. This
4727 is simply (ubound - lbound + 1) if this expression is positive
4728 or 0 if it is negative (pick either one if it is zero). Optionally
4729 (if or_expr is present) OR the (expression != 0) condition to it. */
4732 gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
4737 /* Calculate (ubound - lbound + 1). */
4738 res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4740 res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
4741 gfc_index_one_node);
4743 /* Check whether the size for this dimension is negative. */
4744 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
4745 gfc_index_zero_node);
4746 res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
4747 gfc_index_zero_node, res);
4749 /* Build OR expression. */
4751 *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4752 boolean_type_node, *or_expr, cond);
4758 /* For an array descriptor, get the total number of elements. This is just
4759 the product of the extents along from_dim to to_dim. */
4762 gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
4767 res = gfc_index_one_node;
4769 for (dim = from_dim; dim < to_dim; ++dim)
4775 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
4776 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
4778 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
4779 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4787 /* Full size of an array. */
4790 gfc_conv_descriptor_size (tree desc, int rank)
4792 return gfc_conv_descriptor_size_1 (desc, 0, rank);
4796 /* Size of a coarray for all dimensions but the last. */
4799 gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
4801 return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
4805 /* Fills in an array descriptor, and returns the size of the array.
4806 The size will be a simple_val, ie a variable or a constant. Also
4807 calculates the offset of the base. The pointer argument overflow,
4808 which should be of integer type, will increase in value if overflow
4809 occurs during the size calculation. Returns the size of the array.
4813 for (n = 0; n < rank; n++)
4815 a.lbound[n] = specified_lower_bound;
4816 offset = offset + a.lbond[n] * stride;
4818 a.ubound[n] = specified_upper_bound;
4819 a.stride[n] = stride;
4820 size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
4821 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
4822 stride = stride * size;
4824 for (n = rank; n < rank+corank; n++)
4825 (Set lcobound/ucobound as above.)
4826 element_size = sizeof (array element);
4829 stride = (size_t) stride;
4830 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
4831 stride = stride * element_size;
4837 gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
4838 gfc_expr ** lower, gfc_expr ** upper, stmtblock_t * pblock,
4839 stmtblock_t * descriptor_block, tree * overflow,
4840 tree expr3_elem_size, tree *nelems, gfc_expr *expr3)
4853 stmtblock_t thenblock;
4854 stmtblock_t elseblock;
4859 type = TREE_TYPE (descriptor);
4861 stride = gfc_index_one_node;
4862 offset = gfc_index_zero_node;
4864 /* Set the dtype. */
4865 tmp = gfc_conv_descriptor_dtype (descriptor);
4866 gfc_add_modify (descriptor_block, tmp, gfc_get_dtype (TREE_TYPE (descriptor)));
4868 or_expr = boolean_false_node;
4870 for (n = 0; n < rank; n++)
4875 /* We have 3 possibilities for determining the size of the array:
4876 lower == NULL => lbound = 1, ubound = upper[n]
4877 upper[n] = NULL => lbound = 1, ubound = lower[n]
4878 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
4881 /* Set lower bound. */
4882 gfc_init_se (&se, NULL);
4884 se.expr = gfc_index_one_node;
4887 gcc_assert (lower[n]);
4890 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4891 gfc_add_block_to_block (pblock, &se.pre);
4895 se.expr = gfc_index_one_node;
4899 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4900 gfc_rank_cst[n], se.expr);
4901 conv_lbound = se.expr;
4903 /* Work out the offset for this component. */
4904 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4906 offset = fold_build2_loc (input_location, MINUS_EXPR,
4907 gfc_array_index_type, offset, tmp);
4909 /* Set upper bound. */
4910 gfc_init_se (&se, NULL);
4911 gcc_assert (ubound);
4912 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4913 gfc_add_block_to_block (pblock, &se.pre);
4915 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4916 gfc_rank_cst[n], se.expr);
4917 conv_ubound = se.expr;
4919 /* Store the stride. */
4920 gfc_conv_descriptor_stride_set (descriptor_block, descriptor,
4921 gfc_rank_cst[n], stride);
4923 /* Calculate size and check whether extent is negative. */
4924 size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
4925 size = gfc_evaluate_now (size, pblock);
4927 /* Check whether multiplying the stride by the number of
4928 elements in this dimension would overflow. We must also check
4929 whether the current dimension has zero size in order to avoid
4932 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4933 gfc_array_index_type,
4934 fold_convert (gfc_array_index_type,
4935 TYPE_MAX_VALUE (gfc_array_index_type)),
4937 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4938 boolean_type_node, tmp, stride));
4939 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4940 integer_one_node, integer_zero_node);
4941 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4942 boolean_type_node, size,
4943 gfc_index_zero_node));
4944 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4945 integer_zero_node, tmp);
4946 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4948 *overflow = gfc_evaluate_now (tmp, pblock);
4950 /* Multiply the stride by the number of elements in this dimension. */
4951 stride = fold_build2_loc (input_location, MULT_EXPR,
4952 gfc_array_index_type, stride, size);
4953 stride = gfc_evaluate_now (stride, pblock);
4956 for (n = rank; n < rank + corank; n++)
4960 /* Set lower bound. */
4961 gfc_init_se (&se, NULL);
4962 if (lower == NULL || lower[n] == NULL)
4964 gcc_assert (n == rank + corank - 1);
4965 se.expr = gfc_index_one_node;
4969 if (ubound || n == rank + corank - 1)
4971 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4972 gfc_add_block_to_block (pblock, &se.pre);
4976 se.expr = gfc_index_one_node;
4980 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4981 gfc_rank_cst[n], se.expr);
4983 if (n < rank + corank - 1)
4985 gfc_init_se (&se, NULL);
4986 gcc_assert (ubound);
4987 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4988 gfc_add_block_to_block (pblock, &se.pre);
4989 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4990 gfc_rank_cst[n], se.expr);
4994 /* The stride is the number of elements in the array, so multiply by the
4995 size of an element to get the total size. Obviously, if there is a
4996 SOURCE expression (expr3) we must use its element size. */
4997 if (expr3_elem_size != NULL_TREE)
4998 tmp = expr3_elem_size;
4999 else if (expr3 != NULL)
5001 if (expr3->ts.type == BT_CLASS)
5004 gfc_expr *sz = gfc_copy_expr (expr3);
5005 gfc_add_vptr_component (sz);
5006 gfc_add_size_component (sz);
5007 gfc_init_se (&se_sz, NULL);
5008 gfc_conv_expr (&se_sz, sz);
5014 tmp = gfc_typenode_for_spec (&expr3->ts);
5015 tmp = TYPE_SIZE_UNIT (tmp);
5019 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
5021 /* Convert to size_t. */
5022 element_size = fold_convert (size_type_node, tmp);
5025 return element_size;
5027 *nelems = gfc_evaluate_now (stride, pblock);
5028 stride = fold_convert (size_type_node, stride);
5030 /* First check for overflow. Since an array of type character can
5031 have zero element_size, we must check for that before
5033 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
5035 TYPE_MAX_VALUE (size_type_node), element_size);
5036 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
5037 boolean_type_node, tmp, stride));
5038 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5039 integer_one_node, integer_zero_node);
5040 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
5041 boolean_type_node, element_size,
5042 build_int_cst (size_type_node, 0)));
5043 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5044 integer_zero_node, tmp);
5045 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
5047 *overflow = gfc_evaluate_now (tmp, pblock);
5049 size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
5050 stride, element_size);
5052 if (poffset != NULL)
5054 offset = gfc_evaluate_now (offset, pblock);
5058 if (integer_zerop (or_expr))
5060 if (integer_onep (or_expr))
5061 return build_int_cst (size_type_node, 0);
5063 var = gfc_create_var (TREE_TYPE (size), "size");
5064 gfc_start_block (&thenblock);
5065 gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
5066 thencase = gfc_finish_block (&thenblock);
5068 gfc_start_block (&elseblock);
5069 gfc_add_modify (&elseblock, var, size);
5070 elsecase = gfc_finish_block (&elseblock);
5072 tmp = gfc_evaluate_now (or_expr, pblock);
5073 tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
5074 gfc_add_expr_to_block (pblock, tmp);
5080 /* Initializes the descriptor and generates a call to _gfor_allocate. Does
5081 the work for an ALLOCATE statement. */
5085 gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree status, tree errmsg,
5086 tree errlen, tree label_finish, tree expr3_elem_size,
5087 tree *nelems, gfc_expr *expr3)
5091 tree offset = NULL_TREE;
5092 tree token = NULL_TREE;
5095 tree error = NULL_TREE;
5096 tree overflow; /* Boolean storing whether size calculation overflows. */
5097 tree var_overflow = NULL_TREE;
5099 tree set_descriptor;
5100 stmtblock_t set_descriptor_block;
5101 stmtblock_t elseblock;
5104 gfc_ref *ref, *prev_ref = NULL;
5105 bool allocatable, coarray, dimension;
5109 /* Find the last reference in the chain. */
5110 while (ref && ref->next != NULL)
5112 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
5113 || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
5118 if (ref == NULL || ref->type != REF_ARRAY)
5123 allocatable = expr->symtree->n.sym->attr.allocatable;
5124 coarray = expr->symtree->n.sym->attr.codimension;
5125 dimension = expr->symtree->n.sym->attr.dimension;
5129 allocatable = prev_ref->u.c.component->attr.allocatable;
5130 coarray = prev_ref->u.c.component->attr.codimension;
5131 dimension = prev_ref->u.c.component->attr.dimension;
5135 gcc_assert (coarray);
5137 /* Figure out the size of the array. */
5138 switch (ref->u.ar.type)
5144 upper = ref->u.ar.start;
5150 lower = ref->u.ar.start;
5151 upper = ref->u.ar.end;
5155 gcc_assert (ref->u.ar.as->type == AS_EXPLICIT);
5157 lower = ref->u.ar.as->lower;
5158 upper = ref->u.ar.as->upper;
5166 overflow = integer_zero_node;
5168 gfc_init_block (&set_descriptor_block);
5169 size = gfc_array_init_size (se->expr, ref->u.ar.as->rank,
5170 ref->u.ar.as->corank, &offset, lower, upper,
5171 &se->pre, &set_descriptor_block, &overflow,
5172 expr3_elem_size, nelems, expr3);
5177 var_overflow = gfc_create_var (integer_type_node, "overflow");
5178 gfc_add_modify (&se->pre, var_overflow, overflow);
5180 /* Generate the block of code handling overflow. */
5181 msg = gfc_build_addr_expr (pchar_type_node,
5182 gfc_build_localized_cstring_const
5183 ("Integer overflow when calculating the amount of "
5184 "memory to allocate"));
5185 error = build_call_expr_loc (input_location, gfor_fndecl_runtime_error,
5189 if (status != NULL_TREE)
5191 tree status_type = TREE_TYPE (status);
5192 stmtblock_t set_status_block;
5194 gfc_start_block (&set_status_block);
5195 gfc_add_modify (&set_status_block, status,
5196 build_int_cst (status_type, LIBERROR_ALLOCATION));
5197 error = gfc_finish_block (&set_status_block);
5200 gfc_start_block (&elseblock);
5202 /* Allocate memory to store the data. */
5203 if (POINTER_TYPE_P (TREE_TYPE (se->expr)))
5204 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
5206 pointer = gfc_conv_descriptor_data_get (se->expr);
5207 STRIP_NOPS (pointer);
5209 if (coarray && gfc_option.coarray == GFC_FCOARRAY_LIB)
5210 token = gfc_build_addr_expr (NULL_TREE,
5211 gfc_conv_descriptor_token (se->expr));
5213 /* The allocatable variant takes the old pointer as first argument. */
5215 gfc_allocate_allocatable (&elseblock, pointer, size, token,
5216 status, errmsg, errlen, label_finish, expr);
5218 gfc_allocate_using_malloc (&elseblock, pointer, size, status);
5222 cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR,
5223 boolean_type_node, var_overflow, integer_zero_node));
5224 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
5225 error, gfc_finish_block (&elseblock));
5228 tmp = gfc_finish_block (&elseblock);
5230 gfc_add_expr_to_block (&se->pre, tmp);
5232 if (expr->ts.type == BT_CLASS)
5234 tmp = build_int_cst (unsigned_char_type_node, 0);
5235 /* With class objects, it is best to play safe and null the
5236 memory because we cannot know if dynamic types have allocatable
5237 components or not. */
5238 tmp = build_call_expr_loc (input_location,
5239 builtin_decl_explicit (BUILT_IN_MEMSET),
5240 3, pointer, tmp, size);
5241 gfc_add_expr_to_block (&se->pre, tmp);
5244 /* Update the array descriptors. */
5246 gfc_conv_descriptor_offset_set (&set_descriptor_block, se->expr, offset);
5248 set_descriptor = gfc_finish_block (&set_descriptor_block);
5249 if (status != NULL_TREE)
5251 cond = fold_build2_loc (input_location, EQ_EXPR,
5252 boolean_type_node, status,
5253 build_int_cst (TREE_TYPE (status), 0));
5254 gfc_add_expr_to_block (&se->pre,
5255 fold_build3_loc (input_location, COND_EXPR, void_type_node,
5256 gfc_likely (cond), set_descriptor,
5257 build_empty_stmt (input_location)));
5260 gfc_add_expr_to_block (&se->pre, set_descriptor);
5262 if ((expr->ts.type == BT_DERIVED)
5263 && expr->ts.u.derived->attr.alloc_comp)
5265 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
5266 ref->u.ar.as->rank);
5267 gfc_add_expr_to_block (&se->pre, tmp);
5274 /* Deallocate an array variable. Also used when an allocated variable goes
5279 gfc_array_deallocate (tree descriptor, tree pstat, tree errmsg, tree errlen,
5280 tree label_finish, gfc_expr* expr)
5285 bool coarray = gfc_is_coarray (expr);
5287 gfc_start_block (&block);
5289 /* Get a pointer to the data. */
5290 var = gfc_conv_descriptor_data_get (descriptor);
5293 /* Parameter is the address of the data component. */
5294 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, pstat, errmsg,
5295 errlen, label_finish, false, expr, coarray);
5296 gfc_add_expr_to_block (&block, tmp);
5298 /* Zero the data pointer; only for coarrays an error can occur and then
5299 the allocation status may not be changed. */
5300 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5301 var, build_int_cst (TREE_TYPE (var), 0));
5302 if (pstat != NULL_TREE && coarray && gfc_option.coarray == GFC_FCOARRAY_LIB)
5305 tree stat = build_fold_indirect_ref_loc (input_location, pstat);
5307 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5308 stat, build_int_cst (TREE_TYPE (stat), 0));
5309 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5310 cond, tmp, build_empty_stmt (input_location));
5313 gfc_add_expr_to_block (&block, tmp);
5315 return gfc_finish_block (&block);
5319 /* Create an array constructor from an initialization expression.
5320 We assume the frontend already did any expansions and conversions. */
5323 gfc_conv_array_initializer (tree type, gfc_expr * expr)
5329 unsigned HOST_WIDE_INT lo;
5331 vec<constructor_elt, va_gc> *v = NULL;
5333 if (expr->expr_type == EXPR_VARIABLE
5334 && expr->symtree->n.sym->attr.flavor == FL_PARAMETER
5335 && expr->symtree->n.sym->value)
5336 expr = expr->symtree->n.sym->value;
5338 switch (expr->expr_type)
5341 case EXPR_STRUCTURE:
5342 /* A single scalar or derived type value. Create an array with all
5343 elements equal to that value. */
5344 gfc_init_se (&se, NULL);
5346 if (expr->expr_type == EXPR_CONSTANT)
5347 gfc_conv_constant (&se, expr);
5349 gfc_conv_structure (&se, expr, 1);
5351 tmp = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5352 gcc_assert (tmp && INTEGER_CST_P (tmp));
5353 hi = TREE_INT_CST_HIGH (tmp);
5354 lo = TREE_INT_CST_LOW (tmp);
5358 /* This will probably eat buckets of memory for large arrays. */
5359 while (hi != 0 || lo != 0)
5361 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
5369 /* Create a vector of all the elements. */
5370 for (c = gfc_constructor_first (expr->value.constructor);
5371 c; c = gfc_constructor_next (c))
5375 /* Problems occur when we get something like
5376 integer :: a(lots) = (/(i, i=1, lots)/) */
5377 gfc_fatal_error ("The number of elements in the array constructor "
5378 "at %L requires an increase of the allowed %d "
5379 "upper limit. See -fmax-array-constructor "
5380 "option", &expr->where,
5381 gfc_option.flag_max_array_constructor);
5384 if (mpz_cmp_si (c->offset, 0) != 0)
5385 index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
5389 if (mpz_cmp_si (c->repeat, 1) > 0)
5395 mpz_add (maxval, c->offset, c->repeat);
5396 mpz_sub_ui (maxval, maxval, 1);
5397 tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
5398 if (mpz_cmp_si (c->offset, 0) != 0)
5400 mpz_add_ui (maxval, c->offset, 1);
5401 tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
5404 tmp1 = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
5406 range = fold_build2 (RANGE_EXPR, gfc_array_index_type, tmp1, tmp2);
5412 gfc_init_se (&se, NULL);
5413 switch (c->expr->expr_type)
5416 gfc_conv_constant (&se, c->expr);
5419 case EXPR_STRUCTURE:
5420 gfc_conv_structure (&se, c->expr, 1);
5424 /* Catch those occasional beasts that do not simplify
5425 for one reason or another, assuming that if they are
5426 standard defying the frontend will catch them. */
5427 gfc_conv_expr (&se, c->expr);
5431 if (range == NULL_TREE)
5432 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
5435 if (index != NULL_TREE)
5436 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
5437 CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
5443 return gfc_build_null_descriptor (type);
5449 /* Create a constructor from the list of elements. */
5450 tmp = build_constructor (type, v);
5451 TREE_CONSTANT (tmp) = 1;
5456 /* Generate code to evaluate non-constant coarray cobounds. */
5459 gfc_trans_array_cobounds (tree type, stmtblock_t * pblock,
5460 const gfc_symbol *sym)
5470 for (dim = as->rank; dim < as->rank + as->corank; dim++)
5472 /* Evaluate non-constant array bound expressions. */
5473 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
5474 if (as->lower[dim] && !INTEGER_CST_P (lbound))
5476 gfc_init_se (&se, NULL);
5477 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
5478 gfc_add_block_to_block (pblock, &se.pre);
5479 gfc_add_modify (pblock, lbound, se.expr);
5481 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
5482 if (as->upper[dim] && !INTEGER_CST_P (ubound))
5484 gfc_init_se (&se, NULL);
5485 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
5486 gfc_add_block_to_block (pblock, &se.pre);
5487 gfc_add_modify (pblock, ubound, se.expr);
5493 /* Generate code to evaluate non-constant array bounds. Sets *poffset and
5494 returns the size (in elements) of the array. */
5497 gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
5498 stmtblock_t * pblock)
5513 size = gfc_index_one_node;
5514 offset = gfc_index_zero_node;
5515 for (dim = 0; dim < as->rank; dim++)
5517 /* Evaluate non-constant array bound expressions. */
5518 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
5519 if (as->lower[dim] && !INTEGER_CST_P (lbound))
5521 gfc_init_se (&se, NULL);
5522 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
5523 gfc_add_block_to_block (pblock, &se.pre);
5524 gfc_add_modify (pblock, lbound, se.expr);
5526 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
5527 if (as->upper[dim] && !INTEGER_CST_P (ubound))
5529 gfc_init_se (&se, NULL);
5530 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
5531 gfc_add_block_to_block (pblock, &se.pre);
5532 gfc_add_modify (pblock, ubound, se.expr);
5534 /* The offset of this dimension. offset = offset - lbound * stride. */
5535 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5537 offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
5540 /* The size of this dimension, and the stride of the next. */
5541 if (dim + 1 < as->rank)
5542 stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
5544 stride = GFC_TYPE_ARRAY_SIZE (type);
5546 if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
5548 /* Calculate stride = size * (ubound + 1 - lbound). */
5549 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5550 gfc_array_index_type,
5551 gfc_index_one_node, lbound);
5552 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5553 gfc_array_index_type, ubound, tmp);
5554 tmp = fold_build2_loc (input_location, MULT_EXPR,
5555 gfc_array_index_type, size, tmp);
5557 gfc_add_modify (pblock, stride, tmp);
5559 stride = gfc_evaluate_now (tmp, pblock);
5561 /* Make sure that negative size arrays are translated
5562 to being zero size. */
5563 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
5564 stride, gfc_index_zero_node);
5565 tmp = fold_build3_loc (input_location, COND_EXPR,
5566 gfc_array_index_type, tmp,
5567 stride, gfc_index_zero_node);
5568 gfc_add_modify (pblock, stride, tmp);
5574 gfc_trans_array_cobounds (type, pblock, sym);
5575 gfc_trans_vla_type_sizes (sym, pblock);
5582 /* Generate code to initialize/allocate an array variable. */
5585 gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
5586 gfc_wrapped_block * block)
5590 tree tmp = NULL_TREE;
5597 gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
5599 /* Do nothing for USEd variables. */
5600 if (sym->attr.use_assoc)
5603 type = TREE_TYPE (decl);
5604 gcc_assert (GFC_ARRAY_TYPE_P (type));
5605 onstack = TREE_CODE (type) != POINTER_TYPE;
5607 gfc_init_block (&init);
5609 /* Evaluate character string length. */
5610 if (sym->ts.type == BT_CHARACTER
5611 && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5613 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5615 gfc_trans_vla_type_sizes (sym, &init);
5617 /* Emit a DECL_EXPR for this variable, which will cause the
5618 gimplifier to allocate storage, and all that good stuff. */
5619 tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
5620 gfc_add_expr_to_block (&init, tmp);
5625 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5629 type = TREE_TYPE (type);
5631 gcc_assert (!sym->attr.use_assoc);
5632 gcc_assert (!TREE_STATIC (decl));
5633 gcc_assert (!sym->module);
5635 if (sym->ts.type == BT_CHARACTER
5636 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5637 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5639 size = gfc_trans_array_bounds (type, sym, &offset, &init);
5641 /* Don't actually allocate space for Cray Pointees. */
5642 if (sym->attr.cray_pointee)
5644 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5645 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5647 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5651 if (gfc_option.flag_stack_arrays)
5653 gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
5654 space = build_decl (sym->declared_at.lb->location,
5655 VAR_DECL, create_tmp_var_name ("A"),
5656 TREE_TYPE (TREE_TYPE (decl)));
5657 gfc_trans_vla_type_sizes (sym, &init);
5661 /* The size is the number of elements in the array, so multiply by the
5662 size of an element to get the total size. */
5663 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
5664 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5665 size, fold_convert (gfc_array_index_type, tmp));
5667 /* Allocate memory to hold the data. */
5668 tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
5669 gfc_add_modify (&init, decl, tmp);
5671 /* Free the temporary. */
5672 tmp = gfc_call_free (convert (pvoid_type_node, decl));
5676 /* Set offset of the array. */
5677 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5678 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5680 /* Automatic arrays should not have initializers. */
5681 gcc_assert (!sym->value);
5683 inittree = gfc_finish_block (&init);
5690 /* Don't create new scope, emit the DECL_EXPR in exactly the scope
5691 where also space is located. */
5692 gfc_init_block (&init);
5693 tmp = fold_build1_loc (input_location, DECL_EXPR,
5694 TREE_TYPE (space), space);
5695 gfc_add_expr_to_block (&init, tmp);
5696 addr = fold_build1_loc (sym->declared_at.lb->location,
5697 ADDR_EXPR, TREE_TYPE (decl), space);
5698 gfc_add_modify (&init, decl, addr);
5699 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5702 gfc_add_init_cleanup (block, inittree, tmp);
5706 /* Generate entry and exit code for g77 calling convention arrays. */
5709 gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
5719 gfc_save_backend_locus (&loc);
5720 gfc_set_backend_locus (&sym->declared_at);
5722 /* Descriptor type. */
5723 parm = sym->backend_decl;
5724 type = TREE_TYPE (parm);
5725 gcc_assert (GFC_ARRAY_TYPE_P (type));
5727 gfc_start_block (&init);
5729 if (sym->ts.type == BT_CHARACTER
5730 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5731 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5733 /* Evaluate the bounds of the array. */
5734 gfc_trans_array_bounds (type, sym, &offset, &init);
5736 /* Set the offset. */
5737 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5738 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5740 /* Set the pointer itself if we aren't using the parameter directly. */
5741 if (TREE_CODE (parm) != PARM_DECL)
5743 tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
5744 gfc_add_modify (&init, parm, tmp);
5746 stmt = gfc_finish_block (&init);
5748 gfc_restore_backend_locus (&loc);
5750 /* Add the initialization code to the start of the function. */
5752 if (sym->attr.optional || sym->attr.not_always_present)
5754 tmp = gfc_conv_expr_present (sym);
5755 stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
5758 gfc_add_init_cleanup (block, stmt, NULL_TREE);
5762 /* Modify the descriptor of an array parameter so that it has the
5763 correct lower bound. Also move the upper bound accordingly.
5764 If the array is not packed, it will be copied into a temporary.
5765 For each dimension we set the new lower and upper bounds. Then we copy the
5766 stride and calculate the offset for this dimension. We also work out
5767 what the stride of a packed array would be, and see it the two match.
5768 If the array need repacking, we set the stride to the values we just
5769 calculated, recalculate the offset and copy the array data.
5770 Code is also added to copy the data back at the end of the function.
5774 gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
5775 gfc_wrapped_block * block)
5782 tree stmtInit, stmtCleanup;
5789 tree stride, stride2;
5799 /* Do nothing for pointer and allocatable arrays. */
5800 if (sym->attr.pointer || sym->attr.allocatable)
5803 if (sym->attr.dummy && gfc_is_nodesc_array (sym))
5805 gfc_trans_g77_array (sym, block);
5809 gfc_save_backend_locus (&loc);
5810 gfc_set_backend_locus (&sym->declared_at);
5812 /* Descriptor type. */
5813 type = TREE_TYPE (tmpdesc);
5814 gcc_assert (GFC_ARRAY_TYPE_P (type));
5815 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5816 dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
5817 gfc_start_block (&init);
5819 if (sym->ts.type == BT_CHARACTER
5820 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5821 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5823 checkparm = (sym->as->type == AS_EXPLICIT
5824 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
5826 no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
5827 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
5829 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
5831 /* For non-constant shape arrays we only check if the first dimension
5832 is contiguous. Repacking higher dimensions wouldn't gain us
5833 anything as we still don't know the array stride. */
5834 partial = gfc_create_var (boolean_type_node, "partial");
5835 TREE_USED (partial) = 1;
5836 tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5837 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
5838 gfc_index_one_node);
5839 gfc_add_modify (&init, partial, tmp);
5842 partial = NULL_TREE;
5844 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
5845 here, however I think it does the right thing. */
5848 /* Set the first stride. */
5849 stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5850 stride = gfc_evaluate_now (stride, &init);
5852 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5853 stride, gfc_index_zero_node);
5854 tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
5855 tmp, gfc_index_one_node, stride);
5856 stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
5857 gfc_add_modify (&init, stride, tmp);
5859 /* Allow the user to disable array repacking. */
5860 stmt_unpacked = NULL_TREE;
5864 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
5865 /* A library call to repack the array if necessary. */
5866 tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5867 stmt_unpacked = build_call_expr_loc (input_location,
5868 gfor_fndecl_in_pack, 1, tmp);
5870 stride = gfc_index_one_node;
5872 if (gfc_option.warn_array_temp)
5873 gfc_warning ("Creating array temporary at %L", &loc);
5876 /* This is for the case where the array data is used directly without
5877 calling the repack function. */
5878 if (no_repack || partial != NULL_TREE)
5879 stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
5881 stmt_packed = NULL_TREE;
5883 /* Assign the data pointer. */
5884 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5886 /* Don't repack unknown shape arrays when the first stride is 1. */
5887 tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
5888 partial, stmt_packed, stmt_unpacked);
5891 tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
5892 gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
5894 offset = gfc_index_zero_node;
5895 size = gfc_index_one_node;
5897 /* Evaluate the bounds of the array. */
5898 for (n = 0; n < sym->as->rank; n++)
5900 if (checkparm || !sym->as->upper[n])
5902 /* Get the bounds of the actual parameter. */
5903 dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
5904 dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
5908 dubound = NULL_TREE;
5909 dlbound = NULL_TREE;
5912 lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
5913 if (!INTEGER_CST_P (lbound))
5915 gfc_init_se (&se, NULL);
5916 gfc_conv_expr_type (&se, sym->as->lower[n],
5917 gfc_array_index_type);
5918 gfc_add_block_to_block (&init, &se.pre);
5919 gfc_add_modify (&init, lbound, se.expr);
5922 ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
5923 /* Set the desired upper bound. */
5924 if (sym->as->upper[n])
5926 /* We know what we want the upper bound to be. */
5927 if (!INTEGER_CST_P (ubound))
5929 gfc_init_se (&se, NULL);
5930 gfc_conv_expr_type (&se, sym->as->upper[n],
5931 gfc_array_index_type);
5932 gfc_add_block_to_block (&init, &se.pre);
5933 gfc_add_modify (&init, ubound, se.expr);
5936 /* Check the sizes match. */
5939 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
5943 temp = fold_build2_loc (input_location, MINUS_EXPR,
5944 gfc_array_index_type, ubound, lbound);
5945 temp = fold_build2_loc (input_location, PLUS_EXPR,
5946 gfc_array_index_type,
5947 gfc_index_one_node, temp);
5948 stride2 = fold_build2_loc (input_location, MINUS_EXPR,
5949 gfc_array_index_type, dubound,
5951 stride2 = fold_build2_loc (input_location, PLUS_EXPR,
5952 gfc_array_index_type,
5953 gfc_index_one_node, stride2);
5954 tmp = fold_build2_loc (input_location, NE_EXPR,
5955 gfc_array_index_type, temp, stride2);
5956 asprintf (&msg, "Dimension %d of array '%s' has extent "
5957 "%%ld instead of %%ld", n+1, sym->name);
5959 gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
5960 fold_convert (long_integer_type_node, temp),
5961 fold_convert (long_integer_type_node, stride2));
5968 /* For assumed shape arrays move the upper bound by the same amount
5969 as the lower bound. */
5970 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5971 gfc_array_index_type, dubound, dlbound);
5972 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5973 gfc_array_index_type, tmp, lbound);
5974 gfc_add_modify (&init, ubound, tmp);
5976 /* The offset of this dimension. offset = offset - lbound * stride. */
5977 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5979 offset = fold_build2_loc (input_location, MINUS_EXPR,
5980 gfc_array_index_type, offset, tmp);
5982 /* The size of this dimension, and the stride of the next. */
5983 if (n + 1 < sym->as->rank)
5985 stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
5987 if (no_repack || partial != NULL_TREE)
5989 gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
5991 /* Figure out the stride if not a known constant. */
5992 if (!INTEGER_CST_P (stride))
5995 stmt_packed = NULL_TREE;
5998 /* Calculate stride = size * (ubound + 1 - lbound). */
5999 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6000 gfc_array_index_type,
6001 gfc_index_one_node, lbound);
6002 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6003 gfc_array_index_type, ubound, tmp);
6004 size = fold_build2_loc (input_location, MULT_EXPR,
6005 gfc_array_index_type, size, tmp);
6009 /* Assign the stride. */
6010 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
6011 tmp = fold_build3_loc (input_location, COND_EXPR,
6012 gfc_array_index_type, partial,
6013 stmt_unpacked, stmt_packed);
6015 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
6016 gfc_add_modify (&init, stride, tmp);
6021 stride = GFC_TYPE_ARRAY_SIZE (type);
6023 if (stride && !INTEGER_CST_P (stride))
6025 /* Calculate size = stride * (ubound + 1 - lbound). */
6026 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6027 gfc_array_index_type,
6028 gfc_index_one_node, lbound);
6029 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6030 gfc_array_index_type,
6032 tmp = fold_build2_loc (input_location, MULT_EXPR,
6033 gfc_array_index_type,
6034 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
6035 gfc_add_modify (&init, stride, tmp);
6040 gfc_trans_array_cobounds (type, &init, sym);
6042 /* Set the offset. */
6043 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
6044 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
6046 gfc_trans_vla_type_sizes (sym, &init);
6048 stmtInit = gfc_finish_block (&init);
6050 /* Only do the entry/initialization code if the arg is present. */
6051 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
6052 optional_arg = (sym->attr.optional
6053 || (sym->ns->proc_name->attr.entry_master
6054 && sym->attr.dummy));
6057 tmp = gfc_conv_expr_present (sym);
6058 stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
6059 build_empty_stmt (input_location));
6064 stmtCleanup = NULL_TREE;
6067 stmtblock_t cleanup;
6068 gfc_start_block (&cleanup);
6070 if (sym->attr.intent != INTENT_IN)
6072 /* Copy the data back. */
6073 tmp = build_call_expr_loc (input_location,
6074 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
6075 gfc_add_expr_to_block (&cleanup, tmp);
6078 /* Free the temporary. */
6079 tmp = gfc_call_free (tmpdesc);
6080 gfc_add_expr_to_block (&cleanup, tmp);
6082 stmtCleanup = gfc_finish_block (&cleanup);
6084 /* Only do the cleanup if the array was repacked. */
6085 tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
6086 tmp = gfc_conv_descriptor_data_get (tmp);
6087 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6089 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
6090 build_empty_stmt (input_location));
6094 tmp = gfc_conv_expr_present (sym);
6095 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
6096 build_empty_stmt (input_location));
6100 /* We don't need to free any memory allocated by internal_pack as it will
6101 be freed at the end of the function by pop_context. */
6102 gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
6104 gfc_restore_backend_locus (&loc);
6108 /* Calculate the overall offset, including subreferences. */
6110 gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
6111 bool subref, gfc_expr *expr)
6121 /* If offset is NULL and this is not a subreferenced array, there is
6123 if (offset == NULL_TREE)
6126 offset = gfc_index_zero_node;
6131 tmp = build_array_ref (desc, offset, NULL);
6133 /* Offset the data pointer for pointer assignments from arrays with
6134 subreferences; e.g. my_integer => my_type(:)%integer_component. */
6137 /* Go past the array reference. */
6138 for (ref = expr->ref; ref; ref = ref->next)
6139 if (ref->type == REF_ARRAY &&
6140 ref->u.ar.type != AR_ELEMENT)
6146 /* Calculate the offset for each subsequent subreference. */
6147 for (; ref; ref = ref->next)
6152 field = ref->u.c.component->backend_decl;
6153 gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
6154 tmp = fold_build3_loc (input_location, COMPONENT_REF,
6156 tmp, field, NULL_TREE);
6160 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
6161 gfc_init_se (&start, NULL);
6162 gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
6163 gfc_add_block_to_block (block, &start.pre);
6164 tmp = gfc_build_array_ref (tmp, start.expr, NULL);
6168 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
6169 && ref->u.ar.type == AR_ELEMENT);
6171 /* TODO - Add bounds checking. */
6172 stride = gfc_index_one_node;
6173 index = gfc_index_zero_node;
6174 for (n = 0; n < ref->u.ar.dimen; n++)
6179 /* Update the index. */
6180 gfc_init_se (&start, NULL);
6181 gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
6182 itmp = gfc_evaluate_now (start.expr, block);
6183 gfc_init_se (&start, NULL);
6184 gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
6185 jtmp = gfc_evaluate_now (start.expr, block);
6186 itmp = fold_build2_loc (input_location, MINUS_EXPR,
6187 gfc_array_index_type, itmp, jtmp);
6188 itmp = fold_build2_loc (input_location, MULT_EXPR,
6189 gfc_array_index_type, itmp, stride);
6190 index = fold_build2_loc (input_location, PLUS_EXPR,
6191 gfc_array_index_type, itmp, index);
6192 index = gfc_evaluate_now (index, block);
6194 /* Update the stride. */
6195 gfc_init_se (&start, NULL);
6196 gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
6197 itmp = fold_build2_loc (input_location, MINUS_EXPR,
6198 gfc_array_index_type, start.expr,
6200 itmp = fold_build2_loc (input_location, PLUS_EXPR,
6201 gfc_array_index_type,
6202 gfc_index_one_node, itmp);
6203 stride = fold_build2_loc (input_location, MULT_EXPR,
6204 gfc_array_index_type, stride, itmp);
6205 stride = gfc_evaluate_now (stride, block);
6208 /* Apply the index to obtain the array element. */
6209 tmp = gfc_build_array_ref (tmp, index, NULL);
6219 /* Set the target data pointer. */
6220 offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
6221 gfc_conv_descriptor_data_set (block, parm, offset);
6225 /* gfc_conv_expr_descriptor needs the string length an expression
6226 so that the size of the temporary can be obtained. This is done
6227 by adding up the string lengths of all the elements in the
6228 expression. Function with non-constant expressions have their
6229 string lengths mapped onto the actual arguments using the
6230 interface mapping machinery in trans-expr.c. */
6232 get_array_charlen (gfc_expr *expr, gfc_se *se)
6234 gfc_interface_mapping mapping;
6235 gfc_formal_arglist *formal;
6236 gfc_actual_arglist *arg;
6239 if (expr->ts.u.cl->length
6240 && gfc_is_constant_expr (expr->ts.u.cl->length))
6242 if (!expr->ts.u.cl->backend_decl)
6243 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6247 switch (expr->expr_type)
6250 get_array_charlen (expr->value.op.op1, se);
6252 /* For parentheses the expression ts.u.cl is identical. */
6253 if (expr->value.op.op == INTRINSIC_PARENTHESES)
6256 expr->ts.u.cl->backend_decl =
6257 gfc_create_var (gfc_charlen_type_node, "sln");
6259 if (expr->value.op.op2)
6261 get_array_charlen (expr->value.op.op2, se);
6263 gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);
6265 /* Add the string lengths and assign them to the expression
6266 string length backend declaration. */
6267 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
6268 fold_build2_loc (input_location, PLUS_EXPR,
6269 gfc_charlen_type_node,
6270 expr->value.op.op1->ts.u.cl->backend_decl,
6271 expr->value.op.op2->ts.u.cl->backend_decl));
6274 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
6275 expr->value.op.op1->ts.u.cl->backend_decl);
6279 if (expr->value.function.esym == NULL
6280 || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
6282 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6286 /* Map expressions involving the dummy arguments onto the actual
6287 argument expressions. */
6288 gfc_init_interface_mapping (&mapping);
6289 formal = gfc_sym_get_dummy_args (expr->symtree->n.sym);
6290 arg = expr->value.function.actual;
6292 /* Set se = NULL in the calls to the interface mapping, to suppress any
6294 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
6299 gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
6302 gfc_init_se (&tse, NULL);
6304 /* Build the expression for the character length and convert it. */
6305 gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);
6307 gfc_add_block_to_block (&se->pre, &tse.pre);
6308 gfc_add_block_to_block (&se->post, &tse.post);
6309 tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
6310 tse.expr = fold_build2_loc (input_location, MAX_EXPR,
6311 gfc_charlen_type_node, tse.expr,
6312 build_int_cst (gfc_charlen_type_node, 0));
6313 expr->ts.u.cl->backend_decl = tse.expr;
6314 gfc_free_interface_mapping (&mapping);
6318 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6324 /* Helper function to check dimensions. */
6326 transposed_dims (gfc_ss *ss)
6330 for (n = 0; n < ss->dimen; n++)
6331 if (ss->dim[n] != n)
6337 /* Convert the last ref of a scalar coarray from an AR_ELEMENT to an
6338 AR_FULL, suitable for the scalarizer. */
6341 walk_coarray (gfc_expr *e)
6345 gcc_assert (gfc_get_corank (e) > 0);
6347 ss = gfc_walk_expr (e);
6349 /* Fix scalar coarray. */
6350 if (ss == gfc_ss_terminator)
6357 if (ref->type == REF_ARRAY
6358 && ref->u.ar.codimen > 0)
6364 gcc_assert (ref != NULL);
6365 if (ref->u.ar.type == AR_ELEMENT)
6366 ref->u.ar.type = AR_SECTION;
6367 ss = gfc_reverse_ss (gfc_walk_array_ref (ss, e, ref));
6374 /* Convert an array for passing as an actual argument. Expressions and
6375 vector subscripts are evaluated and stored in a temporary, which is then
6376 passed. For whole arrays the descriptor is passed. For array sections
6377 a modified copy of the descriptor is passed, but using the original data.
6379 This function is also used for array pointer assignments, and there
6382 - se->want_pointer && !se->direct_byref
6383 EXPR is an actual argument. On exit, se->expr contains a
6384 pointer to the array descriptor.
6386 - !se->want_pointer && !se->direct_byref
6387 EXPR is an actual argument to an intrinsic function or the
6388 left-hand side of a pointer assignment. On exit, se->expr
6389 contains the descriptor for EXPR.
6391 - !se->want_pointer && se->direct_byref
6392 EXPR is the right-hand side of a pointer assignment and
6393 se->expr is the descriptor for the previously-evaluated
6394 left-hand side. The function creates an assignment from
6398 The se->force_tmp flag disables the non-copying descriptor optimization
6399 that is used for transpose. It may be used in cases where there is an
6400 alias between the transpose argument and another argument in the same
6404 gfc_conv_expr_descriptor (gfc_se *se, gfc_expr *expr)
6407 gfc_ss_type ss_type;
6408 gfc_ss_info *ss_info;
6410 gfc_array_info *info;
6419 bool subref_array_target = false;
6420 gfc_expr *arg, *ss_expr;
6422 if (se->want_coarray)
6423 ss = walk_coarray (expr);
6425 ss = gfc_walk_expr (expr);
6427 gcc_assert (ss != NULL);
6428 gcc_assert (ss != gfc_ss_terminator);
6431 ss_type = ss_info->type;
6432 ss_expr = ss_info->expr;
6434 /* Special case: TRANSPOSE which needs no temporary. */
6435 while (expr->expr_type == EXPR_FUNCTION && expr->value.function.isym
6436 && NULL != (arg = gfc_get_noncopying_intrinsic_argument (expr)))
6438 /* This is a call to transpose which has already been handled by the
6439 scalarizer, so that we just need to get its argument's descriptor. */
6440 gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
6441 expr = expr->value.function.actual->expr;
6444 /* Special case things we know we can pass easily. */
6445 switch (expr->expr_type)
6448 /* If we have a linear array section, we can pass it directly.
6449 Otherwise we need to copy it into a temporary. */
6451 gcc_assert (ss_type == GFC_SS_SECTION);
6452 gcc_assert (ss_expr == expr);
6453 info = &ss_info->data.array;
6455 /* Get the descriptor for the array. */
6456 gfc_conv_ss_descriptor (&se->pre, ss, 0);
6457 desc = info->descriptor;
6459 subref_array_target = se->direct_byref && is_subref_array (expr);
6460 need_tmp = gfc_ref_needs_temporary_p (expr->ref)
6461 && !subref_array_target;
6468 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6470 /* Create a new descriptor if the array doesn't have one. */
6473 else if (info->ref->u.ar.type == AR_FULL || se->descriptor_only)
6475 else if (se->direct_byref)
6478 full = gfc_full_array_ref_p (info->ref, NULL);
6480 if (full && !transposed_dims (ss))
6482 if (se->direct_byref && !se->byref_noassign)
6484 /* Copy the descriptor for pointer assignments. */
6485 gfc_add_modify (&se->pre, se->expr, desc);
6487 /* Add any offsets from subreferences. */
6488 gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
6489 subref_array_target, expr);
6491 else if (se->want_pointer)
6493 /* We pass full arrays directly. This means that pointers and
6494 allocatable arrays should also work. */
6495 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6502 if (expr->ts.type == BT_CHARACTER)
6503 se->string_length = gfc_get_expr_charlen (expr);
6505 gfc_free_ss_chain (ss);
6511 /* A transformational function return value will be a temporary
6512 array descriptor. We still need to go through the scalarizer
6513 to create the descriptor. Elemental functions are handled as
6514 arbitrary expressions, i.e. copy to a temporary. */
6516 if (se->direct_byref)
6518 gcc_assert (ss_type == GFC_SS_FUNCTION && ss_expr == expr);
6520 /* For pointer assignments pass the descriptor directly. */
6524 gcc_assert (se->ss == ss);
6525 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
6526 gfc_conv_expr (se, expr);
6527 gfc_free_ss_chain (ss);
6531 if (ss_expr != expr || ss_type != GFC_SS_FUNCTION)
6533 if (ss_expr != expr)
6534 /* Elemental function. */
6535 gcc_assert ((expr->value.function.esym != NULL
6536 && expr->value.function.esym->attr.elemental)
6537 || (expr->value.function.isym != NULL
6538 && expr->value.function.isym->elemental)
6539 || gfc_inline_intrinsic_function_p (expr));
6541 gcc_assert (ss_type == GFC_SS_INTRINSIC);
6544 if (expr->ts.type == BT_CHARACTER
6545 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
6546 get_array_charlen (expr, se);
6552 /* Transformational function. */
6553 info = &ss_info->data.array;
6559 /* Constant array constructors don't need a temporary. */
6560 if (ss_type == GFC_SS_CONSTRUCTOR
6561 && expr->ts.type != BT_CHARACTER
6562 && gfc_constant_array_constructor_p (expr->value.constructor))
6565 info = &ss_info->data.array;
6575 /* Something complicated. Copy it into a temporary. */
6581 /* If we are creating a temporary, we don't need to bother about aliases
6586 gfc_init_loopinfo (&loop);
6588 /* Associate the SS with the loop. */
6589 gfc_add_ss_to_loop (&loop, ss);
6591 /* Tell the scalarizer not to bother creating loop variables, etc. */
6593 loop.array_parameter = 1;
6595 /* The right-hand side of a pointer assignment mustn't use a temporary. */
6596 gcc_assert (!se->direct_byref);
6598 /* Setup the scalarizing loops and bounds. */
6599 gfc_conv_ss_startstride (&loop);
6603 if (expr->ts.type == BT_CHARACTER && !expr->ts.u.cl->backend_decl)
6604 get_array_charlen (expr, se);
6606 /* Tell the scalarizer to make a temporary. */
6607 loop.temp_ss = gfc_get_temp_ss (gfc_typenode_for_spec (&expr->ts),
6608 ((expr->ts.type == BT_CHARACTER)
6609 ? expr->ts.u.cl->backend_decl
6613 se->string_length = loop.temp_ss->info->string_length;
6614 gcc_assert (loop.temp_ss->dimen == loop.dimen);
6615 gfc_add_ss_to_loop (&loop, loop.temp_ss);
6618 gfc_conv_loop_setup (&loop, & expr->where);
6622 /* Copy into a temporary and pass that. We don't need to copy the data
6623 back because expressions and vector subscripts must be INTENT_IN. */
6624 /* TODO: Optimize passing function return values. */
6628 /* Start the copying loops. */
6629 gfc_mark_ss_chain_used (loop.temp_ss, 1);
6630 gfc_mark_ss_chain_used (ss, 1);
6631 gfc_start_scalarized_body (&loop, &block);
6633 /* Copy each data element. */
6634 gfc_init_se (&lse, NULL);
6635 gfc_copy_loopinfo_to_se (&lse, &loop);
6636 gfc_init_se (&rse, NULL);
6637 gfc_copy_loopinfo_to_se (&rse, &loop);
6639 lse.ss = loop.temp_ss;
6642 gfc_conv_scalarized_array_ref (&lse, NULL);
6643 if (expr->ts.type == BT_CHARACTER)
6645 gfc_conv_expr (&rse, expr);
6646 if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
6647 rse.expr = build_fold_indirect_ref_loc (input_location,
6651 gfc_conv_expr_val (&rse, expr);
6653 gfc_add_block_to_block (&block, &rse.pre);
6654 gfc_add_block_to_block (&block, &lse.pre);
6656 lse.string_length = rse.string_length;
6657 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, true,
6658 expr->expr_type == EXPR_VARIABLE
6659 || expr->expr_type == EXPR_ARRAY, true);
6660 gfc_add_expr_to_block (&block, tmp);
6662 /* Finish the copying loops. */
6663 gfc_trans_scalarizing_loops (&loop, &block);
6665 desc = loop.temp_ss->info->data.array.descriptor;
6667 else if (expr->expr_type == EXPR_FUNCTION && !transposed_dims (ss))
6669 desc = info->descriptor;
6670 se->string_length = ss_info->string_length;
6674 /* We pass sections without copying to a temporary. Make a new
6675 descriptor and point it at the section we want. The loop variable
6676 limits will be the limits of the section.
6677 A function may decide to repack the array to speed up access, but
6678 we're not bothered about that here. */
6679 int dim, ndim, codim;
6687 ndim = info->ref ? info->ref->u.ar.dimen : ss->dimen;
6689 if (se->want_coarray)
6691 gfc_array_ref *ar = &info->ref->u.ar;
6693 codim = gfc_get_corank (expr);
6694 for (n = 0; n < codim - 1; n++)
6696 /* Make sure we are not lost somehow. */
6697 gcc_assert (ar->dimen_type[n + ndim] == DIMEN_THIS_IMAGE);
6699 /* Make sure the call to gfc_conv_section_startstride won't
6700 generate unnecessary code to calculate stride. */
6701 gcc_assert (ar->stride[n + ndim] == NULL);
6703 gfc_conv_section_startstride (&loop.pre, ss, n + ndim);
6704 loop.from[n + loop.dimen] = info->start[n + ndim];
6705 loop.to[n + loop.dimen] = info->end[n + ndim];
6708 gcc_assert (n == codim - 1);
6709 evaluate_bound (&loop.pre, info->start, ar->start,
6710 info->descriptor, n + ndim, true);
6711 loop.from[n + loop.dimen] = info->start[n + ndim];
6716 /* Set the string_length for a character array. */
6717 if (expr->ts.type == BT_CHARACTER)
6718 se->string_length = gfc_get_expr_charlen (expr);
6720 desc = info->descriptor;
6721 if (se->direct_byref && !se->byref_noassign)
6723 /* For pointer assignments we fill in the destination. */
6725 parmtype = TREE_TYPE (parm);
6729 /* Otherwise make a new one. */
6730 parmtype = gfc_get_element_type (TREE_TYPE (desc));
6731 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, codim,
6732 loop.from, loop.to, 0,
6733 GFC_ARRAY_UNKNOWN, false);
6734 parm = gfc_create_var (parmtype, "parm");
6737 offset = gfc_index_zero_node;
6739 /* The following can be somewhat confusing. We have two
6740 descriptors, a new one and the original array.
6741 {parm, parmtype, dim} refer to the new one.
6742 {desc, type, n, loop} refer to the original, which maybe
6743 a descriptorless array.
6744 The bounds of the scalarization are the bounds of the section.
6745 We don't have to worry about numeric overflows when calculating
6746 the offsets because all elements are within the array data. */
6748 /* Set the dtype. */
6749 tmp = gfc_conv_descriptor_dtype (parm);
6750 gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
6752 /* Set offset for assignments to pointer only to zero if it is not
6754 if (se->direct_byref
6755 && info->ref && info->ref->u.ar.type != AR_FULL)
6756 base = gfc_index_zero_node;
6757 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6758 base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
6762 for (n = 0; n < ndim; n++)
6764 stride = gfc_conv_array_stride (desc, n);
6766 /* Work out the offset. */
6768 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6770 gcc_assert (info->subscript[n]
6771 && info->subscript[n]->info->type == GFC_SS_SCALAR);
6772 start = info->subscript[n]->info->data.scalar.value;
6776 /* Evaluate and remember the start of the section. */
6777 start = info->start[n];
6778 stride = gfc_evaluate_now (stride, &loop.pre);
6781 tmp = gfc_conv_array_lbound (desc, n);
6782 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
6784 tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
6786 offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
6790 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6792 /* For elemental dimensions, we only need the offset. */
6796 /* Vector subscripts need copying and are handled elsewhere. */
6798 gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
6800 /* look for the corresponding scalarizer dimension: dim. */
6801 for (dim = 0; dim < ndim; dim++)
6802 if (ss->dim[dim] == n)
6805 /* loop exited early: the DIM being looked for has been found. */
6806 gcc_assert (dim < ndim);
6808 /* Set the new lower bound. */
6809 from = loop.from[dim];
6812 /* If we have an array section or are assigning make sure that
6813 the lower bound is 1. References to the full
6814 array should otherwise keep the original bounds. */
6816 || info->ref->u.ar.type != AR_FULL)
6817 && !integer_onep (from))
6819 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6820 gfc_array_index_type, gfc_index_one_node,
6822 to = fold_build2_loc (input_location, PLUS_EXPR,
6823 gfc_array_index_type, to, tmp);
6824 from = gfc_index_one_node;
6826 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6827 gfc_rank_cst[dim], from);
6829 /* Set the new upper bound. */
6830 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6831 gfc_rank_cst[dim], to);
6833 /* Multiply the stride by the section stride to get the
6835 stride = fold_build2_loc (input_location, MULT_EXPR,
6836 gfc_array_index_type,
6837 stride, info->stride[n]);
6839 if (se->direct_byref
6841 && info->ref->u.ar.type != AR_FULL)
6843 base = fold_build2_loc (input_location, MINUS_EXPR,
6844 TREE_TYPE (base), base, stride);
6846 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6848 tmp = gfc_conv_array_lbound (desc, n);
6849 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6850 TREE_TYPE (base), tmp, loop.from[dim]);
6851 tmp = fold_build2_loc (input_location, MULT_EXPR,
6852 TREE_TYPE (base), tmp,
6853 gfc_conv_array_stride (desc, n));
6854 base = fold_build2_loc (input_location, PLUS_EXPR,
6855 TREE_TYPE (base), tmp, base);
6858 /* Store the new stride. */
6859 gfc_conv_descriptor_stride_set (&loop.pre, parm,
6860 gfc_rank_cst[dim], stride);
6863 for (n = loop.dimen; n < loop.dimen + codim; n++)
6865 from = loop.from[n];
6867 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6868 gfc_rank_cst[n], from);
6869 if (n < loop.dimen + codim - 1)
6870 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6871 gfc_rank_cst[n], to);
6874 if (se->data_not_needed)
6875 gfc_conv_descriptor_data_set (&loop.pre, parm,
6876 gfc_index_zero_node);
6878 /* Point the data pointer at the 1st element in the section. */
6879 gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
6880 subref_array_target, expr);
6882 if ((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6883 && !se->data_not_needed)
6885 /* Set the offset. */
6886 gfc_conv_descriptor_offset_set (&loop.pre, parm, base);
6890 /* Only the callee knows what the correct offset it, so just set
6892 gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
6897 if (!se->direct_byref || se->byref_noassign)
6899 /* Get a pointer to the new descriptor. */
6900 if (se->want_pointer)
6901 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6906 gfc_add_block_to_block (&se->pre, &loop.pre);
6907 gfc_add_block_to_block (&se->post, &loop.post);
6909 /* Cleanup the scalarizer. */
6910 gfc_cleanup_loop (&loop);
6913 /* Helper function for gfc_conv_array_parameter if array size needs to be
6917 array_parameter_size (tree desc, gfc_expr *expr, tree *size)
6920 if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6921 *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
6922 else if (expr->rank > 1)
6923 *size = build_call_expr_loc (input_location,
6924 gfor_fndecl_size0, 1,
6925 gfc_build_addr_expr (NULL, desc));
6928 tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
6929 tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
6931 *size = fold_build2_loc (input_location, MINUS_EXPR,
6932 gfc_array_index_type, ubound, lbound);
6933 *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6934 *size, gfc_index_one_node);
6935 *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
6936 *size, gfc_index_zero_node);
6938 elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
6939 *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6940 *size, fold_convert (gfc_array_index_type, elem));
6943 /* Convert an array for passing as an actual parameter. */
6944 /* TODO: Optimize passing g77 arrays. */
6947 gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, bool g77,
6948 const gfc_symbol *fsym, const char *proc_name,
6953 tree tmp = NULL_TREE;
6955 tree parent = DECL_CONTEXT (current_function_decl);
6956 bool full_array_var;
6957 bool this_array_result;
6960 bool array_constructor;
6961 bool good_allocatable;
6962 bool ultimate_ptr_comp;
6963 bool ultimate_alloc_comp;
6968 ultimate_ptr_comp = false;
6969 ultimate_alloc_comp = false;
6971 for (ref = expr->ref; ref; ref = ref->next)
6973 if (ref->next == NULL)
6976 if (ref->type == REF_COMPONENT)
6978 ultimate_ptr_comp = ref->u.c.component->attr.pointer;
6979 ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
6983 full_array_var = false;
6986 if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
6987 full_array_var = gfc_full_array_ref_p (ref, &contiguous);
6989 sym = full_array_var ? expr->symtree->n.sym : NULL;
6991 /* The symbol should have an array specification. */
6992 gcc_assert (!sym || sym->as || ref->u.ar.as);
6994 if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
6996 get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
6997 expr->ts.u.cl->backend_decl = tmp;
6998 se->string_length = tmp;
7001 /* Is this the result of the enclosing procedure? */
7002 this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
7003 if (this_array_result
7004 && (sym->backend_decl != current_function_decl)
7005 && (sym->backend_decl != parent))
7006 this_array_result = false;
7008 /* Passing address of the array if it is not pointer or assumed-shape. */
7009 if (full_array_var && g77 && !this_array_result
7010 && sym->ts.type != BT_DERIVED && sym->ts.type != BT_CLASS)
7012 tmp = gfc_get_symbol_decl (sym);
7014 if (sym->ts.type == BT_CHARACTER)
7015 se->string_length = sym->ts.u.cl->backend_decl;
7017 if (!sym->attr.pointer
7019 && sym->as->type != AS_ASSUMED_SHAPE
7020 && sym->as->type != AS_DEFERRED
7021 && sym->as->type != AS_ASSUMED_RANK
7022 && !sym->attr.allocatable)
7024 /* Some variables are declared directly, others are declared as
7025 pointers and allocated on the heap. */
7026 if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
7029 se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
7031 array_parameter_size (tmp, expr, size);
7035 if (sym->attr.allocatable)
7037 if (sym->attr.dummy || sym->attr.result)
7039 gfc_conv_expr_descriptor (se, expr);
7043 array_parameter_size (tmp, expr, size);
7044 se->expr = gfc_conv_array_data (tmp);
7049 /* A convenient reduction in scope. */
7050 contiguous = g77 && !this_array_result && contiguous;
7052 /* There is no need to pack and unpack the array, if it is contiguous
7053 and not a deferred- or assumed-shape array, or if it is simply
7055 no_pack = ((sym && sym->as
7056 && !sym->attr.pointer
7057 && sym->as->type != AS_DEFERRED
7058 && sym->as->type != AS_ASSUMED_RANK
7059 && sym->as->type != AS_ASSUMED_SHAPE)
7061 (ref && ref->u.ar.as
7062 && ref->u.ar.as->type != AS_DEFERRED
7063 && ref->u.ar.as->type != AS_ASSUMED_RANK
7064 && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
7066 gfc_is_simply_contiguous (expr, false));
7068 no_pack = contiguous && no_pack;
7070 /* Array constructors are always contiguous and do not need packing. */
7071 array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;
7073 /* Same is true of contiguous sections from allocatable variables. */
7074 good_allocatable = contiguous
7076 && expr->symtree->n.sym->attr.allocatable;
7078 /* Or ultimate allocatable components. */
7079 ultimate_alloc_comp = contiguous && ultimate_alloc_comp;
7081 if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
7083 gfc_conv_expr_descriptor (se, expr);
7084 if (expr->ts.type == BT_CHARACTER)
7085 se->string_length = expr->ts.u.cl->backend_decl;
7087 array_parameter_size (se->expr, expr, size);
7088 se->expr = gfc_conv_array_data (se->expr);
7092 if (this_array_result)
7094 /* Result of the enclosing function. */
7095 gfc_conv_expr_descriptor (se, expr);
7097 array_parameter_size (se->expr, expr, size);
7098 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
7100 if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
7101 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
7102 se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
7109 /* Every other type of array. */
7110 se->want_pointer = 1;
7111 gfc_conv_expr_descriptor (se, expr);
7113 array_parameter_size (build_fold_indirect_ref_loc (input_location,
7118 /* Deallocate the allocatable components of structures that are
7120 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
7121 && expr->ts.u.derived->attr.alloc_comp
7122 && expr->expr_type != EXPR_VARIABLE)
7124 tmp = build_fold_indirect_ref_loc (input_location, se->expr);
7125 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
7127 /* The components shall be deallocated before their containing entity. */
7128 gfc_prepend_expr_to_block (&se->post, tmp);
7131 if (g77 || (fsym && fsym->attr.contiguous
7132 && !gfc_is_simply_contiguous (expr, false)))
7134 tree origptr = NULL_TREE;
7138 /* For contiguous arrays, save the original value of the descriptor. */
7141 origptr = gfc_create_var (pvoid_type_node, "origptr");
7142 tmp = build_fold_indirect_ref_loc (input_location, desc);
7143 tmp = gfc_conv_array_data (tmp);
7144 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7145 TREE_TYPE (origptr), origptr,
7146 fold_convert (TREE_TYPE (origptr), tmp));
7147 gfc_add_expr_to_block (&se->pre, tmp);
7150 /* Repack the array. */
7151 if (gfc_option.warn_array_temp)
7154 gfc_warning ("Creating array temporary at %L for argument '%s'",
7155 &expr->where, fsym->name);
7157 gfc_warning ("Creating array temporary at %L", &expr->where);
7160 ptr = build_call_expr_loc (input_location,
7161 gfor_fndecl_in_pack, 1, desc);
7163 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7165 tmp = gfc_conv_expr_present (sym);
7166 ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
7167 tmp, fold_convert (TREE_TYPE (se->expr), ptr),
7168 fold_convert (TREE_TYPE (se->expr), null_pointer_node));
7171 ptr = gfc_evaluate_now (ptr, &se->pre);
7173 /* Use the packed data for the actual argument, except for contiguous arrays,
7174 where the descriptor's data component is set. */
7179 tmp = build_fold_indirect_ref_loc (input_location, desc);
7180 gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
7183 if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
7187 if (fsym && proc_name)
7188 asprintf (&msg, "An array temporary was created for argument "
7189 "'%s' of procedure '%s'", fsym->name, proc_name);
7191 asprintf (&msg, "An array temporary was created");
7193 tmp = build_fold_indirect_ref_loc (input_location,
7195 tmp = gfc_conv_array_data (tmp);
7196 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7197 fold_convert (TREE_TYPE (tmp), ptr), tmp);
7199 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7200 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7202 gfc_conv_expr_present (sym), tmp);
7204 gfc_trans_runtime_check (false, true, tmp, &se->pre,
7209 gfc_start_block (&block);
7211 /* Copy the data back. */
7212 if (fsym == NULL || fsym->attr.intent != INTENT_IN)
7214 tmp = build_call_expr_loc (input_location,
7215 gfor_fndecl_in_unpack, 2, desc, ptr);
7216 gfc_add_expr_to_block (&block, tmp);
7219 /* Free the temporary. */
7220 tmp = gfc_call_free (convert (pvoid_type_node, ptr));
7221 gfc_add_expr_to_block (&block, tmp);
7223 stmt = gfc_finish_block (&block);
7225 gfc_init_block (&block);
7226 /* Only if it was repacked. This code needs to be executed before the
7227 loop cleanup code. */
7228 tmp = build_fold_indirect_ref_loc (input_location,
7230 tmp = gfc_conv_array_data (tmp);
7231 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7232 fold_convert (TREE_TYPE (tmp), ptr), tmp);
7234 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7235 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7237 gfc_conv_expr_present (sym), tmp);
7239 tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
7241 gfc_add_expr_to_block (&block, tmp);
7242 gfc_add_block_to_block (&block, &se->post);
7244 gfc_init_block (&se->post);
7246 /* Reset the descriptor pointer. */
7249 tmp = build_fold_indirect_ref_loc (input_location, desc);
7250 gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
7253 gfc_add_block_to_block (&se->post, &block);
7258 /* Generate code to deallocate an array, if it is allocated. */
7261 gfc_trans_dealloc_allocated (tree descriptor, bool coarray)
7267 gfc_start_block (&block);
7269 var = gfc_conv_descriptor_data_get (descriptor);
7272 /* Call array_deallocate with an int * present in the second argument.
7273 Although it is ignored here, it's presence ensures that arrays that
7274 are already deallocated are ignored. */
7275 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, NULL_TREE,
7276 NULL_TREE, NULL_TREE, NULL_TREE, true,
7278 gfc_add_expr_to_block (&block, tmp);
7280 /* Zero the data pointer. */
7281 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
7282 var, build_int_cst (TREE_TYPE (var), 0));
7283 gfc_add_expr_to_block (&block, tmp);
7285 return gfc_finish_block (&block);
7289 /* This helper function calculates the size in words of a full array. */
7292 get_full_array_size (stmtblock_t *block, tree decl, int rank)
7297 idx = gfc_rank_cst[rank - 1];
7298 nelems = gfc_conv_descriptor_ubound_get (decl, idx);
7299 tmp = gfc_conv_descriptor_lbound_get (decl, idx);
7300 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
7302 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
7303 tmp, gfc_index_one_node);
7304 tmp = gfc_evaluate_now (tmp, block);
7306 nelems = gfc_conv_descriptor_stride_get (decl, idx);
7307 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7309 return gfc_evaluate_now (tmp, block);
7313 /* Allocate dest to the same size as src, and copy src -> dest.
7314 If no_malloc is set, only the copy is done. */
7317 duplicate_allocatable (tree dest, tree src, tree type, int rank,
7327 /* If the source is null, set the destination to null. Then,
7328 allocate memory to the destination. */
7329 gfc_init_block (&block);
7333 tmp = null_pointer_node;
7334 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
7335 gfc_add_expr_to_block (&block, tmp);
7336 null_data = gfc_finish_block (&block);
7338 gfc_init_block (&block);
7339 size = TYPE_SIZE_UNIT (TREE_TYPE (type));
7342 tmp = gfc_call_malloc (&block, type, size);
7343 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
7344 dest, fold_convert (type, tmp));
7345 gfc_add_expr_to_block (&block, tmp);
7348 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
7349 tmp = build_call_expr_loc (input_location, tmp, 3, dest, src,
7350 fold_convert (size_type_node, size));
7354 gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
7355 null_data = gfc_finish_block (&block);
7357 gfc_init_block (&block);
7358 nelems = get_full_array_size (&block, src, rank);
7359 tmp = fold_convert (gfc_array_index_type,
7360 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
7361 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7365 tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
7366 tmp = gfc_call_malloc (&block, tmp, size);
7367 gfc_conv_descriptor_data_set (&block, dest, tmp);
7370 /* We know the temporary and the value will be the same length,
7371 so can use memcpy. */
7372 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
7373 tmp = build_call_expr_loc (input_location,
7374 tmp, 3, gfc_conv_descriptor_data_get (dest),
7375 gfc_conv_descriptor_data_get (src),
7376 fold_convert (size_type_node, size));
7379 gfc_add_expr_to_block (&block, tmp);
7380 tmp = gfc_finish_block (&block);
7382 /* Null the destination if the source is null; otherwise do
7383 the allocate and copy. */
7387 null_cond = gfc_conv_descriptor_data_get (src);
7389 null_cond = convert (pvoid_type_node, null_cond);
7390 null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7391 null_cond, null_pointer_node);
7392 return build3_v (COND_EXPR, null_cond, tmp, null_data);
7396 /* Allocate dest to the same size as src, and copy data src -> dest. */
7399 gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank)
7401 return duplicate_allocatable (dest, src, type, rank, false);
7405 /* Copy data src -> dest. */
7408 gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
7410 return duplicate_allocatable (dest, src, type, rank, true);
7414 /* Recursively traverse an object of derived type, generating code to
7415 deallocate, nullify or copy allocatable components. This is the work horse
7416 function for the functions named in this enum. */
7418 enum {DEALLOCATE_ALLOC_COMP = 1, NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP,
7419 COPY_ONLY_ALLOC_COMP};
7422 structure_alloc_comps (gfc_symbol * der_type, tree decl,
7423 tree dest, int rank, int purpose)
7427 stmtblock_t fnblock;
7428 stmtblock_t loopbody;
7429 stmtblock_t tmpblock;
7440 tree null_cond = NULL_TREE;
7441 bool called_dealloc_with_status;
7443 gfc_init_block (&fnblock);
7445 decl_type = TREE_TYPE (decl);
7447 if ((POINTER_TYPE_P (decl_type) && rank != 0)
7448 || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
7449 decl = build_fold_indirect_ref_loc (input_location, decl);
7451 /* Just in case in gets dereferenced. */
7452 decl_type = TREE_TYPE (decl);
7454 /* If this an array of derived types with allocatable components
7455 build a loop and recursively call this function. */
7456 if (TREE_CODE (decl_type) == ARRAY_TYPE
7457 || (GFC_DESCRIPTOR_TYPE_P (decl_type) && rank != 0))
7459 tmp = gfc_conv_array_data (decl);
7460 var = build_fold_indirect_ref_loc (input_location,
7463 /* Get the number of elements - 1 and set the counter. */
7464 if (GFC_DESCRIPTOR_TYPE_P (decl_type))
7466 /* Use the descriptor for an allocatable array. Since this
7467 is a full array reference, we only need the descriptor
7468 information from dimension = rank. */
7469 tmp = get_full_array_size (&fnblock, decl, rank);
7470 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7471 gfc_array_index_type, tmp,
7472 gfc_index_one_node);
7474 null_cond = gfc_conv_descriptor_data_get (decl);
7475 null_cond = fold_build2_loc (input_location, NE_EXPR,
7476 boolean_type_node, null_cond,
7477 build_int_cst (TREE_TYPE (null_cond), 0));
7481 /* Otherwise use the TYPE_DOMAIN information. */
7482 tmp = array_type_nelts (decl_type);
7483 tmp = fold_convert (gfc_array_index_type, tmp);
7486 /* Remember that this is, in fact, the no. of elements - 1. */
7487 nelems = gfc_evaluate_now (tmp, &fnblock);
7488 index = gfc_create_var (gfc_array_index_type, "S");
7490 /* Build the body of the loop. */
7491 gfc_init_block (&loopbody);
7493 vref = gfc_build_array_ref (var, index, NULL);
7495 if (purpose == COPY_ALLOC_COMP)
7497 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
7499 tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank);
7500 gfc_add_expr_to_block (&fnblock, tmp);
7502 tmp = build_fold_indirect_ref_loc (input_location,
7503 gfc_conv_array_data (dest));
7504 dref = gfc_build_array_ref (tmp, index, NULL);
7505 tmp = structure_alloc_comps (der_type, vref, dref, rank, purpose);
7507 else if (purpose == COPY_ONLY_ALLOC_COMP)
7509 tmp = build_fold_indirect_ref_loc (input_location,
7510 gfc_conv_array_data (dest));
7511 dref = gfc_build_array_ref (tmp, index, NULL);
7512 tmp = structure_alloc_comps (der_type, vref, dref, rank,
7516 tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
7518 gfc_add_expr_to_block (&loopbody, tmp);
7520 /* Build the loop and return. */
7521 gfc_init_loopinfo (&loop);
7523 loop.from[0] = gfc_index_zero_node;
7524 loop.loopvar[0] = index;
7525 loop.to[0] = nelems;
7526 gfc_trans_scalarizing_loops (&loop, &loopbody);
7527 gfc_add_block_to_block (&fnblock, &loop.pre);
7529 tmp = gfc_finish_block (&fnblock);
7530 if (null_cond != NULL_TREE)
7531 tmp = build3_v (COND_EXPR, null_cond, tmp,
7532 build_empty_stmt (input_location));
7537 /* Otherwise, act on the components or recursively call self to
7538 act on a chain of components. */
7539 for (c = der_type->components; c; c = c->next)
7541 bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
7542 || c->ts.type == BT_CLASS)
7543 && c->ts.u.derived->attr.alloc_comp;
7544 cdecl = c->backend_decl;
7545 ctype = TREE_TYPE (cdecl);
7549 case DEALLOCATE_ALLOC_COMP:
7551 /* gfc_deallocate_scalar_with_status calls gfc_deallocate_alloc_comp
7552 (i.e. this function) so generate all the calls and suppress the
7553 recursion from here, if necessary. */
7554 called_dealloc_with_status = false;
7555 gfc_init_block (&tmpblock);
7557 if (c->attr.allocatable && (c->attr.dimension || c->attr.codimension)
7558 && !c->attr.proc_pointer)
7560 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7561 decl, cdecl, NULL_TREE);
7562 tmp = gfc_trans_dealloc_allocated (comp, c->attr.codimension);
7563 gfc_add_expr_to_block (&tmpblock, tmp);
7565 else if (c->attr.allocatable)
7567 /* Allocatable scalar components. */
7568 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7569 decl, cdecl, NULL_TREE);
7571 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
7573 gfc_add_expr_to_block (&tmpblock, tmp);
7574 called_dealloc_with_status = true;
7576 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7577 void_type_node, comp,
7578 build_int_cst (TREE_TYPE (comp), 0));
7579 gfc_add_expr_to_block (&tmpblock, tmp);
7581 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
7583 /* Allocatable CLASS components. */
7584 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7585 decl, cdecl, NULL_TREE);
7587 /* Add reference to '_data' component. */
7588 tmp = CLASS_DATA (c)->backend_decl;
7589 comp = fold_build3_loc (input_location, COMPONENT_REF,
7590 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
7592 if (GFC_DESCRIPTOR_TYPE_P(TREE_TYPE (comp)))
7593 tmp = gfc_trans_dealloc_allocated (comp,
7594 CLASS_DATA (c)->attr.codimension);
7597 tmp = gfc_deallocate_scalar_with_status (comp, NULL_TREE, true, NULL,
7598 CLASS_DATA (c)->ts);
7599 gfc_add_expr_to_block (&tmpblock, tmp);
7600 called_dealloc_with_status = true;
7602 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7603 void_type_node, comp,
7604 build_int_cst (TREE_TYPE (comp), 0));
7606 gfc_add_expr_to_block (&tmpblock, tmp);
7609 if (cmp_has_alloc_comps
7611 && !called_dealloc_with_status)
7613 /* Do not deallocate the components of ultimate pointer
7614 components or iteratively call self if call has been made
7615 to gfc_trans_dealloc_allocated */
7616 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7617 decl, cdecl, NULL_TREE);
7618 rank = c->as ? c->as->rank : 0;
7619 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
7621 gfc_add_expr_to_block (&fnblock, tmp);
7624 /* Now add the deallocation of this component. */
7625 gfc_add_block_to_block (&fnblock, &tmpblock);
7628 case NULLIFY_ALLOC_COMP:
7629 if (c->attr.pointer)
7631 else if (c->attr.allocatable
7632 && (c->attr.dimension|| c->attr.codimension))
7634 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7635 decl, cdecl, NULL_TREE);
7636 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
7638 else if (c->attr.allocatable)
7640 /* Allocatable scalar components. */
7641 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7642 decl, cdecl, NULL_TREE);
7643 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7644 void_type_node, comp,
7645 build_int_cst (TREE_TYPE (comp), 0));
7646 gfc_add_expr_to_block (&fnblock, tmp);
7648 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
7650 /* Allocatable CLASS components. */
7651 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7652 decl, cdecl, NULL_TREE);
7653 /* Add reference to '_data' component. */
7654 tmp = CLASS_DATA (c)->backend_decl;
7655 comp = fold_build3_loc (input_location, COMPONENT_REF,
7656 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
7657 if (GFC_DESCRIPTOR_TYPE_P(TREE_TYPE (comp)))
7658 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
7661 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7662 void_type_node, comp,
7663 build_int_cst (TREE_TYPE (comp), 0));
7664 gfc_add_expr_to_block (&fnblock, tmp);
7667 else if (cmp_has_alloc_comps)
7669 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7670 decl, cdecl, NULL_TREE);
7671 rank = c->as ? c->as->rank : 0;
7672 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
7674 gfc_add_expr_to_block (&fnblock, tmp);
7678 case COPY_ALLOC_COMP:
7679 if (c->attr.pointer)
7682 /* We need source and destination components. */
7683 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
7685 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
7687 dcmp = fold_convert (TREE_TYPE (comp), dcmp);
7689 if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
7697 dst_data = gfc_class_data_get (dcmp);
7698 src_data = gfc_class_data_get (comp);
7699 size = fold_convert (size_type_node, gfc_vtable_size_get (comp));
7701 if (CLASS_DATA (c)->attr.dimension)
7703 nelems = gfc_conv_descriptor_size (src_data,
7704 CLASS_DATA (c)->as->rank);
7705 src_data = gfc_conv_descriptor_data_get (src_data);
7706 dst_data = gfc_conv_descriptor_data_get (dst_data);
7709 nelems = build_int_cst (size_type_node, 1);
7711 gfc_init_block (&tmpblock);
7713 /* We need to use CALLOC as _copy might try to free allocatable
7714 components of the destination. */
7715 ftn_tree = builtin_decl_explicit (BUILT_IN_CALLOC);
7716 tmp = build_call_expr_loc (input_location, ftn_tree, 2, nelems,
7718 gfc_add_modify (&tmpblock, dst_data,
7719 fold_convert (TREE_TYPE (dst_data), tmp));
7721 tmp = gfc_copy_class_to_class (comp, dcmp, nelems);
7722 gfc_add_expr_to_block (&tmpblock, tmp);
7723 tmp = gfc_finish_block (&tmpblock);
7725 gfc_init_block (&tmpblock);
7726 gfc_add_modify (&tmpblock, dst_data,
7727 fold_convert (TREE_TYPE (dst_data),
7728 null_pointer_node));
7729 null_data = gfc_finish_block (&tmpblock);
7731 null_cond = fold_build2_loc (input_location, NE_EXPR,
7732 boolean_type_node, src_data,
7735 gfc_add_expr_to_block (&fnblock, build3_v (COND_EXPR, null_cond,
7740 if (c->attr.allocatable && !c->attr.proc_pointer
7741 && !cmp_has_alloc_comps)
7743 rank = c->as ? c->as->rank : 0;
7744 tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank);
7745 gfc_add_expr_to_block (&fnblock, tmp);
7748 if (cmp_has_alloc_comps)
7750 rank = c->as ? c->as->rank : 0;
7751 tmp = fold_convert (TREE_TYPE (dcmp), comp);
7752 gfc_add_modify (&fnblock, dcmp, tmp);
7753 tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
7755 gfc_add_expr_to_block (&fnblock, tmp);
7765 return gfc_finish_block (&fnblock);
7768 /* Recursively traverse an object of derived type, generating code to
7769 nullify allocatable components. */
7772 gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
7774 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
7775 NULLIFY_ALLOC_COMP);
7779 /* Recursively traverse an object of derived type, generating code to
7780 deallocate allocatable components. */
7783 gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
7785 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
7786 DEALLOCATE_ALLOC_COMP);
7790 /* Recursively traverse an object of derived type, generating code to
7791 copy it and its allocatable components. */
7794 gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
7796 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
7800 /* Recursively traverse an object of derived type, generating code to
7801 copy only its allocatable components. */
7804 gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
7806 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
7810 /* Returns the value of LBOUND for an expression. This could be broken out
7811 from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
7812 called by gfc_alloc_allocatable_for_assignment. */
7814 get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
7819 tree cond, cond1, cond3, cond4;
7823 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
7825 tmp = gfc_rank_cst[dim];
7826 lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
7827 ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
7828 stride = gfc_conv_descriptor_stride_get (desc, tmp);
7829 cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
7831 cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
7832 stride, gfc_index_zero_node);
7833 cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7834 boolean_type_node, cond3, cond1);
7835 cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
7836 stride, gfc_index_zero_node);
7838 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7839 tmp, build_int_cst (gfc_array_index_type,
7842 cond = boolean_false_node;
7844 cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
7845 boolean_type_node, cond3, cond4);
7846 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
7847 boolean_type_node, cond, cond1);
7849 return fold_build3_loc (input_location, COND_EXPR,
7850 gfc_array_index_type, cond,
7851 lbound, gfc_index_one_node);
7854 if (expr->expr_type == EXPR_FUNCTION)
7856 /* A conversion function, so use the argument. */
7857 gcc_assert (expr->value.function.isym
7858 && expr->value.function.isym->conversion);
7859 expr = expr->value.function.actual->expr;
7862 if (expr->expr_type == EXPR_VARIABLE)
7864 tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
7865 for (ref = expr->ref; ref; ref = ref->next)
7867 if (ref->type == REF_COMPONENT
7868 && ref->u.c.component->as
7870 && ref->next->u.ar.type == AR_FULL)
7871 tmp = TREE_TYPE (ref->u.c.component->backend_decl);
7873 return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
7876 return gfc_index_one_node;
7880 /* Returns true if an expression represents an lhs that can be reallocated
7884 gfc_is_reallocatable_lhs (gfc_expr *expr)
7891 /* An allocatable variable. */
7892 if (expr->symtree->n.sym->attr.allocatable
7894 && expr->ref->type == REF_ARRAY
7895 && expr->ref->u.ar.type == AR_FULL)
7898 /* All that can be left are allocatable components. */
7899 if ((expr->symtree->n.sym->ts.type != BT_DERIVED
7900 && expr->symtree->n.sym->ts.type != BT_CLASS)
7901 || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
7904 /* Find a component ref followed by an array reference. */
7905 for (ref = expr->ref; ref; ref = ref->next)
7907 && ref->type == REF_COMPONENT
7908 && ref->next->type == REF_ARRAY
7909 && !ref->next->next)
7915 /* Return true if valid reallocatable lhs. */
7916 if (ref->u.c.component->attr.allocatable
7917 && ref->next->u.ar.type == AR_FULL)
7924 /* Allocate the lhs of an assignment to an allocatable array, otherwise
7928 gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
7932 stmtblock_t realloc_block;
7933 stmtblock_t alloc_block;
7937 gfc_array_info *linfo;
7958 gfc_array_spec * as;
7960 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
7961 Find the lhs expression in the loop chain and set expr1 and
7962 expr2 accordingly. */
7963 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
7966 /* Find the ss for the lhs. */
7968 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7969 if (lss->info->expr && lss->info->expr->expr_type == EXPR_VARIABLE)
7971 if (lss == gfc_ss_terminator)
7973 expr1 = lss->info->expr;
7976 /* Bail out if this is not a valid allocate on assignment. */
7977 if (!gfc_is_reallocatable_lhs (expr1)
7978 || (expr2 && !expr2->rank))
7981 /* Find the ss for the lhs. */
7983 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7984 if (lss->info->expr == expr1)
7987 if (lss == gfc_ss_terminator)
7990 linfo = &lss->info->data.array;
7992 /* Find an ss for the rhs. For operator expressions, we see the
7993 ss's for the operands. Any one of these will do. */
7995 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
7996 if (rss->info->expr != expr1 && rss != loop->temp_ss)
7999 if (expr2 && rss == gfc_ss_terminator)
8002 gfc_start_block (&fblock);
8004 /* Since the lhs is allocatable, this must be a descriptor type.
8005 Get the data and array size. */
8006 desc = linfo->descriptor;
8007 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
8008 array1 = gfc_conv_descriptor_data_get (desc);
8010 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
8011 deallocated if expr is an array of different shape or any of the
8012 corresponding length type parameter values of variable and expr
8013 differ." This assures F95 compatibility. */
8014 jump_label1 = gfc_build_label_decl (NULL_TREE);
8015 jump_label2 = gfc_build_label_decl (NULL_TREE);
8017 /* Allocate if data is NULL. */
8018 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
8019 array1, build_int_cst (TREE_TYPE (array1), 0));
8020 tmp = build3_v (COND_EXPR, cond,
8021 build1_v (GOTO_EXPR, jump_label1),
8022 build_empty_stmt (input_location));
8023 gfc_add_expr_to_block (&fblock, tmp);
8025 /* Get arrayspec if expr is a full array. */
8026 if (expr2 && expr2->expr_type == EXPR_FUNCTION
8027 && expr2->value.function.isym
8028 && expr2->value.function.isym->conversion)
8030 /* For conversion functions, take the arg. */
8031 gfc_expr *arg = expr2->value.function.actual->expr;
8032 as = gfc_get_full_arrayspec_from_expr (arg);
8035 as = gfc_get_full_arrayspec_from_expr (expr2);
8039 /* If the lhs shape is not the same as the rhs jump to setting the
8040 bounds and doing the reallocation....... */
8041 for (n = 0; n < expr1->rank; n++)
8043 /* Check the shape. */
8044 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
8045 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
8046 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8047 gfc_array_index_type,
8048 loop->to[n], loop->from[n]);
8049 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8050 gfc_array_index_type,
8052 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8053 gfc_array_index_type,
8055 cond = fold_build2_loc (input_location, NE_EXPR,
8057 tmp, gfc_index_zero_node);
8058 tmp = build3_v (COND_EXPR, cond,
8059 build1_v (GOTO_EXPR, jump_label1),
8060 build_empty_stmt (input_location));
8061 gfc_add_expr_to_block (&fblock, tmp);
8064 /* ....else jump past the (re)alloc code. */
8065 tmp = build1_v (GOTO_EXPR, jump_label2);
8066 gfc_add_expr_to_block (&fblock, tmp);
8068 /* Add the label to start automatic (re)allocation. */
8069 tmp = build1_v (LABEL_EXPR, jump_label1);
8070 gfc_add_expr_to_block (&fblock, tmp);
8072 size1 = gfc_conv_descriptor_size (desc, expr1->rank);
8074 /* Get the rhs size. Fix both sizes. */
8076 desc2 = rss->info->data.array.descriptor;
8079 size2 = gfc_index_one_node;
8080 for (n = 0; n < expr2->rank; n++)
8082 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8083 gfc_array_index_type,
8084 loop->to[n], loop->from[n]);
8085 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8086 gfc_array_index_type,
8087 tmp, gfc_index_one_node);
8088 size2 = fold_build2_loc (input_location, MULT_EXPR,
8089 gfc_array_index_type,
8093 size1 = gfc_evaluate_now (size1, &fblock);
8094 size2 = gfc_evaluate_now (size2, &fblock);
8096 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
8098 neq_size = gfc_evaluate_now (cond, &fblock);
8100 /* Deallocation of allocatable components will have to occur on
8101 reallocation. Fix the old descriptor now. */
8102 if ((expr1->ts.type == BT_DERIVED)
8103 && expr1->ts.u.derived->attr.alloc_comp)
8104 old_desc = gfc_evaluate_now (desc, &fblock);
8106 old_desc = NULL_TREE;
8108 /* Now modify the lhs descriptor and the associated scalarizer
8109 variables. F2003 7.4.1.3: "If variable is or becomes an
8110 unallocated allocatable variable, then it is allocated with each
8111 deferred type parameter equal to the corresponding type parameters
8112 of expr , with the shape of expr , and with each lower bound equal
8113 to the corresponding element of LBOUND(expr)."
8114 Reuse size1 to keep a dimension-by-dimension track of the
8115 stride of the new array. */
8116 size1 = gfc_index_one_node;
8117 offset = gfc_index_zero_node;
8119 for (n = 0; n < expr2->rank; n++)
8121 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8122 gfc_array_index_type,
8123 loop->to[n], loop->from[n]);
8124 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8125 gfc_array_index_type,
8126 tmp, gfc_index_one_node);
8128 lbound = gfc_index_one_node;
8133 lbd = get_std_lbound (expr2, desc2, n,
8134 as->type == AS_ASSUMED_SIZE);
8135 ubound = fold_build2_loc (input_location,
8137 gfc_array_index_type,
8139 ubound = fold_build2_loc (input_location,
8141 gfc_array_index_type,
8146 gfc_conv_descriptor_lbound_set (&fblock, desc,
8149 gfc_conv_descriptor_ubound_set (&fblock, desc,
8152 gfc_conv_descriptor_stride_set (&fblock, desc,
8155 lbound = gfc_conv_descriptor_lbound_get (desc,
8157 tmp2 = fold_build2_loc (input_location, MULT_EXPR,
8158 gfc_array_index_type,
8160 offset = fold_build2_loc (input_location, MINUS_EXPR,
8161 gfc_array_index_type,
8163 size1 = fold_build2_loc (input_location, MULT_EXPR,
8164 gfc_array_index_type,
8168 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
8169 the array offset is saved and the info.offset is used for a
8170 running offset. Use the saved_offset instead. */
8171 tmp = gfc_conv_descriptor_offset (desc);
8172 gfc_add_modify (&fblock, tmp, offset);
8173 if (linfo->saved_offset
8174 && TREE_CODE (linfo->saved_offset) == VAR_DECL)
8175 gfc_add_modify (&fblock, linfo->saved_offset, tmp);
8177 /* Now set the deltas for the lhs. */
8178 for (n = 0; n < expr1->rank; n++)
8180 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
8182 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8183 gfc_array_index_type, tmp,
8185 if (linfo->delta[dim]
8186 && TREE_CODE (linfo->delta[dim]) == VAR_DECL)
8187 gfc_add_modify (&fblock, linfo->delta[dim], tmp);
8190 /* Get the new lhs size in bytes. */
8191 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
8193 tmp = expr2->ts.u.cl->backend_decl;
8194 gcc_assert (expr1->ts.u.cl->backend_decl);
8195 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
8196 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
8198 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
8200 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
8201 tmp = fold_build2_loc (input_location, MULT_EXPR,
8202 gfc_array_index_type, tmp,
8203 expr1->ts.u.cl->backend_decl);
8206 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
8207 tmp = fold_convert (gfc_array_index_type, tmp);
8208 size2 = fold_build2_loc (input_location, MULT_EXPR,
8209 gfc_array_index_type,
8211 size2 = fold_convert (size_type_node, size2);
8212 size2 = gfc_evaluate_now (size2, &fblock);
8214 /* Realloc expression. Note that the scalarizer uses desc.data
8215 in the array reference - (*desc.data)[<element>]. */
8216 gfc_init_block (&realloc_block);
8218 if ((expr1->ts.type == BT_DERIVED)
8219 && expr1->ts.u.derived->attr.alloc_comp)
8221 tmp = gfc_deallocate_alloc_comp (expr1->ts.u.derived, old_desc,
8223 gfc_add_expr_to_block (&realloc_block, tmp);
8226 tmp = build_call_expr_loc (input_location,
8227 builtin_decl_explicit (BUILT_IN_REALLOC), 2,
8228 fold_convert (pvoid_type_node, array1),
8230 gfc_conv_descriptor_data_set (&realloc_block,
8233 if ((expr1->ts.type == BT_DERIVED)
8234 && expr1->ts.u.derived->attr.alloc_comp)
8236 tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc,
8238 gfc_add_expr_to_block (&realloc_block, tmp);
8241 realloc_expr = gfc_finish_block (&realloc_block);
8243 /* Only reallocate if sizes are different. */
8244 tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
8245 build_empty_stmt (input_location));
8249 /* Malloc expression. */
8250 gfc_init_block (&alloc_block);
8251 tmp = build_call_expr_loc (input_location,
8252 builtin_decl_explicit (BUILT_IN_MALLOC),
8254 gfc_conv_descriptor_data_set (&alloc_block,
8256 tmp = gfc_conv_descriptor_dtype (desc);
8257 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
8258 if ((expr1->ts.type == BT_DERIVED)
8259 && expr1->ts.u.derived->attr.alloc_comp)
8261 tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc,
8263 gfc_add_expr_to_block (&alloc_block, tmp);
8265 alloc_expr = gfc_finish_block (&alloc_block);
8267 /* Malloc if not allocated; realloc otherwise. */
8268 tmp = build_int_cst (TREE_TYPE (array1), 0);
8269 cond = fold_build2_loc (input_location, EQ_EXPR,
8272 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
8273 gfc_add_expr_to_block (&fblock, tmp);
8275 /* Make sure that the scalarizer data pointer is updated. */
8277 && TREE_CODE (linfo->data) == VAR_DECL)
8279 tmp = gfc_conv_descriptor_data_get (desc);
8280 gfc_add_modify (&fblock, linfo->data, tmp);
8283 /* Add the exit label. */
8284 tmp = build1_v (LABEL_EXPR, jump_label2);
8285 gfc_add_expr_to_block (&fblock, tmp);
8287 return gfc_finish_block (&fblock);
8291 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
8292 Do likewise, recursively if necessary, with the allocatable components of
8296 gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
8302 stmtblock_t cleanup;
8305 bool sym_has_alloc_comp;
8307 sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
8308 || sym->ts.type == BT_CLASS)
8309 && sym->ts.u.derived->attr.alloc_comp;
8311 /* Make sure the frontend gets these right. */
8312 if (!(sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp))
8313 fatal_error ("Possible front-end bug: Deferred array size without pointer, "
8314 "allocatable attribute or derived type without allocatable "
8317 gfc_save_backend_locus (&loc);
8318 gfc_set_backend_locus (&sym->declared_at);
8319 gfc_init_block (&init);
8321 gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
8322 || TREE_CODE (sym->backend_decl) == PARM_DECL);
8324 if (sym->ts.type == BT_CHARACTER
8325 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
8327 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
8328 gfc_trans_vla_type_sizes (sym, &init);
8331 /* Dummy, use associated and result variables don't need anything special. */
8332 if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
8334 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
8335 gfc_restore_backend_locus (&loc);
8339 descriptor = sym->backend_decl;
8341 /* Although static, derived types with default initializers and
8342 allocatable components must not be nulled wholesale; instead they
8343 are treated component by component. */
8344 if (TREE_STATIC (descriptor) && !sym_has_alloc_comp)
8346 /* SAVEd variables are not freed on exit. */
8347 gfc_trans_static_array_pointer (sym);
8349 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
8350 gfc_restore_backend_locus (&loc);
8354 /* Get the descriptor type. */
8355 type = TREE_TYPE (sym->backend_decl);
8357 if (sym_has_alloc_comp && !(sym->attr.pointer || sym->attr.allocatable))
8360 && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
8362 if (sym->value == NULL
8363 || !gfc_has_default_initializer (sym->ts.u.derived))
8365 rank = sym->as ? sym->as->rank : 0;
8366 tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
8368 gfc_add_expr_to_block (&init, tmp);
8371 gfc_init_default_dt (sym, &init, false);
8374 else if (!GFC_DESCRIPTOR_TYPE_P (type))
8376 /* If the backend_decl is not a descriptor, we must have a pointer
8378 descriptor = build_fold_indirect_ref_loc (input_location,
8380 type = TREE_TYPE (descriptor);
8383 /* NULLIFY the data pointer. */
8384 if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save)
8385 gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);
8387 gfc_restore_backend_locus (&loc);
8388 gfc_init_block (&cleanup);
8390 /* Allocatable arrays need to be freed when they go out of scope.
8391 The allocatable components of pointers must not be touched. */
8392 if (sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
8393 && !sym->attr.pointer && !sym->attr.save)
8396 rank = sym->as ? sym->as->rank : 0;
8397 tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
8398 gfc_add_expr_to_block (&cleanup, tmp);
8401 if (sym->attr.allocatable && (sym->attr.dimension || sym->attr.codimension)
8402 && !sym->attr.save && !sym->attr.result)
8404 tmp = gfc_trans_dealloc_allocated (sym->backend_decl,
8405 sym->attr.codimension);
8406 gfc_add_expr_to_block (&cleanup, tmp);
8409 gfc_add_init_cleanup (block, gfc_finish_block (&init),
8410 gfc_finish_block (&cleanup));
8413 /************ Expression Walking Functions ******************/
8415 /* Walk a variable reference.
8417 Possible extension - multiple component subscripts.
8418 x(:,:) = foo%a(:)%b(:)
8420 forall (i=..., j=...)
8421 x(i,j) = foo%a(j)%b(i)
8423 This adds a fair amount of complexity because you need to deal with more
8424 than one ref. Maybe handle in a similar manner to vector subscripts.
8425 Maybe not worth the effort. */
8429 gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
8433 for (ref = expr->ref; ref; ref = ref->next)
8434 if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
8437 return gfc_walk_array_ref (ss, expr, ref);
8442 gfc_walk_array_ref (gfc_ss * ss, gfc_expr * expr, gfc_ref * ref)
8448 for (; ref; ref = ref->next)
8450 if (ref->type == REF_SUBSTRING)
8452 ss = gfc_get_scalar_ss (ss, ref->u.ss.start);
8453 ss = gfc_get_scalar_ss (ss, ref->u.ss.end);
8456 /* We're only interested in array sections from now on. */
8457 if (ref->type != REF_ARRAY)
8465 for (n = ar->dimen - 1; n >= 0; n--)
8466 ss = gfc_get_scalar_ss (ss, ar->start[n]);
8470 newss = gfc_get_array_ss (ss, expr, ar->as->rank, GFC_SS_SECTION);
8471 newss->info->data.array.ref = ref;
8473 /* Make sure array is the same as array(:,:), this way
8474 we don't need to special case all the time. */
8475 ar->dimen = ar->as->rank;
8476 for (n = 0; n < ar->dimen; n++)
8478 ar->dimen_type[n] = DIMEN_RANGE;
8480 gcc_assert (ar->start[n] == NULL);
8481 gcc_assert (ar->end[n] == NULL);
8482 gcc_assert (ar->stride[n] == NULL);
8488 newss = gfc_get_array_ss (ss, expr, 0, GFC_SS_SECTION);
8489 newss->info->data.array.ref = ref;
8491 /* We add SS chains for all the subscripts in the section. */
8492 for (n = 0; n < ar->dimen; n++)
8496 switch (ar->dimen_type[n])
8499 /* Add SS for elemental (scalar) subscripts. */
8500 gcc_assert (ar->start[n]);
8501 indexss = gfc_get_scalar_ss (gfc_ss_terminator, ar->start[n]);
8502 indexss->loop_chain = gfc_ss_terminator;
8503 newss->info->data.array.subscript[n] = indexss;
8507 /* We don't add anything for sections, just remember this
8508 dimension for later. */
8509 newss->dim[newss->dimen] = n;
8514 /* Create a GFC_SS_VECTOR index in which we can store
8515 the vector's descriptor. */
8516 indexss = gfc_get_array_ss (gfc_ss_terminator, ar->start[n],
8518 indexss->loop_chain = gfc_ss_terminator;
8519 newss->info->data.array.subscript[n] = indexss;
8520 newss->dim[newss->dimen] = n;
8525 /* We should know what sort of section it is by now. */
8529 /* We should have at least one non-elemental dimension,
8530 unless we are creating a descriptor for a (scalar) coarray. */
8531 gcc_assert (newss->dimen > 0
8532 || newss->info->data.array.ref->u.ar.as->corank > 0);
8537 /* We should know what sort of section it is by now. */
8546 /* Walk an expression operator. If only one operand of a binary expression is
8547 scalar, we must also add the scalar term to the SS chain. */
8550 gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
8555 head = gfc_walk_subexpr (ss, expr->value.op.op1);
8556 if (expr->value.op.op2 == NULL)
8559 head2 = gfc_walk_subexpr (head, expr->value.op.op2);
8561 /* All operands are scalar. Pass back and let the caller deal with it. */
8565 /* All operands require scalarization. */
8566 if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
8569 /* One of the operands needs scalarization, the other is scalar.
8570 Create a gfc_ss for the scalar expression. */
8573 /* First operand is scalar. We build the chain in reverse order, so
8574 add the scalar SS after the second operand. */
8576 while (head && head->next != ss)
8578 /* Check we haven't somehow broken the chain. */
8580 head->next = gfc_get_scalar_ss (ss, expr->value.op.op1);
8582 else /* head2 == head */
8584 gcc_assert (head2 == head);
8585 /* Second operand is scalar. */
8586 head2 = gfc_get_scalar_ss (head2, expr->value.op.op2);
8593 /* Reverse a SS chain. */
8596 gfc_reverse_ss (gfc_ss * ss)
8601 gcc_assert (ss != NULL);
8603 head = gfc_ss_terminator;
8604 while (ss != gfc_ss_terminator)
8607 /* Check we didn't somehow break the chain. */
8608 gcc_assert (next != NULL);
8618 /* Given an expression referring to a procedure, return the symbol of its
8619 interface. We can't get the procedure symbol directly as we have to handle
8620 the case of (deferred) type-bound procedures. */
8623 gfc_get_proc_ifc_for_expr (gfc_expr *procedure_ref)
8628 if (procedure_ref == NULL)
8631 /* Normal procedure case. */
8632 sym = procedure_ref->symtree->n.sym;
8634 /* Typebound procedure case. */
8635 for (ref = procedure_ref->ref; ref; ref = ref->next)
8637 if (ref->type == REF_COMPONENT
8638 && ref->u.c.component->attr.proc_pointer)
8639 sym = ref->u.c.component->ts.interface;
8648 /* Walk the arguments of an elemental function.
8649 PROC_EXPR is used to check whether an argument is permitted to be absent. If
8650 it is NULL, we don't do the check and the argument is assumed to be present.
8654 gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
8655 gfc_symbol *proc_ifc, gfc_ss_type type)
8657 gfc_formal_arglist *dummy_arg;
8663 head = gfc_ss_terminator;
8667 dummy_arg = gfc_sym_get_dummy_args (proc_ifc);
8672 for (; arg; arg = arg->next)
8674 if (!arg->expr || arg->expr->expr_type == EXPR_NULL)
8677 newss = gfc_walk_subexpr (head, arg->expr);
8680 /* Scalar argument. */
8681 gcc_assert (type == GFC_SS_SCALAR || type == GFC_SS_REFERENCE);
8682 newss = gfc_get_scalar_ss (head, arg->expr);
8683 newss->info->type = type;
8689 if (dummy_arg != NULL
8690 && dummy_arg->sym->attr.optional
8691 && arg->expr->expr_type == EXPR_VARIABLE
8692 && (gfc_expr_attr (arg->expr).optional
8693 || gfc_expr_attr (arg->expr).allocatable
8694 || gfc_expr_attr (arg->expr).pointer))
8695 newss->info->can_be_null_ref = true;
8701 while (tail->next != gfc_ss_terminator)
8705 if (dummy_arg != NULL)
8706 dummy_arg = dummy_arg->next;
8711 /* If all the arguments are scalar we don't need the argument SS. */
8712 gfc_free_ss_chain (head);
8717 /* Add it onto the existing chain. */
8723 /* Walk a function call. Scalar functions are passed back, and taken out of
8724 scalarization loops. For elemental functions we walk their arguments.
8725 The result of functions returning arrays is stored in a temporary outside
8726 the loop, so that the function is only called once. Hence we do not need
8727 to walk their arguments. */
8730 gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
8732 gfc_intrinsic_sym *isym;
8734 gfc_component *comp = NULL;
8736 isym = expr->value.function.isym;
8738 /* Handle intrinsic functions separately. */
8740 return gfc_walk_intrinsic_function (ss, expr, isym);
8742 sym = expr->value.function.esym;
8744 sym = expr->symtree->n.sym;
8746 /* A function that returns arrays. */
8747 comp = gfc_get_proc_ptr_comp (expr);
8748 if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
8749 || (comp && comp->attr.dimension))
8750 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_FUNCTION);
8752 /* Walk the parameters of an elemental function. For now we always pass
8754 if (sym->attr.elemental || (comp && comp->attr.elemental))
8755 return gfc_walk_elemental_function_args (ss, expr->value.function.actual,
8756 gfc_get_proc_ifc_for_expr (expr),
8759 /* Scalar functions are OK as these are evaluated outside the scalarization
8760 loop. Pass back and let the caller deal with it. */
8765 /* An array temporary is constructed for array constructors. */
8768 gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
8770 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_CONSTRUCTOR);
8774 /* Walk an expression. Add walked expressions to the head of the SS chain.
8775 A wholly scalar expression will not be added. */
8778 gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
8782 switch (expr->expr_type)
8785 head = gfc_walk_variable_expr (ss, expr);
8789 head = gfc_walk_op_expr (ss, expr);
8793 head = gfc_walk_function_expr (ss, expr);
8798 case EXPR_STRUCTURE:
8799 /* Pass back and let the caller deal with it. */
8803 head = gfc_walk_array_constructor (ss, expr);
8806 case EXPR_SUBSTRING:
8807 /* Pass back and let the caller deal with it. */
8811 internal_error ("bad expression type during walk (%d)",
8818 /* Entry point for expression walking.
8819 A return value equal to the passed chain means this is
8820 a scalar expression. It is up to the caller to take whatever action is
8821 necessary to translate these. */
8824 gfc_walk_expr (gfc_expr * expr)
8828 res = gfc_walk_subexpr (gfc_ss_terminator, expr);
8829 return gfc_reverse_ss (res);