1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2015 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "fold-const.h"
30 #include "gimple-expr.h"
31 #include "gimplify.h" /* For create_tmp_var_raw. */
32 #include "stringpool.h"
34 #include "diagnostic-core.h" /* For internal_error. */
36 #include "trans-stmt.h"
37 #include "trans-types.h"
38 #include "trans-array.h"
39 #include "trans-const.h"
42 #include "gomp-constants.h"
46 /* True if OpenMP should privatize what this DECL points to rather
47 than the DECL itself. */
50 gfc_omp_privatize_by_reference (const_tree decl)
52 tree type = TREE_TYPE (decl);
54 if (TREE_CODE (type) == REFERENCE_TYPE
55 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
58 if (TREE_CODE (type) == POINTER_TYPE)
60 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
61 that have POINTER_TYPE type and aren't scalar pointers, scalar
62 allocatables, Cray pointees or C pointers are supposed to be
63 privatized by reference. */
64 if (GFC_DECL_GET_SCALAR_POINTER (decl)
65 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
66 || GFC_DECL_CRAY_POINTEE (decl)
67 || VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
70 if (!DECL_ARTIFICIAL (decl)
71 && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
74 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
76 if (DECL_LANG_SPECIFIC (decl)
77 && GFC_DECL_SAVED_DESCRIPTOR (decl))
84 /* True if OpenMP sharing attribute of DECL is predetermined. */
86 enum omp_clause_default_kind
87 gfc_omp_predetermined_sharing (tree decl)
89 /* Associate names preserve the association established during ASSOCIATE.
90 As they are implemented either as pointers to the selector or array
91 descriptor and shouldn't really change in the ASSOCIATE region,
92 this decl can be either shared or firstprivate. If it is a pointer,
93 use firstprivate, as it is cheaper that way, otherwise make it shared. */
94 if (GFC_DECL_ASSOCIATE_VAR_P (decl))
96 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
97 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
99 return OMP_CLAUSE_DEFAULT_SHARED;
102 if (DECL_ARTIFICIAL (decl)
103 && ! GFC_DECL_RESULT (decl)
104 && ! (DECL_LANG_SPECIFIC (decl)
105 && GFC_DECL_SAVED_DESCRIPTOR (decl)))
106 return OMP_CLAUSE_DEFAULT_SHARED;
108 /* Cray pointees shouldn't be listed in any clauses and should be
109 gimplified to dereference of the corresponding Cray pointer.
110 Make them all private, so that they are emitted in the debug
112 if (GFC_DECL_CRAY_POINTEE (decl))
113 return OMP_CLAUSE_DEFAULT_PRIVATE;
115 /* Assumed-size arrays are predetermined shared. */
116 if (TREE_CODE (decl) == PARM_DECL
117 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
118 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
119 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
120 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
122 return OMP_CLAUSE_DEFAULT_SHARED;
124 /* Dummy procedures aren't considered variables by OpenMP, thus are
125 disallowed in OpenMP clauses. They are represented as PARM_DECLs
126 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
127 to avoid complaining about their uses with default(none). */
128 if (TREE_CODE (decl) == PARM_DECL
129 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
130 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
131 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
133 /* COMMON and EQUIVALENCE decls are shared. They
134 are only referenced through DECL_VALUE_EXPR of the variables
135 contained in them. If those are privatized, they will not be
136 gimplified to the COMMON or EQUIVALENCE decls. */
137 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
138 return OMP_CLAUSE_DEFAULT_SHARED;
140 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
141 return OMP_CLAUSE_DEFAULT_SHARED;
143 /* These are either array or derived parameters, or vtables.
144 In the former cases, the OpenMP standard doesn't consider them to be
145 variables at all (they can't be redefined), but they can nevertheless appear
146 in parallel/task regions and for default(none) purposes treat them as shared.
147 For vtables likely the same handling is desirable. */
148 if (TREE_CODE (decl) == VAR_DECL
149 && TREE_READONLY (decl)
150 && TREE_STATIC (decl))
151 return OMP_CLAUSE_DEFAULT_SHARED;
153 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
156 /* Return decl that should be used when reporting DEFAULT(NONE)
160 gfc_omp_report_decl (tree decl)
162 if (DECL_ARTIFICIAL (decl)
163 && DECL_LANG_SPECIFIC (decl)
164 && GFC_DECL_SAVED_DESCRIPTOR (decl))
165 return GFC_DECL_SAVED_DESCRIPTOR (decl);
170 /* Return true if TYPE has any allocatable components. */
173 gfc_has_alloc_comps (tree type, tree decl)
177 if (POINTER_TYPE_P (type))
179 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
180 type = TREE_TYPE (type);
181 else if (GFC_DECL_GET_SCALAR_POINTER (decl))
185 if (GFC_DESCRIPTOR_TYPE_P (type) || GFC_ARRAY_TYPE_P (type))
186 type = gfc_get_element_type (type);
188 if (TREE_CODE (type) != RECORD_TYPE)
191 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
193 ftype = TREE_TYPE (field);
194 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
196 if (GFC_DESCRIPTOR_TYPE_P (ftype)
197 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
199 if (gfc_has_alloc_comps (ftype, field))
205 /* Return true if DECL in private clause needs
206 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
208 gfc_omp_private_outer_ref (tree decl)
210 tree type = TREE_TYPE (decl);
212 if (GFC_DESCRIPTOR_TYPE_P (type)
213 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
216 if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
219 if (gfc_omp_privatize_by_reference (decl))
220 type = TREE_TYPE (type);
222 if (gfc_has_alloc_comps (type, decl))
228 /* Callback for gfc_omp_unshare_expr. */
231 gfc_omp_unshare_expr_r (tree *tp, int *walk_subtrees, void *)
234 enum tree_code code = TREE_CODE (t);
236 /* Stop at types, decls, constants like copy_tree_r. */
237 if (TREE_CODE_CLASS (code) == tcc_type
238 || TREE_CODE_CLASS (code) == tcc_declaration
239 || TREE_CODE_CLASS (code) == tcc_constant
242 else if (handled_component_p (t)
243 || TREE_CODE (t) == MEM_REF)
245 *tp = unshare_expr (t);
252 /* Unshare in expr anything that the FE which normally doesn't
253 care much about tree sharing (because during gimplification
254 everything is unshared) could cause problems with tree sharing
255 at omp-low.c time. */
258 gfc_omp_unshare_expr (tree expr)
260 walk_tree (&expr, gfc_omp_unshare_expr_r, NULL, NULL);
264 enum walk_alloc_comps
266 WALK_ALLOC_COMPS_DTOR,
267 WALK_ALLOC_COMPS_DEFAULT_CTOR,
268 WALK_ALLOC_COMPS_COPY_CTOR
271 /* Handle allocatable components in OpenMP clauses. */
274 gfc_walk_alloc_comps (tree decl, tree dest, tree var,
275 enum walk_alloc_comps kind)
277 stmtblock_t block, tmpblock;
278 tree type = TREE_TYPE (decl), then_b, tem, field;
279 gfc_init_block (&block);
281 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
283 if (GFC_DESCRIPTOR_TYPE_P (type))
285 gfc_init_block (&tmpblock);
286 tem = gfc_full_array_size (&tmpblock, decl,
287 GFC_TYPE_ARRAY_RANK (type));
288 then_b = gfc_finish_block (&tmpblock);
289 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (then_b));
290 tem = gfc_omp_unshare_expr (tem);
291 tem = fold_build2_loc (input_location, MINUS_EXPR,
292 gfc_array_index_type, tem,
297 if (!TYPE_DOMAIN (type)
298 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
299 || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
300 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
302 tem = fold_build2 (EXACT_DIV_EXPR, sizetype,
303 TYPE_SIZE_UNIT (type),
304 TYPE_SIZE_UNIT (TREE_TYPE (type)));
305 tem = size_binop (MINUS_EXPR, tem, size_one_node);
308 tem = array_type_nelts (type);
309 tem = fold_convert (gfc_array_index_type, tem);
312 tree nelems = gfc_evaluate_now (tem, &block);
313 tree index = gfc_create_var (gfc_array_index_type, "S");
315 gfc_init_block (&tmpblock);
316 tem = gfc_conv_array_data (decl);
317 tree declvar = build_fold_indirect_ref_loc (input_location, tem);
318 tree declvref = gfc_build_array_ref (declvar, index, NULL);
319 tree destvar, destvref = NULL_TREE;
322 tem = gfc_conv_array_data (dest);
323 destvar = build_fold_indirect_ref_loc (input_location, tem);
324 destvref = gfc_build_array_ref (destvar, index, NULL);
326 gfc_add_expr_to_block (&tmpblock,
327 gfc_walk_alloc_comps (declvref, destvref,
331 gfc_init_loopinfo (&loop);
333 loop.from[0] = gfc_index_zero_node;
334 loop.loopvar[0] = index;
336 gfc_trans_scalarizing_loops (&loop, &tmpblock);
337 gfc_add_block_to_block (&block, &loop.pre);
338 return gfc_finish_block (&block);
340 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (var))
342 decl = build_fold_indirect_ref_loc (input_location, decl);
344 dest = build_fold_indirect_ref_loc (input_location, dest);
345 type = TREE_TYPE (decl);
348 gcc_assert (TREE_CODE (type) == RECORD_TYPE);
349 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
351 tree ftype = TREE_TYPE (field);
352 tree declf, destf = NULL_TREE;
353 bool has_alloc_comps = gfc_has_alloc_comps (ftype, field);
354 if ((!GFC_DESCRIPTOR_TYPE_P (ftype)
355 || GFC_TYPE_ARRAY_AKIND (ftype) != GFC_ARRAY_ALLOCATABLE)
356 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (field)
359 declf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
360 decl, field, NULL_TREE);
362 destf = fold_build3_loc (input_location, COMPONENT_REF, ftype,
363 dest, field, NULL_TREE);
368 case WALK_ALLOC_COMPS_DTOR:
370 case WALK_ALLOC_COMPS_DEFAULT_CTOR:
371 if (GFC_DESCRIPTOR_TYPE_P (ftype)
372 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
374 gfc_add_modify (&block, unshare_expr (destf),
375 unshare_expr (declf));
376 tem = gfc_duplicate_allocatable_nocopy
377 (destf, declf, ftype,
378 GFC_TYPE_ARRAY_RANK (ftype));
380 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
381 tem = gfc_duplicate_allocatable_nocopy (destf, declf, ftype, 0);
383 case WALK_ALLOC_COMPS_COPY_CTOR:
384 if (GFC_DESCRIPTOR_TYPE_P (ftype)
385 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
386 tem = gfc_duplicate_allocatable (destf, declf, ftype,
387 GFC_TYPE_ARRAY_RANK (ftype),
389 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
390 tem = gfc_duplicate_allocatable (destf, declf, ftype, 0,
395 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
398 gfc_init_block (&tmpblock);
399 gfc_add_expr_to_block (&tmpblock,
400 gfc_walk_alloc_comps (declf, destf,
402 then_b = gfc_finish_block (&tmpblock);
403 if (GFC_DESCRIPTOR_TYPE_P (ftype)
404 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
405 tem = gfc_conv_descriptor_data_get (unshare_expr (declf));
406 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
407 tem = unshare_expr (declf);
412 tem = fold_convert (pvoid_type_node, tem);
413 tem = fold_build2_loc (input_location, NE_EXPR,
414 boolean_type_node, tem,
416 then_b = build3_loc (input_location, COND_EXPR, void_type_node,
418 build_empty_stmt (input_location));
420 gfc_add_expr_to_block (&block, then_b);
422 if (kind == WALK_ALLOC_COMPS_DTOR)
424 if (GFC_DESCRIPTOR_TYPE_P (ftype)
425 && GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
427 tem = gfc_trans_dealloc_allocated (unshare_expr (declf),
429 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
431 else if (GFC_DECL_GET_SCALAR_ALLOCATABLE (field))
433 tem = gfc_call_free (unshare_expr (declf));
434 gfc_add_expr_to_block (&block, gfc_omp_unshare_expr (tem));
439 return gfc_finish_block (&block);
442 /* Return code to initialize DECL with its default constructor, or
443 NULL if there's nothing to do. */
446 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
448 tree type = TREE_TYPE (decl), size, ptr, cond, then_b, else_b;
449 stmtblock_t block, cond_block;
451 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
452 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE
453 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR
454 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION);
456 if ((! GFC_DESCRIPTOR_TYPE_P (type)
457 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
458 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
460 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
463 gfc_start_block (&block);
464 tree tem = gfc_walk_alloc_comps (outer, decl,
465 OMP_CLAUSE_DECL (clause),
466 WALK_ALLOC_COMPS_DEFAULT_CTOR);
467 gfc_add_expr_to_block (&block, tem);
468 return gfc_finish_block (&block);
473 gcc_assert (outer != NULL_TREE);
475 /* Allocatable arrays and scalars in PRIVATE clauses need to be set to
476 "not currently allocated" allocation status if outer
477 array is "not currently allocated", otherwise should be allocated. */
478 gfc_start_block (&block);
480 gfc_init_block (&cond_block);
482 if (GFC_DESCRIPTOR_TYPE_P (type))
484 gfc_add_modify (&cond_block, decl, outer);
485 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
486 size = gfc_conv_descriptor_ubound_get (decl, rank);
487 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
489 gfc_conv_descriptor_lbound_get (decl, rank));
490 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
491 size, gfc_index_one_node);
492 if (GFC_TYPE_ARRAY_RANK (type) > 1)
493 size = fold_build2_loc (input_location, MULT_EXPR,
494 gfc_array_index_type, size,
495 gfc_conv_descriptor_stride_get (decl, rank));
496 tree esize = fold_convert (gfc_array_index_type,
497 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
498 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
500 size = unshare_expr (size);
501 size = gfc_evaluate_now (fold_convert (size_type_node, size),
505 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
506 ptr = gfc_create_var (pvoid_type_node, NULL);
507 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
508 if (GFC_DESCRIPTOR_TYPE_P (type))
509 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl), ptr);
511 gfc_add_modify (&cond_block, unshare_expr (decl),
512 fold_convert (TREE_TYPE (decl), ptr));
513 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
515 tree tem = gfc_walk_alloc_comps (outer, decl,
516 OMP_CLAUSE_DECL (clause),
517 WALK_ALLOC_COMPS_DEFAULT_CTOR);
518 gfc_add_expr_to_block (&cond_block, tem);
520 then_b = gfc_finish_block (&cond_block);
522 /* Reduction clause requires allocated ALLOCATABLE. */
523 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_REDUCTION)
525 gfc_init_block (&cond_block);
526 if (GFC_DESCRIPTOR_TYPE_P (type))
527 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (decl),
530 gfc_add_modify (&cond_block, unshare_expr (decl),
531 build_zero_cst (TREE_TYPE (decl)));
532 else_b = gfc_finish_block (&cond_block);
534 tree tem = fold_convert (pvoid_type_node,
535 GFC_DESCRIPTOR_TYPE_P (type)
536 ? gfc_conv_descriptor_data_get (outer) : outer);
537 tem = unshare_expr (tem);
538 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
539 tem, null_pointer_node);
540 gfc_add_expr_to_block (&block,
541 build3_loc (input_location, COND_EXPR,
542 void_type_node, cond, then_b,
546 gfc_add_expr_to_block (&block, then_b);
548 return gfc_finish_block (&block);
551 /* Build and return code for a copy constructor from SRC to DEST. */
554 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
556 tree type = TREE_TYPE (dest), ptr, size, call;
557 tree cond, then_b, else_b;
558 stmtblock_t block, cond_block;
560 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE
561 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);
563 if ((! GFC_DESCRIPTOR_TYPE_P (type)
564 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
565 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
567 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
569 gfc_start_block (&block);
570 gfc_add_modify (&block, dest, src);
571 tree tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
572 WALK_ALLOC_COMPS_COPY_CTOR);
573 gfc_add_expr_to_block (&block, tem);
574 return gfc_finish_block (&block);
577 return build2_v (MODIFY_EXPR, dest, src);
580 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
581 and copied from SRC. */
582 gfc_start_block (&block);
584 gfc_init_block (&cond_block);
586 gfc_add_modify (&cond_block, dest, src);
587 if (GFC_DESCRIPTOR_TYPE_P (type))
589 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
590 size = gfc_conv_descriptor_ubound_get (dest, rank);
591 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
593 gfc_conv_descriptor_lbound_get (dest, rank));
594 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
595 size, gfc_index_one_node);
596 if (GFC_TYPE_ARRAY_RANK (type) > 1)
597 size = fold_build2_loc (input_location, MULT_EXPR,
598 gfc_array_index_type, size,
599 gfc_conv_descriptor_stride_get (dest, rank));
600 tree esize = fold_convert (gfc_array_index_type,
601 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
602 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
604 size = unshare_expr (size);
605 size = gfc_evaluate_now (fold_convert (size_type_node, size),
609 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
610 ptr = gfc_create_var (pvoid_type_node, NULL);
611 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
612 if (GFC_DESCRIPTOR_TYPE_P (type))
613 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest), ptr);
615 gfc_add_modify (&cond_block, unshare_expr (dest),
616 fold_convert (TREE_TYPE (dest), ptr));
618 tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
619 ? gfc_conv_descriptor_data_get (src) : src;
620 srcptr = unshare_expr (srcptr);
621 srcptr = fold_convert (pvoid_type_node, srcptr);
622 call = build_call_expr_loc (input_location,
623 builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
625 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
626 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
628 tree tem = gfc_walk_alloc_comps (src, dest,
629 OMP_CLAUSE_DECL (clause),
630 WALK_ALLOC_COMPS_COPY_CTOR);
631 gfc_add_expr_to_block (&cond_block, tem);
633 then_b = gfc_finish_block (&cond_block);
635 gfc_init_block (&cond_block);
636 if (GFC_DESCRIPTOR_TYPE_P (type))
637 gfc_conv_descriptor_data_set (&cond_block, unshare_expr (dest),
640 gfc_add_modify (&cond_block, unshare_expr (dest),
641 build_zero_cst (TREE_TYPE (dest)));
642 else_b = gfc_finish_block (&cond_block);
644 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
645 unshare_expr (srcptr), null_pointer_node);
646 gfc_add_expr_to_block (&block,
647 build3_loc (input_location, COND_EXPR,
648 void_type_node, cond, then_b, else_b));
650 return gfc_finish_block (&block);
653 /* Similarly, except use an intrinsic or pointer assignment operator
657 gfc_omp_clause_assign_op (tree clause, tree dest, tree src)
659 tree type = TREE_TYPE (dest), ptr, size, call, nonalloc;
660 tree cond, then_b, else_b;
661 stmtblock_t block, cond_block, cond_block2, inner_block;
663 if ((! GFC_DESCRIPTOR_TYPE_P (type)
664 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
665 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
667 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
669 gfc_start_block (&block);
670 /* First dealloc any allocatable components in DEST. */
671 tree tem = gfc_walk_alloc_comps (dest, NULL_TREE,
672 OMP_CLAUSE_DECL (clause),
673 WALK_ALLOC_COMPS_DTOR);
674 gfc_add_expr_to_block (&block, tem);
675 /* Then copy over toplevel data. */
676 gfc_add_modify (&block, dest, src);
677 /* Finally allocate any allocatable components and copy. */
678 tem = gfc_walk_alloc_comps (src, dest, OMP_CLAUSE_DECL (clause),
679 WALK_ALLOC_COMPS_COPY_CTOR);
680 gfc_add_expr_to_block (&block, tem);
681 return gfc_finish_block (&block);
684 return build2_v (MODIFY_EXPR, dest, src);
687 gfc_start_block (&block);
689 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
691 then_b = gfc_walk_alloc_comps (dest, NULL_TREE, OMP_CLAUSE_DECL (clause),
692 WALK_ALLOC_COMPS_DTOR);
693 tree tem = fold_convert (pvoid_type_node,
694 GFC_DESCRIPTOR_TYPE_P (type)
695 ? gfc_conv_descriptor_data_get (dest) : dest);
696 tem = unshare_expr (tem);
697 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
698 tem, null_pointer_node);
699 tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
700 then_b, build_empty_stmt (input_location));
701 gfc_add_expr_to_block (&block, tem);
704 gfc_init_block (&cond_block);
706 if (GFC_DESCRIPTOR_TYPE_P (type))
708 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
709 size = gfc_conv_descriptor_ubound_get (src, rank);
710 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
712 gfc_conv_descriptor_lbound_get (src, rank));
713 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
714 size, gfc_index_one_node);
715 if (GFC_TYPE_ARRAY_RANK (type) > 1)
716 size = fold_build2_loc (input_location, MULT_EXPR,
717 gfc_array_index_type, size,
718 gfc_conv_descriptor_stride_get (src, rank));
719 tree esize = fold_convert (gfc_array_index_type,
720 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
721 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
723 size = unshare_expr (size);
724 size = gfc_evaluate_now (fold_convert (size_type_node, size),
728 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
729 ptr = gfc_create_var (pvoid_type_node, NULL);
731 tree destptr = GFC_DESCRIPTOR_TYPE_P (type)
732 ? gfc_conv_descriptor_data_get (dest) : dest;
733 destptr = unshare_expr (destptr);
734 destptr = fold_convert (pvoid_type_node, destptr);
735 gfc_add_modify (&cond_block, ptr, destptr);
737 nonalloc = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
738 destptr, null_pointer_node);
740 if (GFC_DESCRIPTOR_TYPE_P (type))
743 for (i = 0; i < GFC_TYPE_ARRAY_RANK (type); i++)
745 tree rank = gfc_rank_cst[i];
746 tree tem = gfc_conv_descriptor_ubound_get (src, rank);
747 tem = fold_build2_loc (input_location, MINUS_EXPR,
748 gfc_array_index_type, tem,
749 gfc_conv_descriptor_lbound_get (src, rank));
750 tem = fold_build2_loc (input_location, PLUS_EXPR,
751 gfc_array_index_type, tem,
752 gfc_conv_descriptor_lbound_get (dest, rank));
753 tem = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
754 tem, gfc_conv_descriptor_ubound_get (dest,
756 cond = fold_build2_loc (input_location, TRUTH_ORIF_EXPR,
757 boolean_type_node, cond, tem);
761 gfc_init_block (&cond_block2);
763 if (GFC_DESCRIPTOR_TYPE_P (type))
765 gfc_init_block (&inner_block);
766 gfc_allocate_using_malloc (&inner_block, ptr, size, NULL_TREE);
767 then_b = gfc_finish_block (&inner_block);
769 gfc_init_block (&inner_block);
770 gfc_add_modify (&inner_block, ptr,
771 gfc_call_realloc (&inner_block, ptr, size));
772 else_b = gfc_finish_block (&inner_block);
774 gfc_add_expr_to_block (&cond_block2,
775 build3_loc (input_location, COND_EXPR,
777 unshare_expr (nonalloc),
779 gfc_add_modify (&cond_block2, dest, src);
780 gfc_conv_descriptor_data_set (&cond_block2, unshare_expr (dest), ptr);
784 gfc_allocate_using_malloc (&cond_block2, ptr, size, NULL_TREE);
785 gfc_add_modify (&cond_block2, unshare_expr (dest),
786 fold_convert (type, ptr));
788 then_b = gfc_finish_block (&cond_block2);
789 else_b = build_empty_stmt (input_location);
791 gfc_add_expr_to_block (&cond_block,
792 build3_loc (input_location, COND_EXPR,
793 void_type_node, unshare_expr (cond),
796 tree srcptr = GFC_DESCRIPTOR_TYPE_P (type)
797 ? gfc_conv_descriptor_data_get (src) : src;
798 srcptr = unshare_expr (srcptr);
799 srcptr = fold_convert (pvoid_type_node, srcptr);
800 call = build_call_expr_loc (input_location,
801 builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
803 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
804 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
806 tree tem = gfc_walk_alloc_comps (src, dest,
807 OMP_CLAUSE_DECL (clause),
808 WALK_ALLOC_COMPS_COPY_CTOR);
809 gfc_add_expr_to_block (&cond_block, tem);
811 then_b = gfc_finish_block (&cond_block);
813 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_COPYIN)
815 gfc_init_block (&cond_block);
816 if (GFC_DESCRIPTOR_TYPE_P (type))
817 gfc_add_expr_to_block (&cond_block,
818 gfc_trans_dealloc_allocated (unshare_expr (dest),
822 destptr = gfc_evaluate_now (destptr, &cond_block);
823 gfc_add_expr_to_block (&cond_block, gfc_call_free (destptr));
824 gfc_add_modify (&cond_block, unshare_expr (dest),
825 build_zero_cst (TREE_TYPE (dest)));
827 else_b = gfc_finish_block (&cond_block);
829 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
830 unshare_expr (srcptr), null_pointer_node);
831 gfc_add_expr_to_block (&block,
832 build3_loc (input_location, COND_EXPR,
833 void_type_node, cond,
837 gfc_add_expr_to_block (&block, then_b);
839 return gfc_finish_block (&block);
843 gfc_omp_linear_clause_add_loop (stmtblock_t *block, tree dest, tree src,
844 tree add, tree nelems)
846 stmtblock_t tmpblock;
847 tree desta, srca, index = gfc_create_var (gfc_array_index_type, "S");
848 nelems = gfc_evaluate_now (nelems, block);
850 gfc_init_block (&tmpblock);
851 if (TREE_CODE (TREE_TYPE (dest)) == ARRAY_TYPE)
853 desta = gfc_build_array_ref (dest, index, NULL);
854 srca = gfc_build_array_ref (src, index, NULL);
858 gcc_assert (POINTER_TYPE_P (TREE_TYPE (dest)));
859 tree idx = fold_build2 (MULT_EXPR, sizetype,
860 fold_convert (sizetype, index),
861 TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dest))));
862 desta = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
863 TREE_TYPE (dest), dest,
865 srca = build_fold_indirect_ref (fold_build2 (POINTER_PLUS_EXPR,
866 TREE_TYPE (src), src,
869 gfc_add_modify (&tmpblock, desta,
870 fold_build2 (PLUS_EXPR, TREE_TYPE (desta),
874 gfc_init_loopinfo (&loop);
876 loop.from[0] = gfc_index_zero_node;
877 loop.loopvar[0] = index;
879 gfc_trans_scalarizing_loops (&loop, &tmpblock);
880 gfc_add_block_to_block (block, &loop.pre);
883 /* Build and return code for a constructor of DEST that initializes
884 it to SRC plus ADD (ADD is scalar integer). */
887 gfc_omp_clause_linear_ctor (tree clause, tree dest, tree src, tree add)
889 tree type = TREE_TYPE (dest), ptr, size, nelems = NULL_TREE;
892 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LINEAR);
894 gfc_start_block (&block);
895 add = gfc_evaluate_now (add, &block);
897 if ((! GFC_DESCRIPTOR_TYPE_P (type)
898 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
899 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
901 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
902 if (!TYPE_DOMAIN (type)
903 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE
904 || TYPE_MIN_VALUE (TYPE_DOMAIN (type)) == error_mark_node
905 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == error_mark_node)
907 nelems = fold_build2 (EXACT_DIV_EXPR, sizetype,
908 TYPE_SIZE_UNIT (type),
909 TYPE_SIZE_UNIT (TREE_TYPE (type)));
910 nelems = size_binop (MINUS_EXPR, nelems, size_one_node);
913 nelems = array_type_nelts (type);
914 nelems = fold_convert (gfc_array_index_type, nelems);
916 gfc_omp_linear_clause_add_loop (&block, dest, src, add, nelems);
917 return gfc_finish_block (&block);
920 /* Allocatable arrays in LINEAR clauses need to be allocated
921 and copied from SRC. */
922 gfc_add_modify (&block, dest, src);
923 if (GFC_DESCRIPTOR_TYPE_P (type))
925 tree rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
926 size = gfc_conv_descriptor_ubound_get (dest, rank);
927 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
929 gfc_conv_descriptor_lbound_get (dest, rank));
930 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
931 size, gfc_index_one_node);
932 if (GFC_TYPE_ARRAY_RANK (type) > 1)
933 size = fold_build2_loc (input_location, MULT_EXPR,
934 gfc_array_index_type, size,
935 gfc_conv_descriptor_stride_get (dest, rank));
936 tree esize = fold_convert (gfc_array_index_type,
937 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
938 nelems = gfc_evaluate_now (unshare_expr (size), &block);
939 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
940 nelems, unshare_expr (esize));
941 size = gfc_evaluate_now (fold_convert (size_type_node, size),
943 nelems = fold_build2_loc (input_location, MINUS_EXPR,
944 gfc_array_index_type, nelems,
948 size = fold_convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (type)));
949 ptr = gfc_create_var (pvoid_type_node, NULL);
950 gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
951 if (GFC_DESCRIPTOR_TYPE_P (type))
953 gfc_conv_descriptor_data_set (&block, unshare_expr (dest), ptr);
954 tree etype = gfc_get_element_type (type);
955 ptr = fold_convert (build_pointer_type (etype), ptr);
956 tree srcptr = gfc_conv_descriptor_data_get (unshare_expr (src));
957 srcptr = fold_convert (build_pointer_type (etype), srcptr);
958 gfc_omp_linear_clause_add_loop (&block, ptr, srcptr, add, nelems);
962 gfc_add_modify (&block, unshare_expr (dest),
963 fold_convert (TREE_TYPE (dest), ptr));
964 ptr = fold_convert (TREE_TYPE (dest), ptr);
965 tree dstm = build_fold_indirect_ref (ptr);
966 tree srcm = build_fold_indirect_ref (unshare_expr (src));
967 gfc_add_modify (&block, dstm,
968 fold_build2 (PLUS_EXPR, TREE_TYPE (add), srcm, add));
970 return gfc_finish_block (&block);
973 /* Build and return code destructing DECL. Return NULL if nothing
977 gfc_omp_clause_dtor (tree clause, tree decl)
979 tree type = TREE_TYPE (decl), tem;
981 if ((! GFC_DESCRIPTOR_TYPE_P (type)
982 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
983 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause)))
985 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
986 return gfc_walk_alloc_comps (decl, NULL_TREE,
987 OMP_CLAUSE_DECL (clause),
988 WALK_ALLOC_COMPS_DTOR);
992 if (GFC_DESCRIPTOR_TYPE_P (type))
993 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
994 to be deallocated if they were allocated. */
995 tem = gfc_trans_dealloc_allocated (decl, false, NULL);
997 tem = gfc_call_free (decl);
998 tem = gfc_omp_unshare_expr (tem);
1000 if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
1005 gfc_init_block (&block);
1006 gfc_add_expr_to_block (&block,
1007 gfc_walk_alloc_comps (decl, NULL_TREE,
1008 OMP_CLAUSE_DECL (clause),
1009 WALK_ALLOC_COMPS_DTOR));
1010 gfc_add_expr_to_block (&block, tem);
1011 then_b = gfc_finish_block (&block);
1013 tem = fold_convert (pvoid_type_node,
1014 GFC_DESCRIPTOR_TYPE_P (type)
1015 ? gfc_conv_descriptor_data_get (decl) : decl);
1016 tem = unshare_expr (tem);
1017 tree cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
1018 tem, null_pointer_node);
1019 tem = build3_loc (input_location, COND_EXPR, void_type_node, cond,
1020 then_b, build_empty_stmt (input_location));
1027 gfc_omp_finish_clause (tree c, gimple_seq *pre_p)
1029 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
1032 tree decl = OMP_CLAUSE_DECL (c);
1033 tree c2 = NULL_TREE, c3 = NULL_TREE, c4 = NULL_TREE;
1034 if (POINTER_TYPE_P (TREE_TYPE (decl)))
1036 if (!gfc_omp_privatize_by_reference (decl)
1037 && !GFC_DECL_GET_SCALAR_POINTER (decl)
1038 && !GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
1039 && !GFC_DECL_CRAY_POINTEE (decl)
1040 && !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
1042 tree orig_decl = decl;
1043 c4 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1044 OMP_CLAUSE_SET_MAP_KIND (c4, GOMP_MAP_POINTER);
1045 OMP_CLAUSE_DECL (c4) = decl;
1046 OMP_CLAUSE_SIZE (c4) = size_int (0);
1047 decl = build_fold_indirect_ref (decl);
1048 OMP_CLAUSE_DECL (c) = decl;
1049 OMP_CLAUSE_SIZE (c) = NULL_TREE;
1050 if (TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE
1051 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
1052 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
1054 c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1055 OMP_CLAUSE_SET_MAP_KIND (c3, GOMP_MAP_POINTER);
1056 OMP_CLAUSE_DECL (c3) = unshare_expr (decl);
1057 OMP_CLAUSE_SIZE (c3) = size_int (0);
1058 decl = build_fold_indirect_ref (decl);
1059 OMP_CLAUSE_DECL (c) = decl;
1062 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1065 gfc_start_block (&block);
1066 tree type = TREE_TYPE (decl);
1067 tree ptr = gfc_conv_descriptor_data_get (decl);
1068 ptr = fold_convert (build_pointer_type (char_type_node), ptr);
1069 ptr = build_fold_indirect_ref (ptr);
1070 OMP_CLAUSE_DECL (c) = ptr;
1071 c2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
1072 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_TO_PSET);
1073 OMP_CLAUSE_DECL (c2) = decl;
1074 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (type);
1075 c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
1076 OMP_CLAUSE_SET_MAP_KIND (c3, GOMP_MAP_POINTER);
1077 OMP_CLAUSE_DECL (c3) = gfc_conv_descriptor_data_get (decl);
1078 OMP_CLAUSE_SIZE (c3) = size_int (0);
1079 tree size = create_tmp_var (gfc_array_index_type);
1080 tree elemsz = TYPE_SIZE_UNIT (gfc_get_element_type (type));
1081 elemsz = fold_convert (gfc_array_index_type, elemsz);
1082 if (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER
1083 || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT)
1085 stmtblock_t cond_block;
1086 tree tem, then_b, else_b, zero, cond;
1088 gfc_init_block (&cond_block);
1089 tem = gfc_full_array_size (&cond_block, decl,
1090 GFC_TYPE_ARRAY_RANK (type));
1091 gfc_add_modify (&cond_block, size, tem);
1092 gfc_add_modify (&cond_block, size,
1093 fold_build2 (MULT_EXPR, gfc_array_index_type,
1095 then_b = gfc_finish_block (&cond_block);
1096 gfc_init_block (&cond_block);
1097 zero = build_int_cst (gfc_array_index_type, 0);
1098 gfc_add_modify (&cond_block, size, zero);
1099 else_b = gfc_finish_block (&cond_block);
1100 tem = gfc_conv_descriptor_data_get (decl);
1101 tem = fold_convert (pvoid_type_node, tem);
1102 cond = fold_build2_loc (input_location, NE_EXPR,
1103 boolean_type_node, tem, null_pointer_node);
1104 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
1105 void_type_node, cond,
1110 gfc_add_modify (&block, size,
1111 gfc_full_array_size (&block, decl,
1112 GFC_TYPE_ARRAY_RANK (type)));
1113 gfc_add_modify (&block, size,
1114 fold_build2 (MULT_EXPR, gfc_array_index_type,
1117 OMP_CLAUSE_SIZE (c) = size;
1118 tree stmt = gfc_finish_block (&block);
1119 gimplify_and_add (stmt, pre_p);
1122 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
1124 = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
1125 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
1128 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (last);
1129 OMP_CLAUSE_CHAIN (last) = c2;
1134 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (last);
1135 OMP_CLAUSE_CHAIN (last) = c3;
1140 OMP_CLAUSE_CHAIN (c4) = OMP_CLAUSE_CHAIN (last);
1141 OMP_CLAUSE_CHAIN (last) = c4;
1147 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
1148 disregarded in OpenMP construct, because it is going to be
1149 remapped during OpenMP lowering. SHARED is true if DECL
1150 is going to be shared, false if it is going to be privatized. */
1153 gfc_omp_disregard_value_expr (tree decl, bool shared)
1155 if (GFC_DECL_COMMON_OR_EQUIV (decl)
1156 && DECL_HAS_VALUE_EXPR_P (decl))
1158 tree value = DECL_VALUE_EXPR (decl);
1160 if (TREE_CODE (value) == COMPONENT_REF
1161 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
1162 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
1164 /* If variable in COMMON or EQUIVALENCE is privatized, return
1165 true, as just that variable is supposed to be privatized,
1166 not the whole COMMON or whole EQUIVALENCE.
1167 For shared variables in COMMON or EQUIVALENCE, let them be
1168 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
1169 from the same COMMON or EQUIVALENCE just one sharing of the
1170 whole COMMON or EQUIVALENCE is enough. */
1175 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
1181 /* Return true if DECL that is shared iff SHARED is true should
1182 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
1186 gfc_omp_private_debug_clause (tree decl, bool shared)
1188 if (GFC_DECL_CRAY_POINTEE (decl))
1191 if (GFC_DECL_COMMON_OR_EQUIV (decl)
1192 && DECL_HAS_VALUE_EXPR_P (decl))
1194 tree value = DECL_VALUE_EXPR (decl);
1196 if (TREE_CODE (value) == COMPONENT_REF
1197 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
1198 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
1205 /* Register language specific type size variables as potentially OpenMP
1206 firstprivate variables. */
1209 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
1211 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
1215 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
1216 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
1218 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
1219 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
1220 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
1222 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
1223 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
1229 gfc_trans_add_clause (tree node, tree tail)
1231 OMP_CLAUSE_CHAIN (node) = tail;
1236 gfc_trans_omp_variable (gfc_symbol *sym, bool declare_simd)
1241 gfc_symbol *proc_sym;
1242 gfc_formal_arglist *f;
1244 gcc_assert (sym->attr.dummy);
1245 proc_sym = sym->ns->proc_name;
1246 if (proc_sym->attr.entry_master)
1248 if (gfc_return_by_reference (proc_sym))
1251 if (proc_sym->ts.type == BT_CHARACTER)
1254 for (f = gfc_sym_get_dummy_args (proc_sym); f; f = f->next)
1260 return build_int_cst (integer_type_node, cnt);
1263 tree t = gfc_get_symbol_decl (sym);
1267 bool alternate_entry;
1270 return_value = sym->attr.function && sym->result == sym;
1271 alternate_entry = sym->attr.function && sym->attr.entry
1272 && sym->result == sym;
1273 entry_master = sym->attr.result
1274 && sym->ns->proc_name->attr.entry_master
1275 && !gfc_return_by_reference (sym->ns->proc_name);
1276 parent_decl = current_function_decl
1277 ? DECL_CONTEXT (current_function_decl) : NULL_TREE;
1279 if ((t == parent_decl && return_value)
1280 || (sym->ns && sym->ns->proc_name
1281 && sym->ns->proc_name->backend_decl == parent_decl
1282 && (alternate_entry || entry_master)))
1287 /* Special case for assigning the return value of a function.
1288 Self recursive functions must have an explicit return value. */
1289 if (return_value && (t == current_function_decl || parent_flag))
1290 t = gfc_get_fake_result_decl (sym, parent_flag);
1292 /* Similarly for alternate entry points. */
1293 else if (alternate_entry
1294 && (sym->ns->proc_name->backend_decl == current_function_decl
1297 gfc_entry_list *el = NULL;
1299 for (el = sym->ns->entries; el; el = el->next)
1302 t = gfc_get_fake_result_decl (sym, parent_flag);
1307 else if (entry_master
1308 && (sym->ns->proc_name->backend_decl == current_function_decl
1310 t = gfc_get_fake_result_decl (sym, parent_flag);
1316 gfc_trans_omp_variable_list (enum omp_clause_code code,
1317 gfc_omp_namelist *namelist, tree list,
1320 for (; namelist != NULL; namelist = namelist->next)
1321 if (namelist->sym->attr.referenced || declare_simd)
1323 tree t = gfc_trans_omp_variable (namelist->sym, declare_simd);
1324 if (t != error_mark_node)
1326 tree node = build_omp_clause (input_location, code);
1327 OMP_CLAUSE_DECL (node) = t;
1328 list = gfc_trans_add_clause (node, list);
1334 struct omp_udr_find_orig_data
1336 gfc_omp_udr *omp_udr;
1341 omp_udr_find_orig (gfc_expr **e, int *walk_subtrees ATTRIBUTE_UNUSED,
1344 struct omp_udr_find_orig_data *cd = (struct omp_udr_find_orig_data *) data;
1345 if ((*e)->expr_type == EXPR_VARIABLE
1346 && (*e)->symtree->n.sym == cd->omp_udr->omp_orig)
1347 cd->omp_orig_seen = true;
1353 gfc_trans_omp_array_reduction_or_udr (tree c, gfc_omp_namelist *n, locus where)
1355 gfc_symbol *sym = n->sym;
1356 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
1357 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
1358 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
1359 gfc_symbol omp_var_copy[4];
1360 gfc_expr *e1, *e2, *e3, *e4;
1362 tree decl, backend_decl, stmt, type, outer_decl;
1363 locus old_loc = gfc_current_locus;
1366 gfc_omp_udr *udr = n->udr ? n->udr->udr : NULL;
1368 decl = OMP_CLAUSE_DECL (c);
1369 gfc_current_locus = where;
1370 type = TREE_TYPE (decl);
1371 outer_decl = create_tmp_var_raw (type);
1372 if (TREE_CODE (decl) == PARM_DECL
1373 && TREE_CODE (type) == REFERENCE_TYPE
1374 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
1375 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
1377 decl = build_fold_indirect_ref (decl);
1378 type = TREE_TYPE (type);
1381 /* Create a fake symbol for init value. */
1382 memset (&init_val_sym, 0, sizeof (init_val_sym));
1383 init_val_sym.ns = sym->ns;
1384 init_val_sym.name = sym->name;
1385 init_val_sym.ts = sym->ts;
1386 init_val_sym.attr.referenced = 1;
1387 init_val_sym.declared_at = where;
1388 init_val_sym.attr.flavor = FL_VARIABLE;
1389 if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
1390 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
1391 else if (udr->initializer_ns)
1392 backend_decl = NULL;
1394 switch (sym->ts.type)
1400 backend_decl = build_zero_cst (gfc_sym_type (&init_val_sym));
1403 backend_decl = NULL_TREE;
1406 init_val_sym.backend_decl = backend_decl;
1408 /* Create a fake symbol for the outer array reference. */
1411 outer_sym.as = gfc_copy_array_spec (sym->as);
1412 outer_sym.attr.dummy = 0;
1413 outer_sym.attr.result = 0;
1414 outer_sym.attr.flavor = FL_VARIABLE;
1415 outer_sym.backend_decl = outer_decl;
1416 if (decl != OMP_CLAUSE_DECL (c))
1417 outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);
1419 /* Create fake symtrees for it. */
1420 symtree1 = gfc_new_symtree (&root1, sym->name);
1421 symtree1->n.sym = sym;
1422 gcc_assert (symtree1 == root1);
1424 symtree2 = gfc_new_symtree (&root2, sym->name);
1425 symtree2->n.sym = &init_val_sym;
1426 gcc_assert (symtree2 == root2);
1428 symtree3 = gfc_new_symtree (&root3, sym->name);
1429 symtree3->n.sym = &outer_sym;
1430 gcc_assert (symtree3 == root3);
1432 memset (omp_var_copy, 0, sizeof omp_var_copy);
1435 omp_var_copy[0] = *udr->omp_out;
1436 omp_var_copy[1] = *udr->omp_in;
1437 *udr->omp_out = outer_sym;
1438 *udr->omp_in = *sym;
1439 if (udr->initializer_ns)
1441 omp_var_copy[2] = *udr->omp_priv;
1442 omp_var_copy[3] = *udr->omp_orig;
1443 *udr->omp_priv = *sym;
1444 *udr->omp_orig = outer_sym;
1448 /* Create expressions. */
1449 e1 = gfc_get_expr ();
1450 e1->expr_type = EXPR_VARIABLE;
1452 e1->symtree = symtree1;
1454 if (sym->attr.dimension)
1456 e1->ref = ref = gfc_get_ref ();
1457 ref->type = REF_ARRAY;
1458 ref->u.ar.where = where;
1459 ref->u.ar.as = sym->as;
1460 ref->u.ar.type = AR_FULL;
1461 ref->u.ar.dimen = 0;
1463 t = gfc_resolve_expr (e1);
1467 if (backend_decl != NULL_TREE)
1469 e2 = gfc_get_expr ();
1470 e2->expr_type = EXPR_VARIABLE;
1472 e2->symtree = symtree2;
1474 t = gfc_resolve_expr (e2);
1477 else if (udr->initializer_ns == NULL)
1479 gcc_assert (sym->ts.type == BT_DERIVED);
1480 e2 = gfc_default_initializer (&sym->ts);
1482 t = gfc_resolve_expr (e2);
1485 else if (n->udr->initializer->op == EXEC_ASSIGN)
1487 e2 = gfc_copy_expr (n->udr->initializer->expr2);
1488 t = gfc_resolve_expr (e2);
1491 if (udr && udr->initializer_ns)
1493 struct omp_udr_find_orig_data cd;
1495 cd.omp_orig_seen = false;
1496 gfc_code_walker (&n->udr->initializer,
1497 gfc_dummy_code_callback, omp_udr_find_orig, &cd);
1498 if (cd.omp_orig_seen)
1499 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1;
1502 e3 = gfc_copy_expr (e1);
1503 e3->symtree = symtree3;
1504 t = gfc_resolve_expr (e3);
1509 switch (OMP_CLAUSE_REDUCTION_CODE (c))
1513 e4 = gfc_add (e3, e1);
1516 e4 = gfc_multiply (e3, e1);
1518 case TRUTH_ANDIF_EXPR:
1519 e4 = gfc_and (e3, e1);
1521 case TRUTH_ORIF_EXPR:
1522 e4 = gfc_or (e3, e1);
1525 e4 = gfc_eqv (e3, e1);
1528 e4 = gfc_neqv (e3, e1);
1546 if (n->udr->combiner->op == EXEC_ASSIGN)
1549 e3 = gfc_copy_expr (n->udr->combiner->expr1);
1550 e4 = gfc_copy_expr (n->udr->combiner->expr2);
1551 t = gfc_resolve_expr (e3);
1553 t = gfc_resolve_expr (e4);
1562 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
1563 intrinsic_sym.ns = sym->ns;
1564 intrinsic_sym.name = iname;
1565 intrinsic_sym.ts = sym->ts;
1566 intrinsic_sym.attr.referenced = 1;
1567 intrinsic_sym.attr.intrinsic = 1;
1568 intrinsic_sym.attr.function = 1;
1569 intrinsic_sym.result = &intrinsic_sym;
1570 intrinsic_sym.declared_at = where;
1572 symtree4 = gfc_new_symtree (&root4, iname);
1573 symtree4->n.sym = &intrinsic_sym;
1574 gcc_assert (symtree4 == root4);
1576 e4 = gfc_get_expr ();
1577 e4->expr_type = EXPR_FUNCTION;
1579 e4->symtree = symtree4;
1580 e4->value.function.actual = gfc_get_actual_arglist ();
1581 e4->value.function.actual->expr = e3;
1582 e4->value.function.actual->next = gfc_get_actual_arglist ();
1583 e4->value.function.actual->next->expr = e1;
1585 if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
1587 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
1588 e1 = gfc_copy_expr (e1);
1589 e3 = gfc_copy_expr (e3);
1590 t = gfc_resolve_expr (e4);
1594 /* Create the init statement list. */
1597 stmt = gfc_trans_assignment (e1, e2, false, false);
1599 stmt = gfc_trans_call (n->udr->initializer, false,
1600 NULL_TREE, NULL_TREE, false);
1601 if (TREE_CODE (stmt) != BIND_EXPR)
1602 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1605 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
1607 /* Create the merge statement list. */
1610 stmt = gfc_trans_assignment (e3, e4, false, true);
1612 stmt = gfc_trans_call (n->udr->combiner, false,
1613 NULL_TREE, NULL_TREE, false);
1614 if (TREE_CODE (stmt) != BIND_EXPR)
1615 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
1618 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
1620 /* And stick the placeholder VAR_DECL into the clause as well. */
1621 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;
1623 gfc_current_locus = old_loc;
1636 gfc_free_array_spec (outer_sym.as);
1640 *udr->omp_out = omp_var_copy[0];
1641 *udr->omp_in = omp_var_copy[1];
1642 if (udr->initializer_ns)
1644 *udr->omp_priv = omp_var_copy[2];
1645 *udr->omp_orig = omp_var_copy[3];
1651 gfc_trans_omp_reduction_list (gfc_omp_namelist *namelist, tree list,
1654 for (; namelist != NULL; namelist = namelist->next)
1655 if (namelist->sym->attr.referenced)
1657 tree t = gfc_trans_omp_variable (namelist->sym, false);
1658 if (t != error_mark_node)
1660 tree node = build_omp_clause (where.lb->location,
1661 OMP_CLAUSE_REDUCTION);
1662 OMP_CLAUSE_DECL (node) = t;
1663 switch (namelist->u.reduction_op)
1665 case OMP_REDUCTION_PLUS:
1666 OMP_CLAUSE_REDUCTION_CODE (node) = PLUS_EXPR;
1668 case OMP_REDUCTION_MINUS:
1669 OMP_CLAUSE_REDUCTION_CODE (node) = MINUS_EXPR;
1671 case OMP_REDUCTION_TIMES:
1672 OMP_CLAUSE_REDUCTION_CODE (node) = MULT_EXPR;
1674 case OMP_REDUCTION_AND:
1675 OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ANDIF_EXPR;
1677 case OMP_REDUCTION_OR:
1678 OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ORIF_EXPR;
1680 case OMP_REDUCTION_EQV:
1681 OMP_CLAUSE_REDUCTION_CODE (node) = EQ_EXPR;
1683 case OMP_REDUCTION_NEQV:
1684 OMP_CLAUSE_REDUCTION_CODE (node) = NE_EXPR;
1686 case OMP_REDUCTION_MAX:
1687 OMP_CLAUSE_REDUCTION_CODE (node) = MAX_EXPR;
1689 case OMP_REDUCTION_MIN:
1690 OMP_CLAUSE_REDUCTION_CODE (node) = MIN_EXPR;
1692 case OMP_REDUCTION_IAND:
1693 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_AND_EXPR;
1695 case OMP_REDUCTION_IOR:
1696 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_IOR_EXPR;
1698 case OMP_REDUCTION_IEOR:
1699 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_XOR_EXPR;
1701 case OMP_REDUCTION_USER:
1702 OMP_CLAUSE_REDUCTION_CODE (node) = ERROR_MARK;
1707 if (namelist->sym->attr.dimension
1708 || namelist->u.reduction_op == OMP_REDUCTION_USER
1709 || namelist->sym->attr.allocatable)
1710 gfc_trans_omp_array_reduction_or_udr (node, namelist, where);
1711 list = gfc_trans_add_clause (node, list);
1718 gfc_convert_expr_to_tree (stmtblock_t *block, gfc_expr *expr)
1723 gfc_init_se (&se, NULL );
1724 gfc_conv_expr (&se, expr);
1725 gfc_add_block_to_block (block, &se.pre);
1726 result = gfc_evaluate_now (se.expr, block);
1727 gfc_add_block_to_block (block, &se.post);
1733 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
1734 locus where, bool declare_simd = false)
1736 tree omp_clauses = NULL_TREE, chunk_size, c;
1738 enum omp_clause_code clause_code;
1741 if (clauses == NULL)
1744 for (list = 0; list < OMP_LIST_NUM; list++)
1746 gfc_omp_namelist *n = clauses->lists[list];
1752 case OMP_LIST_REDUCTION:
1753 omp_clauses = gfc_trans_omp_reduction_list (n, omp_clauses, where);
1755 case OMP_LIST_PRIVATE:
1756 clause_code = OMP_CLAUSE_PRIVATE;
1758 case OMP_LIST_SHARED:
1759 clause_code = OMP_CLAUSE_SHARED;
1761 case OMP_LIST_FIRSTPRIVATE:
1762 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
1764 case OMP_LIST_LASTPRIVATE:
1765 clause_code = OMP_CLAUSE_LASTPRIVATE;
1767 case OMP_LIST_COPYIN:
1768 clause_code = OMP_CLAUSE_COPYIN;
1770 case OMP_LIST_COPYPRIVATE:
1771 clause_code = OMP_CLAUSE_COPYPRIVATE;
1773 case OMP_LIST_UNIFORM:
1774 clause_code = OMP_CLAUSE_UNIFORM;
1776 case OMP_LIST_USE_DEVICE:
1777 clause_code = OMP_CLAUSE_USE_DEVICE;
1779 case OMP_LIST_DEVICE_RESIDENT:
1780 clause_code = OMP_CLAUSE_DEVICE_RESIDENT;
1782 case OMP_LIST_CACHE:
1783 clause_code = OMP_CLAUSE__CACHE_;
1788 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses,
1791 case OMP_LIST_ALIGNED:
1792 for (; n != NULL; n = n->next)
1793 if (n->sym->attr.referenced || declare_simd)
1795 tree t = gfc_trans_omp_variable (n->sym, declare_simd);
1796 if (t != error_mark_node)
1798 tree node = build_omp_clause (input_location,
1799 OMP_CLAUSE_ALIGNED);
1800 OMP_CLAUSE_DECL (node) = t;
1806 alignment_var = gfc_conv_constant_to_tree (n->expr);
1809 gfc_init_se (&se, NULL);
1810 gfc_conv_expr (&se, n->expr);
1811 gfc_add_block_to_block (block, &se.pre);
1812 alignment_var = gfc_evaluate_now (se.expr, block);
1813 gfc_add_block_to_block (block, &se.post);
1815 OMP_CLAUSE_ALIGNED_ALIGNMENT (node) = alignment_var;
1817 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1821 case OMP_LIST_LINEAR:
1823 gfc_expr *last_step_expr = NULL;
1824 tree last_step = NULL_TREE;
1826 for (; n != NULL; n = n->next)
1830 last_step_expr = n->expr;
1831 last_step = NULL_TREE;
1833 if (n->sym->attr.referenced || declare_simd)
1835 tree t = gfc_trans_omp_variable (n->sym, declare_simd);
1836 if (t != error_mark_node)
1838 tree node = build_omp_clause (input_location,
1840 OMP_CLAUSE_DECL (node) = t;
1841 if (last_step_expr && last_step == NULL_TREE)
1845 = gfc_conv_constant_to_tree (last_step_expr);
1848 gfc_init_se (&se, NULL);
1849 gfc_conv_expr (&se, last_step_expr);
1850 gfc_add_block_to_block (block, &se.pre);
1851 last_step = gfc_evaluate_now (se.expr, block);
1852 gfc_add_block_to_block (block, &se.post);
1855 OMP_CLAUSE_LINEAR_STEP (node)
1856 = fold_convert (gfc_typenode_for_spec (&n->sym->ts),
1858 if (n->sym->attr.dimension || n->sym->attr.allocatable)
1859 OMP_CLAUSE_LINEAR_ARRAY (node) = 1;
1860 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1866 case OMP_LIST_DEPEND:
1867 for (; n != NULL; n = n->next)
1869 if (!n->sym->attr.referenced)
1872 tree node = build_omp_clause (input_location, OMP_CLAUSE_DEPEND);
1873 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
1875 tree decl = gfc_get_symbol_decl (n->sym);
1876 if (gfc_omp_privatize_by_reference (decl))
1877 decl = build_fold_indirect_ref (decl);
1878 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1880 decl = gfc_conv_descriptor_data_get (decl);
1881 decl = fold_convert (build_pointer_type (char_type_node),
1883 decl = build_fold_indirect_ref (decl);
1885 else if (DECL_P (decl))
1886 TREE_ADDRESSABLE (decl) = 1;
1887 OMP_CLAUSE_DECL (node) = decl;
1892 gfc_init_se (&se, NULL);
1893 if (n->expr->ref->u.ar.type == AR_ELEMENT)
1895 gfc_conv_expr_reference (&se, n->expr);
1900 gfc_conv_expr_descriptor (&se, n->expr);
1901 ptr = gfc_conv_array_data (se.expr);
1903 gfc_add_block_to_block (block, &se.pre);
1904 gfc_add_block_to_block (block, &se.post);
1905 ptr = fold_convert (build_pointer_type (char_type_node),
1907 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
1909 switch (n->u.depend_op)
1912 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_IN;
1914 case OMP_DEPEND_OUT:
1915 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_OUT;
1917 case OMP_DEPEND_INOUT:
1918 OMP_CLAUSE_DEPEND_KIND (node) = OMP_CLAUSE_DEPEND_INOUT;
1923 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1927 for (; n != NULL; n = n->next)
1929 if (!n->sym->attr.referenced)
1932 tree node = build_omp_clause (input_location, OMP_CLAUSE_MAP);
1933 tree node2 = NULL_TREE;
1934 tree node3 = NULL_TREE;
1935 tree node4 = NULL_TREE;
1936 tree decl = gfc_get_symbol_decl (n->sym);
1938 TREE_ADDRESSABLE (decl) = 1;
1939 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
1941 if (POINTER_TYPE_P (TREE_TYPE (decl))
1942 && (gfc_omp_privatize_by_reference (decl)
1943 || GFC_DECL_GET_SCALAR_POINTER (decl)
1944 || GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
1945 || GFC_DECL_CRAY_POINTEE (decl)
1946 || GFC_DESCRIPTOR_TYPE_P
1947 (TREE_TYPE (TREE_TYPE (decl)))))
1949 tree orig_decl = decl;
1950 node4 = build_omp_clause (input_location,
1952 OMP_CLAUSE_SET_MAP_KIND (node4, GOMP_MAP_POINTER);
1953 OMP_CLAUSE_DECL (node4) = decl;
1954 OMP_CLAUSE_SIZE (node4) = size_int (0);
1955 decl = build_fold_indirect_ref (decl);
1956 if (TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE
1957 && (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
1958 || GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
1960 node3 = build_omp_clause (input_location,
1962 OMP_CLAUSE_SET_MAP_KIND (node3, GOMP_MAP_POINTER);
1963 OMP_CLAUSE_DECL (node3) = decl;
1964 OMP_CLAUSE_SIZE (node3) = size_int (0);
1965 decl = build_fold_indirect_ref (decl);
1968 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
1970 tree type = TREE_TYPE (decl);
1971 tree ptr = gfc_conv_descriptor_data_get (decl);
1972 ptr = fold_convert (build_pointer_type (char_type_node),
1974 ptr = build_fold_indirect_ref (ptr);
1975 OMP_CLAUSE_DECL (node) = ptr;
1976 node2 = build_omp_clause (input_location,
1978 OMP_CLAUSE_SET_MAP_KIND (node2, GOMP_MAP_TO_PSET);
1979 OMP_CLAUSE_DECL (node2) = decl;
1980 OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
1981 node3 = build_omp_clause (input_location,
1983 OMP_CLAUSE_SET_MAP_KIND (node3, GOMP_MAP_POINTER);
1984 OMP_CLAUSE_DECL (node3)
1985 = gfc_conv_descriptor_data_get (decl);
1986 OMP_CLAUSE_SIZE (node3) = size_int (0);
1988 /* We have to check for n->sym->attr.dimension because
1989 of scalar coarrays. */
1990 if (n->sym->attr.pointer && n->sym->attr.dimension)
1992 stmtblock_t cond_block;
1994 = gfc_create_var (gfc_array_index_type, NULL);
1995 tree tem, then_b, else_b, zero, cond;
1997 gfc_init_block (&cond_block);
1999 = gfc_full_array_size (&cond_block, decl,
2000 GFC_TYPE_ARRAY_RANK (type));
2001 gfc_add_modify (&cond_block, size, tem);
2002 then_b = gfc_finish_block (&cond_block);
2003 gfc_init_block (&cond_block);
2004 zero = build_int_cst (gfc_array_index_type, 0);
2005 gfc_add_modify (&cond_block, size, zero);
2006 else_b = gfc_finish_block (&cond_block);
2007 tem = gfc_conv_descriptor_data_get (decl);
2008 tem = fold_convert (pvoid_type_node, tem);
2009 cond = fold_build2_loc (input_location, NE_EXPR,
2011 tem, null_pointer_node);
2012 gfc_add_expr_to_block (block,
2013 build3_loc (input_location,
2018 OMP_CLAUSE_SIZE (node) = size;
2020 else if (n->sym->attr.dimension)
2021 OMP_CLAUSE_SIZE (node)
2022 = gfc_full_array_size (block, decl,
2023 GFC_TYPE_ARRAY_RANK (type));
2024 if (n->sym->attr.dimension)
2027 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2028 elemsz = fold_convert (gfc_array_index_type, elemsz);
2029 OMP_CLAUSE_SIZE (node)
2030 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2031 OMP_CLAUSE_SIZE (node), elemsz);
2035 OMP_CLAUSE_DECL (node) = decl;
2040 gfc_init_se (&se, NULL);
2041 if (n->expr->ref->u.ar.type == AR_ELEMENT)
2043 gfc_conv_expr_reference (&se, n->expr);
2044 gfc_add_block_to_block (block, &se.pre);
2046 OMP_CLAUSE_SIZE (node)
2047 = TYPE_SIZE_UNIT (TREE_TYPE (ptr));
2051 gfc_conv_expr_descriptor (&se, n->expr);
2052 ptr = gfc_conv_array_data (se.expr);
2053 tree type = TREE_TYPE (se.expr);
2054 gfc_add_block_to_block (block, &se.pre);
2055 OMP_CLAUSE_SIZE (node)
2056 = gfc_full_array_size (block, se.expr,
2057 GFC_TYPE_ARRAY_RANK (type));
2059 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2060 elemsz = fold_convert (gfc_array_index_type, elemsz);
2061 OMP_CLAUSE_SIZE (node)
2062 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2063 OMP_CLAUSE_SIZE (node), elemsz);
2065 gfc_add_block_to_block (block, &se.post);
2066 ptr = fold_convert (build_pointer_type (char_type_node),
2068 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
2070 if (POINTER_TYPE_P (TREE_TYPE (decl))
2071 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
2073 node4 = build_omp_clause (input_location,
2075 OMP_CLAUSE_SET_MAP_KIND (node4, GOMP_MAP_POINTER);
2076 OMP_CLAUSE_DECL (node4) = decl;
2077 OMP_CLAUSE_SIZE (node4) = size_int (0);
2078 decl = build_fold_indirect_ref (decl);
2080 ptr = fold_convert (sizetype, ptr);
2081 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
2083 tree type = TREE_TYPE (decl);
2084 ptr2 = gfc_conv_descriptor_data_get (decl);
2085 node2 = build_omp_clause (input_location,
2087 OMP_CLAUSE_SET_MAP_KIND (node2, GOMP_MAP_TO_PSET);
2088 OMP_CLAUSE_DECL (node2) = decl;
2089 OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
2090 node3 = build_omp_clause (input_location,
2092 OMP_CLAUSE_SET_MAP_KIND (node3, GOMP_MAP_POINTER);
2093 OMP_CLAUSE_DECL (node3)
2094 = gfc_conv_descriptor_data_get (decl);
2098 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
2099 ptr2 = build_fold_addr_expr (decl);
2102 gcc_assert (POINTER_TYPE_P (TREE_TYPE (decl)));
2105 node3 = build_omp_clause (input_location,
2107 OMP_CLAUSE_SET_MAP_KIND (node3, GOMP_MAP_POINTER);
2108 OMP_CLAUSE_DECL (node3) = decl;
2110 ptr2 = fold_convert (sizetype, ptr2);
2111 OMP_CLAUSE_SIZE (node3)
2112 = fold_build2 (MINUS_EXPR, sizetype, ptr, ptr2);
2114 switch (n->u.map_op)
2117 OMP_CLAUSE_SET_MAP_KIND (node, GOMP_MAP_ALLOC);
2120 OMP_CLAUSE_SET_MAP_KIND (node, GOMP_MAP_TO);
2123 OMP_CLAUSE_SET_MAP_KIND (node, GOMP_MAP_FROM);
2125 case OMP_MAP_TOFROM:
2126 OMP_CLAUSE_SET_MAP_KIND (node, GOMP_MAP_TOFROM);
2128 case OMP_MAP_FORCE_ALLOC:
2129 OMP_CLAUSE_SET_MAP_KIND (node, GOMP_MAP_FORCE_ALLOC);
2131 case OMP_MAP_FORCE_DEALLOC:
2132 OMP_CLAUSE_SET_MAP_KIND (node, GOMP_MAP_FORCE_DEALLOC);
2134 case OMP_MAP_FORCE_TO:
2135 OMP_CLAUSE_SET_MAP_KIND (node, GOMP_MAP_FORCE_TO);
2137 case OMP_MAP_FORCE_FROM:
2138 OMP_CLAUSE_SET_MAP_KIND (node, GOMP_MAP_FORCE_FROM);
2140 case OMP_MAP_FORCE_TOFROM:
2141 OMP_CLAUSE_SET_MAP_KIND (node, GOMP_MAP_FORCE_TOFROM);
2143 case OMP_MAP_FORCE_PRESENT:
2144 OMP_CLAUSE_SET_MAP_KIND (node, GOMP_MAP_FORCE_PRESENT);
2146 case OMP_MAP_FORCE_DEVICEPTR:
2147 OMP_CLAUSE_SET_MAP_KIND (node, GOMP_MAP_FORCE_DEVICEPTR);
2152 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
2154 omp_clauses = gfc_trans_add_clause (node2, omp_clauses);
2156 omp_clauses = gfc_trans_add_clause (node3, omp_clauses);
2158 omp_clauses = gfc_trans_add_clause (node4, omp_clauses);
2163 for (; n != NULL; n = n->next)
2165 if (!n->sym->attr.referenced)
2168 tree node = build_omp_clause (input_location,
2170 ? OMP_CLAUSE_TO : OMP_CLAUSE_FROM);
2171 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
2173 tree decl = gfc_get_symbol_decl (n->sym);
2174 if (gfc_omp_privatize_by_reference (decl))
2175 decl = build_fold_indirect_ref (decl);
2176 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
2178 tree type = TREE_TYPE (decl);
2179 tree ptr = gfc_conv_descriptor_data_get (decl);
2180 ptr = fold_convert (build_pointer_type (char_type_node),
2182 ptr = build_fold_indirect_ref (ptr);
2183 OMP_CLAUSE_DECL (node) = ptr;
2184 OMP_CLAUSE_SIZE (node)
2185 = gfc_full_array_size (block, decl,
2186 GFC_TYPE_ARRAY_RANK (type));
2188 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2189 elemsz = fold_convert (gfc_array_index_type, elemsz);
2190 OMP_CLAUSE_SIZE (node)
2191 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2192 OMP_CLAUSE_SIZE (node), elemsz);
2195 OMP_CLAUSE_DECL (node) = decl;
2200 gfc_init_se (&se, NULL);
2201 if (n->expr->ref->u.ar.type == AR_ELEMENT)
2203 gfc_conv_expr_reference (&se, n->expr);
2205 gfc_add_block_to_block (block, &se.pre);
2206 OMP_CLAUSE_SIZE (node)
2207 = TYPE_SIZE_UNIT (TREE_TYPE (ptr));
2211 gfc_conv_expr_descriptor (&se, n->expr);
2212 ptr = gfc_conv_array_data (se.expr);
2213 tree type = TREE_TYPE (se.expr);
2214 gfc_add_block_to_block (block, &se.pre);
2215 OMP_CLAUSE_SIZE (node)
2216 = gfc_full_array_size (block, se.expr,
2217 GFC_TYPE_ARRAY_RANK (type));
2219 = TYPE_SIZE_UNIT (gfc_get_element_type (type));
2220 elemsz = fold_convert (gfc_array_index_type, elemsz);
2221 OMP_CLAUSE_SIZE (node)
2222 = fold_build2 (MULT_EXPR, gfc_array_index_type,
2223 OMP_CLAUSE_SIZE (node), elemsz);
2225 gfc_add_block_to_block (block, &se.post);
2226 ptr = fold_convert (build_pointer_type (char_type_node),
2228 OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (ptr);
2230 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
2238 if (clauses->if_expr)
2242 gfc_init_se (&se, NULL);
2243 gfc_conv_expr (&se, clauses->if_expr);
2244 gfc_add_block_to_block (block, &se.pre);
2245 if_var = gfc_evaluate_now (se.expr, block);
2246 gfc_add_block_to_block (block, &se.post);
2248 c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
2249 OMP_CLAUSE_IF_EXPR (c) = if_var;
2250 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2253 if (clauses->final_expr)
2257 gfc_init_se (&se, NULL);
2258 gfc_conv_expr (&se, clauses->final_expr);
2259 gfc_add_block_to_block (block, &se.pre);
2260 final_var = gfc_evaluate_now (se.expr, block);
2261 gfc_add_block_to_block (block, &se.post);
2263 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FINAL);
2264 OMP_CLAUSE_FINAL_EXPR (c) = final_var;
2265 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2268 if (clauses->num_threads)
2272 gfc_init_se (&se, NULL);
2273 gfc_conv_expr (&se, clauses->num_threads);
2274 gfc_add_block_to_block (block, &se.pre);
2275 num_threads = gfc_evaluate_now (se.expr, block);
2276 gfc_add_block_to_block (block, &se.post);
2278 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
2279 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
2280 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2283 chunk_size = NULL_TREE;
2284 if (clauses->chunk_size)
2286 gfc_init_se (&se, NULL);
2287 gfc_conv_expr (&se, clauses->chunk_size);
2288 gfc_add_block_to_block (block, &se.pre);
2289 chunk_size = gfc_evaluate_now (se.expr, block);
2290 gfc_add_block_to_block (block, &se.post);
2293 if (clauses->sched_kind != OMP_SCHED_NONE)
2295 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
2296 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
2297 switch (clauses->sched_kind)
2299 case OMP_SCHED_STATIC:
2300 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
2302 case OMP_SCHED_DYNAMIC:
2303 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
2305 case OMP_SCHED_GUIDED:
2306 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
2308 case OMP_SCHED_RUNTIME:
2309 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
2311 case OMP_SCHED_AUTO:
2312 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
2317 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2320 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
2322 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
2323 switch (clauses->default_sharing)
2325 case OMP_DEFAULT_NONE:
2326 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
2328 case OMP_DEFAULT_SHARED:
2329 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
2331 case OMP_DEFAULT_PRIVATE:
2332 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
2334 case OMP_DEFAULT_FIRSTPRIVATE:
2335 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2340 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2343 if (clauses->nowait)
2345 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
2346 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2349 if (clauses->ordered)
2351 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
2352 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2355 if (clauses->untied)
2357 c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
2358 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2361 if (clauses->mergeable)
2363 c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
2364 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2367 if (clauses->collapse)
2369 c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
2370 OMP_CLAUSE_COLLAPSE_EXPR (c)
2371 = build_int_cst (integer_type_node, clauses->collapse);
2372 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2375 if (clauses->inbranch)
2377 c = build_omp_clause (where.lb->location, OMP_CLAUSE_INBRANCH);
2378 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2381 if (clauses->notinbranch)
2383 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOTINBRANCH);
2384 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2387 switch (clauses->cancel)
2389 case OMP_CANCEL_UNKNOWN:
2391 case OMP_CANCEL_PARALLEL:
2392 c = build_omp_clause (where.lb->location, OMP_CLAUSE_PARALLEL);
2393 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2395 case OMP_CANCEL_SECTIONS:
2396 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SECTIONS);
2397 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2400 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FOR);
2401 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2403 case OMP_CANCEL_TASKGROUP:
2404 c = build_omp_clause (where.lb->location, OMP_CLAUSE_TASKGROUP);
2405 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2409 if (clauses->proc_bind != OMP_PROC_BIND_UNKNOWN)
2411 c = build_omp_clause (where.lb->location, OMP_CLAUSE_PROC_BIND);
2412 switch (clauses->proc_bind)
2414 case OMP_PROC_BIND_MASTER:
2415 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_MASTER;
2417 case OMP_PROC_BIND_SPREAD:
2418 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_SPREAD;
2420 case OMP_PROC_BIND_CLOSE:
2421 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_CLOSE;
2426 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2429 if (clauses->safelen_expr)
2433 gfc_init_se (&se, NULL);
2434 gfc_conv_expr (&se, clauses->safelen_expr);
2435 gfc_add_block_to_block (block, &se.pre);
2436 safelen_var = gfc_evaluate_now (se.expr, block);
2437 gfc_add_block_to_block (block, &se.post);
2439 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SAFELEN);
2440 OMP_CLAUSE_SAFELEN_EXPR (c) = safelen_var;
2441 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2444 if (clauses->simdlen_expr)
2446 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SIMDLEN);
2447 OMP_CLAUSE_SIMDLEN_EXPR (c)
2448 = gfc_conv_constant_to_tree (clauses->simdlen_expr);
2449 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2452 if (clauses->num_teams)
2456 gfc_init_se (&se, NULL);
2457 gfc_conv_expr (&se, clauses->num_teams);
2458 gfc_add_block_to_block (block, &se.pre);
2459 num_teams = gfc_evaluate_now (se.expr, block);
2460 gfc_add_block_to_block (block, &se.post);
2462 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_TEAMS);
2463 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
2464 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2467 if (clauses->device)
2471 gfc_init_se (&se, NULL);
2472 gfc_conv_expr (&se, clauses->device);
2473 gfc_add_block_to_block (block, &se.pre);
2474 device = gfc_evaluate_now (se.expr, block);
2475 gfc_add_block_to_block (block, &se.post);
2477 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEVICE);
2478 OMP_CLAUSE_DEVICE_ID (c) = device;
2479 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2482 if (clauses->thread_limit)
2486 gfc_init_se (&se, NULL);
2487 gfc_conv_expr (&se, clauses->thread_limit);
2488 gfc_add_block_to_block (block, &se.pre);
2489 thread_limit = gfc_evaluate_now (se.expr, block);
2490 gfc_add_block_to_block (block, &se.post);
2492 c = build_omp_clause (where.lb->location, OMP_CLAUSE_THREAD_LIMIT);
2493 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
2494 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2497 chunk_size = NULL_TREE;
2498 if (clauses->dist_chunk_size)
2500 gfc_init_se (&se, NULL);
2501 gfc_conv_expr (&se, clauses->dist_chunk_size);
2502 gfc_add_block_to_block (block, &se.pre);
2503 chunk_size = gfc_evaluate_now (se.expr, block);
2504 gfc_add_block_to_block (block, &se.post);
2507 if (clauses->dist_sched_kind != OMP_SCHED_NONE)
2509 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DIST_SCHEDULE);
2510 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
2511 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2516 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ASYNC);
2517 if (clauses->async_expr)
2518 OMP_CLAUSE_ASYNC_EXPR (c)
2519 = gfc_convert_expr_to_tree (block, clauses->async_expr);
2521 OMP_CLAUSE_ASYNC_EXPR (c) = NULL;
2522 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2526 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
2527 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2529 if (clauses->independent)
2531 c = build_omp_clause (where.lb->location, OMP_CLAUSE_INDEPENDENT);
2532 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2534 if (clauses->wait_list)
2538 for (el = clauses->wait_list; el; el = el->next)
2540 c = build_omp_clause (where.lb->location, OMP_CLAUSE_WAIT);
2541 OMP_CLAUSE_DECL (c) = gfc_convert_expr_to_tree (block, el->expr);
2542 OMP_CLAUSE_CHAIN (c) = omp_clauses;
2546 if (clauses->num_gangs_expr)
2549 = gfc_convert_expr_to_tree (block, clauses->num_gangs_expr);
2550 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_GANGS);
2551 OMP_CLAUSE_NUM_GANGS_EXPR (c) = num_gangs_var;
2552 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2554 if (clauses->num_workers_expr)
2556 tree num_workers_var
2557 = gfc_convert_expr_to_tree (block, clauses->num_workers_expr);
2558 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_WORKERS);
2559 OMP_CLAUSE_NUM_WORKERS_EXPR (c) = num_workers_var;
2560 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2562 if (clauses->vector_length_expr)
2564 tree vector_length_var
2565 = gfc_convert_expr_to_tree (block, clauses->vector_length_expr);
2566 c = build_omp_clause (where.lb->location, OMP_CLAUSE_VECTOR_LENGTH);
2567 OMP_CLAUSE_VECTOR_LENGTH_EXPR (c) = vector_length_var;
2568 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2570 if (clauses->vector)
2572 if (clauses->vector_expr)
2575 = gfc_convert_expr_to_tree (block, clauses->vector_expr);
2576 c = build_omp_clause (where.lb->location, OMP_CLAUSE_VECTOR);
2577 OMP_CLAUSE_VECTOR_EXPR (c) = vector_var;
2578 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2582 c = build_omp_clause (where.lb->location, OMP_CLAUSE_VECTOR);
2583 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2586 if (clauses->worker)
2588 if (clauses->worker_expr)
2591 = gfc_convert_expr_to_tree (block, clauses->worker_expr);
2592 c = build_omp_clause (where.lb->location, OMP_CLAUSE_WORKER);
2593 OMP_CLAUSE_WORKER_EXPR (c) = worker_var;
2594 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2598 c = build_omp_clause (where.lb->location, OMP_CLAUSE_WORKER);
2599 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2604 if (clauses->gang_expr)
2607 = gfc_convert_expr_to_tree (block, clauses->gang_expr);
2608 c = build_omp_clause (where.lb->location, OMP_CLAUSE_GANG);
2609 OMP_CLAUSE_GANG_EXPR (c) = gang_var;
2610 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2614 c = build_omp_clause (where.lb->location, OMP_CLAUSE_GANG);
2615 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
2619 return nreverse (omp_clauses);
2622 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
2625 gfc_trans_omp_code (gfc_code *code, bool force_empty)
2630 stmt = gfc_trans_code (code);
2631 if (TREE_CODE (stmt) != BIND_EXPR)
2633 if (!IS_EMPTY_STMT (stmt) || force_empty)
2635 tree block = poplevel (1, 0);
2636 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
2646 /* Trans OpenACC directives. */
2647 /* parallel, kernels, data and host_data. */
2649 gfc_trans_oacc_construct (gfc_code *code)
2652 tree stmt, oacc_clauses;
2653 enum tree_code construct_code;
2657 case EXEC_OACC_PARALLEL:
2658 construct_code = OACC_PARALLEL;
2660 case EXEC_OACC_KERNELS:
2661 construct_code = OACC_KERNELS;
2663 case EXEC_OACC_DATA:
2664 construct_code = OACC_DATA;
2666 case EXEC_OACC_HOST_DATA:
2667 construct_code = OACC_HOST_DATA;
2673 gfc_start_block (&block);
2674 oacc_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
2676 stmt = gfc_trans_omp_code (code->block->next, true);
2677 stmt = build2_loc (input_location, construct_code, void_type_node, stmt,
2679 gfc_add_expr_to_block (&block, stmt);
2680 return gfc_finish_block (&block);
2683 /* update, enter_data, exit_data, cache. */
2685 gfc_trans_oacc_executable_directive (gfc_code *code)
2688 tree stmt, oacc_clauses;
2689 enum tree_code construct_code;
2693 case EXEC_OACC_UPDATE:
2694 construct_code = OACC_UPDATE;
2696 case EXEC_OACC_ENTER_DATA:
2697 construct_code = OACC_ENTER_DATA;
2699 case EXEC_OACC_EXIT_DATA:
2700 construct_code = OACC_EXIT_DATA;
2702 case EXEC_OACC_CACHE:
2703 construct_code = OACC_CACHE;
2709 gfc_start_block (&block);
2710 oacc_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
2712 stmt = build1_loc (input_location, construct_code, void_type_node,
2714 gfc_add_expr_to_block (&block, stmt);
2715 return gfc_finish_block (&block);
2719 gfc_trans_oacc_wait_directive (gfc_code *code)
2723 vec<tree, va_gc> *args;
2726 gfc_omp_clauses *clauses = code->ext.omp_clauses;
2727 location_t loc = input_location;
2729 for (el = clauses->wait_list; el; el = el->next)
2732 vec_alloc (args, nparms + 2);
2733 stmt = builtin_decl_explicit (BUILT_IN_GOACC_WAIT);
2735 gfc_start_block (&block);
2737 if (clauses->async_expr)
2738 t = gfc_convert_expr_to_tree (&block, clauses->async_expr);
2740 t = build_int_cst (integer_type_node, -2);
2742 args->quick_push (t);
2743 args->quick_push (build_int_cst (integer_type_node, nparms));
2745 for (el = clauses->wait_list; el; el = el->next)
2746 args->quick_push (gfc_convert_expr_to_tree (&block, el->expr));
2748 stmt = build_call_expr_loc_vec (loc, stmt, args);
2749 gfc_add_expr_to_block (&block, stmt);
2753 return gfc_finish_block (&block);
2756 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
2757 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
2760 gfc_trans_omp_atomic (gfc_code *code)
2762 gfc_code *atomic_code = code;
2766 gfc_expr *expr2, *e;
2769 tree lhsaddr, type, rhs, x;
2770 enum tree_code op = ERROR_MARK;
2771 enum tree_code aop = OMP_ATOMIC;
2772 bool var_on_left = false;
2773 bool seq_cst = (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SEQ_CST) != 0;
2775 code = code->block->next;
2776 gcc_assert (code->op == EXEC_ASSIGN);
2777 var = code->expr1->symtree->n.sym;
2779 gfc_init_se (&lse, NULL);
2780 gfc_init_se (&rse, NULL);
2781 gfc_init_se (&vse, NULL);
2782 gfc_start_block (&block);
2784 expr2 = code->expr2;
2785 if (expr2->expr_type == EXPR_FUNCTION
2786 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2787 expr2 = expr2->value.function.actual->expr;
2789 switch (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2791 case GFC_OMP_ATOMIC_READ:
2792 gfc_conv_expr (&vse, code->expr1);
2793 gfc_add_block_to_block (&block, &vse.pre);
2795 gfc_conv_expr (&lse, expr2);
2796 gfc_add_block_to_block (&block, &lse.pre);
2797 type = TREE_TYPE (lse.expr);
2798 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
2800 x = build1 (OMP_ATOMIC_READ, type, lhsaddr);
2801 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2802 x = convert (TREE_TYPE (vse.expr), x);
2803 gfc_add_modify (&block, vse.expr, x);
2805 gfc_add_block_to_block (&block, &lse.pre);
2806 gfc_add_block_to_block (&block, &rse.pre);
2808 return gfc_finish_block (&block);
2809 case GFC_OMP_ATOMIC_CAPTURE:
2810 aop = OMP_ATOMIC_CAPTURE_NEW;
2811 if (expr2->expr_type == EXPR_VARIABLE)
2813 aop = OMP_ATOMIC_CAPTURE_OLD;
2814 gfc_conv_expr (&vse, code->expr1);
2815 gfc_add_block_to_block (&block, &vse.pre);
2817 gfc_conv_expr (&lse, expr2);
2818 gfc_add_block_to_block (&block, &lse.pre);
2819 gfc_init_se (&lse, NULL);
2821 var = code->expr1->symtree->n.sym;
2822 expr2 = code->expr2;
2823 if (expr2->expr_type == EXPR_FUNCTION
2824 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
2825 expr2 = expr2->value.function.actual->expr;
2832 gfc_conv_expr (&lse, code->expr1);
2833 gfc_add_block_to_block (&block, &lse.pre);
2834 type = TREE_TYPE (lse.expr);
2835 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
2837 if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2838 == GFC_OMP_ATOMIC_WRITE)
2839 || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
2841 gfc_conv_expr (&rse, expr2);
2842 gfc_add_block_to_block (&block, &rse.pre);
2844 else if (expr2->expr_type == EXPR_OP)
2847 switch (expr2->value.op.op)
2849 case INTRINSIC_PLUS:
2852 case INTRINSIC_TIMES:
2855 case INTRINSIC_MINUS:
2858 case INTRINSIC_DIVIDE:
2859 if (expr2->ts.type == BT_INTEGER)
2860 op = TRUNC_DIV_EXPR;
2865 op = TRUTH_ANDIF_EXPR;
2868 op = TRUTH_ORIF_EXPR;
2873 case INTRINSIC_NEQV:
2879 e = expr2->value.op.op1;
2880 if (e->expr_type == EXPR_FUNCTION
2881 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
2882 e = e->value.function.actual->expr;
2883 if (e->expr_type == EXPR_VARIABLE
2884 && e->symtree != NULL
2885 && e->symtree->n.sym == var)
2887 expr2 = expr2->value.op.op2;
2892 e = expr2->value.op.op2;
2893 if (e->expr_type == EXPR_FUNCTION
2894 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
2895 e = e->value.function.actual->expr;
2896 gcc_assert (e->expr_type == EXPR_VARIABLE
2897 && e->symtree != NULL
2898 && e->symtree->n.sym == var);
2899 expr2 = expr2->value.op.op1;
2900 var_on_left = false;
2902 gfc_conv_expr (&rse, expr2);
2903 gfc_add_block_to_block (&block, &rse.pre);
2907 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
2908 switch (expr2->value.function.isym->id)
2928 e = expr2->value.function.actual->expr;
2929 gcc_assert (e->expr_type == EXPR_VARIABLE
2930 && e->symtree != NULL
2931 && e->symtree->n.sym == var);
2933 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
2934 gfc_add_block_to_block (&block, &rse.pre);
2935 if (expr2->value.function.actual->next->next != NULL)
2937 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
2938 gfc_actual_arglist *arg;
2940 gfc_add_modify (&block, accum, rse.expr);
2941 for (arg = expr2->value.function.actual->next->next; arg;
2944 gfc_init_block (&rse.pre);
2945 gfc_conv_expr (&rse, arg->expr);
2946 gfc_add_block_to_block (&block, &rse.pre);
2947 x = fold_build2_loc (input_location, op, TREE_TYPE (accum),
2949 gfc_add_modify (&block, accum, x);
2955 expr2 = expr2->value.function.actual->next->expr;
2958 lhsaddr = save_expr (lhsaddr);
2959 if (TREE_CODE (lhsaddr) != SAVE_EXPR
2960 && (TREE_CODE (lhsaddr) != ADDR_EXPR
2961 || TREE_CODE (TREE_OPERAND (lhsaddr, 0)) != VAR_DECL))
2963 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
2964 it even after unsharing function body. */
2965 tree var = create_tmp_var_raw (TREE_TYPE (lhsaddr));
2966 DECL_CONTEXT (var) = current_function_decl;
2967 lhsaddr = build4 (TARGET_EXPR, TREE_TYPE (lhsaddr), var, lhsaddr,
2968 NULL_TREE, NULL_TREE);
2971 rhs = gfc_evaluate_now (rse.expr, &block);
2973 if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
2974 == GFC_OMP_ATOMIC_WRITE)
2975 || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
2979 x = convert (TREE_TYPE (rhs),
2980 build_fold_indirect_ref_loc (input_location, lhsaddr));
2982 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), x, rhs);
2984 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), rhs, x);
2987 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
2988 && TREE_CODE (type) != COMPLEX_TYPE)
2989 x = fold_build1_loc (input_location, REALPART_EXPR,
2990 TREE_TYPE (TREE_TYPE (rhs)), x);
2992 gfc_add_block_to_block (&block, &lse.pre);
2993 gfc_add_block_to_block (&block, &rse.pre);
2995 if (aop == OMP_ATOMIC)
2997 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
2998 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
2999 gfc_add_expr_to_block (&block, x);
3003 if (aop == OMP_ATOMIC_CAPTURE_NEW)
3006 expr2 = code->expr2;
3007 if (expr2->expr_type == EXPR_FUNCTION
3008 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
3009 expr2 = expr2->value.function.actual->expr;
3011 gcc_assert (expr2->expr_type == EXPR_VARIABLE);
3012 gfc_conv_expr (&vse, code->expr1);
3013 gfc_add_block_to_block (&block, &vse.pre);
3015 gfc_init_se (&lse, NULL);
3016 gfc_conv_expr (&lse, expr2);
3017 gfc_add_block_to_block (&block, &lse.pre);
3019 x = build2 (aop, type, lhsaddr, convert (type, x));
3020 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
3021 x = convert (TREE_TYPE (vse.expr), x);
3022 gfc_add_modify (&block, vse.expr, x);
3025 return gfc_finish_block (&block);
3029 gfc_trans_omp_barrier (void)
3031 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
3032 return build_call_expr_loc (input_location, decl, 0);
3036 gfc_trans_omp_cancel (gfc_code *code)
3039 tree ifc = boolean_true_node;
3041 switch (code->ext.omp_clauses->cancel)
3043 case OMP_CANCEL_PARALLEL: mask = 1; break;
3044 case OMP_CANCEL_DO: mask = 2; break;
3045 case OMP_CANCEL_SECTIONS: mask = 4; break;
3046 case OMP_CANCEL_TASKGROUP: mask = 8; break;
3047 default: gcc_unreachable ();
3049 gfc_start_block (&block);
3050 if (code->ext.omp_clauses->if_expr)
3055 gfc_init_se (&se, NULL);
3056 gfc_conv_expr (&se, code->ext.omp_clauses->if_expr);
3057 gfc_add_block_to_block (&block, &se.pre);
3058 if_var = gfc_evaluate_now (se.expr, &block);
3059 gfc_add_block_to_block (&block, &se.post);
3060 tree type = TREE_TYPE (if_var);
3061 ifc = fold_build2_loc (input_location, NE_EXPR,
3062 boolean_type_node, if_var,
3063 build_zero_cst (type));
3065 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
3066 tree c_bool_type = TREE_TYPE (TREE_TYPE (decl));
3067 ifc = fold_convert (c_bool_type, ifc);
3068 gfc_add_expr_to_block (&block,
3069 build_call_expr_loc (input_location, decl, 2,
3070 build_int_cst (integer_type_node,
3072 return gfc_finish_block (&block);
3076 gfc_trans_omp_cancellation_point (gfc_code *code)
3079 switch (code->ext.omp_clauses->cancel)
3081 case OMP_CANCEL_PARALLEL: mask = 1; break;
3082 case OMP_CANCEL_DO: mask = 2; break;
3083 case OMP_CANCEL_SECTIONS: mask = 4; break;
3084 case OMP_CANCEL_TASKGROUP: mask = 8; break;
3085 default: gcc_unreachable ();
3087 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT);
3088 return build_call_expr_loc (input_location, decl, 1,
3089 build_int_cst (integer_type_node, mask));
3093 gfc_trans_omp_critical (gfc_code *code)
3095 tree name = NULL_TREE, stmt;
3096 if (code->ext.omp_name != NULL)
3097 name = get_identifier (code->ext.omp_name);
3098 stmt = gfc_trans_code (code->block->next);
3099 return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
3102 typedef struct dovar_init_d {
3109 gfc_trans_omp_do (gfc_code *code, gfc_exec_op op, stmtblock_t *pblock,
3110 gfc_omp_clauses *do_clauses, tree par_clauses)
3113 tree dovar, stmt, from, to, step, type, init, cond, incr;
3114 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
3117 gfc_omp_clauses *clauses = code->ext.omp_clauses;
3118 int i, collapse = clauses->collapse;
3119 vec<dovar_init> inits = vNULL;
3126 code = code->block->next;
3127 gcc_assert (code->op == EXEC_DO);
3129 init = make_tree_vec (collapse);
3130 cond = make_tree_vec (collapse);
3131 incr = make_tree_vec (collapse);
3135 gfc_start_block (&block);
3139 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
3141 for (i = 0; i < collapse; i++)
3144 int dovar_found = 0;
3149 gfc_omp_namelist *n = NULL;
3150 if (op != EXEC_OMP_DISTRIBUTE)
3151 for (n = clauses->lists[(op == EXEC_OMP_SIMD && collapse == 1)
3152 ? OMP_LIST_LINEAR : OMP_LIST_LASTPRIVATE];
3153 n != NULL; n = n->next)
3154 if (code->ext.iterator->var->symtree->n.sym == n->sym)
3158 else if (n == NULL && op != EXEC_OMP_SIMD)
3159 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
3160 if (code->ext.iterator->var->symtree->n.sym == n->sym)
3166 /* Evaluate all the expressions in the iterator. */
3167 gfc_init_se (&se, NULL);
3168 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
3169 gfc_add_block_to_block (pblock, &se.pre);
3171 type = TREE_TYPE (dovar);
3172 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
3174 gfc_init_se (&se, NULL);
3175 gfc_conv_expr_val (&se, code->ext.iterator->start);
3176 gfc_add_block_to_block (pblock, &se.pre);
3177 from = gfc_evaluate_now (se.expr, pblock);
3179 gfc_init_se (&se, NULL);
3180 gfc_conv_expr_val (&se, code->ext.iterator->end);
3181 gfc_add_block_to_block (pblock, &se.pre);
3182 to = gfc_evaluate_now (se.expr, pblock);
3184 gfc_init_se (&se, NULL);
3185 gfc_conv_expr_val (&se, code->ext.iterator->step);
3186 gfc_add_block_to_block (pblock, &se.pre);
3187 step = gfc_evaluate_now (se.expr, pblock);
3190 /* Special case simple loops. */
3191 if (TREE_CODE (dovar) == VAR_DECL)
3193 if (integer_onep (step))
3195 else if (tree_int_cst_equal (step, integer_minus_one_node))
3200 = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym,
3206 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
3207 /* The condition should not be folded. */
3208 TREE_VEC_ELT (cond, i) = build2_loc (input_location, simple > 0
3209 ? LE_EXPR : GE_EXPR,
3210 boolean_type_node, dovar, to);
3211 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
3213 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
3216 TREE_VEC_ELT (incr, i));
3220 /* STEP is not 1 or -1. Use:
3221 for (count = 0; count < (to + step - from) / step; count++)
3223 dovar = from + count * step;
3227 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, step, from);
3228 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, to, tmp);
3229 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, type, tmp,
3231 tmp = gfc_evaluate_now (tmp, pblock);
3232 count = gfc_create_var (type, "count");
3233 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
3234 build_int_cst (type, 0));
3235 /* The condition should not be folded. */
3236 TREE_VEC_ELT (cond, i) = build2_loc (input_location, LT_EXPR,
3239 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
3241 build_int_cst (type, 1));
3242 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
3243 MODIFY_EXPR, type, count,
3244 TREE_VEC_ELT (incr, i));
3246 /* Initialize DOVAR. */
3247 tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
3248 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
3249 dovar_init e = {dovar, tmp};
3250 inits.safe_push (e);
3253 if (dovar_found == 2
3254 && op == EXEC_OMP_SIMD
3258 for (tmp = omp_clauses; tmp; tmp = OMP_CLAUSE_CHAIN (tmp))
3259 if (OMP_CLAUSE_CODE (tmp) == OMP_CLAUSE_LINEAR
3260 && OMP_CLAUSE_DECL (tmp) == dovar)
3262 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp) = 1;
3268 if (op == EXEC_OMP_SIMD)
3272 tmp = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
3273 OMP_CLAUSE_LINEAR_STEP (tmp) = step;
3274 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp) = 1;
3277 tmp = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
3282 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
3283 OMP_CLAUSE_DECL (tmp) = dovar_decl;
3284 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
3286 if (dovar_found == 2)
3293 /* If dovar is lastprivate, but different counter is used,
3294 dovar += step needs to be added to
3295 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
3296 will have the value on entry of the last loop, rather
3297 than value after iterator increment. */
3298 tmp = gfc_evaluate_now (step, pblock);
3299 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar,
3301 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type,
3303 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3304 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3305 && OMP_CLAUSE_DECL (c) == dovar_decl)
3307 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
3310 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3311 && OMP_CLAUSE_DECL (c) == dovar_decl)
3313 OMP_CLAUSE_LINEAR_STMT (c) = tmp;
3317 if (c == NULL && op == EXEC_OMP_DO && par_clauses != NULL)
3319 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3320 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3321 && OMP_CLAUSE_DECL (c) == dovar_decl)
3323 tree l = build_omp_clause (input_location,
3324 OMP_CLAUSE_LASTPRIVATE);
3325 OMP_CLAUSE_DECL (l) = dovar_decl;
3326 OMP_CLAUSE_CHAIN (l) = omp_clauses;
3327 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
3329 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
3333 gcc_assert (simple || c != NULL);
3337 if (op != EXEC_OMP_SIMD)
3338 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
3339 else if (collapse == 1)
3341 tmp = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
3342 OMP_CLAUSE_LINEAR_STEP (tmp) = build_int_cst (type, 1);
3343 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp) = 1;
3344 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp) = 1;
3347 tmp = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
3348 OMP_CLAUSE_DECL (tmp) = count;
3349 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
3352 if (i + 1 < collapse)
3353 code = code->block->next;
3356 if (pblock != &block)
3359 gfc_start_block (&block);
3362 gfc_start_block (&body);
3364 FOR_EACH_VEC_ELT (inits, ix, di)
3365 gfc_add_modify (&body, di->var, di->init);
3368 /* Cycle statement is implemented with a goto. Exit statement must not be
3369 present for this loop. */
3370 cycle_label = gfc_build_label_decl (NULL_TREE);
3372 /* Put these labels where they can be found later. */
3374 code->cycle_label = cycle_label;
3375 code->exit_label = NULL_TREE;
3377 /* Main loop body. */
3378 tmp = gfc_trans_omp_code (code->block->next, true);
3379 gfc_add_expr_to_block (&body, tmp);
3381 /* Label for cycle statements (if needed). */
3382 if (TREE_USED (cycle_label))
3384 tmp = build1_v (LABEL_EXPR, cycle_label);
3385 gfc_add_expr_to_block (&body, tmp);
3388 /* End of loop body. */
3391 case EXEC_OMP_SIMD: stmt = make_node (OMP_SIMD); break;
3392 case EXEC_OMP_DO: stmt = make_node (OMP_FOR); break;
3393 case EXEC_OMP_DISTRIBUTE: stmt = make_node (OMP_DISTRIBUTE); break;
3394 case EXEC_OACC_LOOP: stmt = make_node (OACC_LOOP); break;
3395 default: gcc_unreachable ();
3398 TREE_TYPE (stmt) = void_type_node;
3399 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
3400 OMP_FOR_CLAUSES (stmt) = omp_clauses;
3401 OMP_FOR_INIT (stmt) = init;
3402 OMP_FOR_COND (stmt) = cond;
3403 OMP_FOR_INCR (stmt) = incr;
3404 gfc_add_expr_to_block (&block, stmt);
3406 return gfc_finish_block (&block);
3409 /* parallel loop and kernels loop. */
3411 gfc_trans_oacc_combined_directive (gfc_code *code)
3413 stmtblock_t block, *pblock = NULL;
3414 gfc_omp_clauses construct_clauses, loop_clauses;
3415 tree stmt, oacc_clauses = NULL_TREE;
3416 enum tree_code construct_code;
3420 case EXEC_OACC_PARALLEL_LOOP:
3421 construct_code = OACC_PARALLEL;
3423 case EXEC_OACC_KERNELS_LOOP:
3424 construct_code = OACC_KERNELS;
3430 gfc_start_block (&block);
3432 memset (&loop_clauses, 0, sizeof (loop_clauses));
3433 if (code->ext.omp_clauses != NULL)
3435 memcpy (&construct_clauses, code->ext.omp_clauses,
3436 sizeof (construct_clauses));
3437 loop_clauses.collapse = construct_clauses.collapse;
3438 loop_clauses.gang = construct_clauses.gang;
3439 loop_clauses.vector = construct_clauses.vector;
3440 loop_clauses.worker = construct_clauses.worker;
3441 loop_clauses.seq = construct_clauses.seq;
3442 loop_clauses.independent = construct_clauses.independent;
3443 construct_clauses.collapse = 0;
3444 construct_clauses.gang = false;
3445 construct_clauses.vector = false;
3446 construct_clauses.worker = false;
3447 construct_clauses.seq = false;
3448 construct_clauses.independent = false;
3449 oacc_clauses = gfc_trans_omp_clauses (&block, &construct_clauses,
3452 if (!loop_clauses.seq)
3456 stmt = gfc_trans_omp_do (code, EXEC_OACC_LOOP, pblock, &loop_clauses, NULL);
3457 if (TREE_CODE (stmt) != BIND_EXPR)
3458 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3461 stmt = build2_loc (input_location, construct_code, void_type_node, stmt,
3463 if (code->op == EXEC_OACC_KERNELS_LOOP)
3464 OACC_KERNELS_COMBINED (stmt) = 1;
3466 OACC_PARALLEL_COMBINED (stmt) = 1;
3467 gfc_add_expr_to_block (&block, stmt);
3468 return gfc_finish_block (&block);
3472 gfc_trans_omp_flush (void)
3474 tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
3475 return build_call_expr_loc (input_location, decl, 0);
3479 gfc_trans_omp_master (gfc_code *code)
3481 tree stmt = gfc_trans_code (code->block->next);
3482 if (IS_EMPTY_STMT (stmt))
3484 return build1_v (OMP_MASTER, stmt);
3488 gfc_trans_omp_ordered (gfc_code *code)
3490 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
3494 gfc_trans_omp_parallel (gfc_code *code)
3497 tree stmt, omp_clauses;
3499 gfc_start_block (&block);
3500 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3502 stmt = gfc_trans_omp_code (code->block->next, true);
3503 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3505 gfc_add_expr_to_block (&block, stmt);
3506 return gfc_finish_block (&block);
3513 GFC_OMP_SPLIT_PARALLEL,
3514 GFC_OMP_SPLIT_DISTRIBUTE,
3515 GFC_OMP_SPLIT_TEAMS,
3516 GFC_OMP_SPLIT_TARGET,
3522 GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
3523 GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
3524 GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL),
3525 GFC_OMP_MASK_DISTRIBUTE = (1 << GFC_OMP_SPLIT_DISTRIBUTE),
3526 GFC_OMP_MASK_TEAMS = (1 << GFC_OMP_SPLIT_TEAMS),
3527 GFC_OMP_MASK_TARGET = (1 << GFC_OMP_SPLIT_TARGET)
3531 gfc_split_omp_clauses (gfc_code *code,
3532 gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM])
3534 int mask = 0, innermost = 0;
3535 memset (clausesa, 0, GFC_OMP_SPLIT_NUM * sizeof (gfc_omp_clauses));
3538 case EXEC_OMP_DISTRIBUTE:
3539 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3541 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
3542 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3543 innermost = GFC_OMP_SPLIT_DO;
3545 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
3546 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_PARALLEL
3547 | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3548 innermost = GFC_OMP_SPLIT_SIMD;
3550 case EXEC_OMP_DISTRIBUTE_SIMD:
3551 mask = GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3552 innermost = GFC_OMP_SPLIT_SIMD;
3555 innermost = GFC_OMP_SPLIT_DO;
3557 case EXEC_OMP_DO_SIMD:
3558 mask = GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3559 innermost = GFC_OMP_SPLIT_SIMD;
3561 case EXEC_OMP_PARALLEL:
3562 innermost = GFC_OMP_SPLIT_PARALLEL;
3564 case EXEC_OMP_PARALLEL_DO:
3565 mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3566 innermost = GFC_OMP_SPLIT_DO;
3568 case EXEC_OMP_PARALLEL_DO_SIMD:
3569 mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3570 innermost = GFC_OMP_SPLIT_SIMD;
3573 innermost = GFC_OMP_SPLIT_SIMD;
3575 case EXEC_OMP_TARGET:
3576 innermost = GFC_OMP_SPLIT_TARGET;
3578 case EXEC_OMP_TARGET_TEAMS:
3579 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS;
3580 innermost = GFC_OMP_SPLIT_TEAMS;
3582 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
3583 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS
3584 | GFC_OMP_MASK_DISTRIBUTE;
3585 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3587 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
3588 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3589 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3590 innermost = GFC_OMP_SPLIT_DO;
3592 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3593 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3594 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3595 innermost = GFC_OMP_SPLIT_SIMD;
3597 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
3598 mask = GFC_OMP_MASK_TARGET | GFC_OMP_MASK_TEAMS
3599 | GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3600 innermost = GFC_OMP_SPLIT_SIMD;
3602 case EXEC_OMP_TEAMS:
3603 innermost = GFC_OMP_SPLIT_TEAMS;
3605 case EXEC_OMP_TEAMS_DISTRIBUTE:
3606 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE;
3607 innermost = GFC_OMP_SPLIT_DISTRIBUTE;
3609 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
3610 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3611 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
3612 innermost = GFC_OMP_SPLIT_DO;
3614 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
3615 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE
3616 | GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
3617 innermost = GFC_OMP_SPLIT_SIMD;
3619 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
3620 mask = GFC_OMP_MASK_TEAMS | GFC_OMP_MASK_DISTRIBUTE | GFC_OMP_MASK_SIMD;
3621 innermost = GFC_OMP_SPLIT_SIMD;
3628 clausesa[innermost] = *code->ext.omp_clauses;
3631 if (code->ext.omp_clauses != NULL)
3633 if (mask & GFC_OMP_MASK_TARGET)
3635 /* First the clauses that are unique to some constructs. */
3636 clausesa[GFC_OMP_SPLIT_TARGET].lists[OMP_LIST_MAP]
3637 = code->ext.omp_clauses->lists[OMP_LIST_MAP];
3638 clausesa[GFC_OMP_SPLIT_TARGET].device
3639 = code->ext.omp_clauses->device;
3641 if (mask & GFC_OMP_MASK_TEAMS)
3643 /* First the clauses that are unique to some constructs. */
3644 clausesa[GFC_OMP_SPLIT_TEAMS].num_teams
3645 = code->ext.omp_clauses->num_teams;
3646 clausesa[GFC_OMP_SPLIT_TEAMS].thread_limit
3647 = code->ext.omp_clauses->thread_limit;
3648 /* Shared and default clauses are allowed on parallel and teams. */
3649 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_SHARED]
3650 = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
3651 clausesa[GFC_OMP_SPLIT_TEAMS].default_sharing
3652 = code->ext.omp_clauses->default_sharing;
3654 if (mask & GFC_OMP_MASK_DISTRIBUTE)
3656 /* First the clauses that are unique to some constructs. */
3657 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].dist_sched_kind
3658 = code->ext.omp_clauses->dist_sched_kind;
3659 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].dist_chunk_size
3660 = code->ext.omp_clauses->dist_chunk_size;
3661 /* Duplicate collapse. */
3662 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].collapse
3663 = code->ext.omp_clauses->collapse;
3665 if (mask & GFC_OMP_MASK_PARALLEL)
3667 /* First the clauses that are unique to some constructs. */
3668 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_COPYIN]
3669 = code->ext.omp_clauses->lists[OMP_LIST_COPYIN];
3670 clausesa[GFC_OMP_SPLIT_PARALLEL].num_threads
3671 = code->ext.omp_clauses->num_threads;
3672 clausesa[GFC_OMP_SPLIT_PARALLEL].proc_bind
3673 = code->ext.omp_clauses->proc_bind;
3674 /* Shared and default clauses are allowed on parallel and teams. */
3675 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_SHARED]
3676 = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
3677 clausesa[GFC_OMP_SPLIT_PARALLEL].default_sharing
3678 = code->ext.omp_clauses->default_sharing;
3680 if (mask & GFC_OMP_MASK_DO)
3682 /* First the clauses that are unique to some constructs. */
3683 clausesa[GFC_OMP_SPLIT_DO].ordered
3684 = code->ext.omp_clauses->ordered;
3685 clausesa[GFC_OMP_SPLIT_DO].sched_kind
3686 = code->ext.omp_clauses->sched_kind;
3687 clausesa[GFC_OMP_SPLIT_DO].chunk_size
3688 = code->ext.omp_clauses->chunk_size;
3689 clausesa[GFC_OMP_SPLIT_DO].nowait
3690 = code->ext.omp_clauses->nowait;
3691 /* Duplicate collapse. */
3692 clausesa[GFC_OMP_SPLIT_DO].collapse
3693 = code->ext.omp_clauses->collapse;
3695 if (mask & GFC_OMP_MASK_SIMD)
3697 clausesa[GFC_OMP_SPLIT_SIMD].safelen_expr
3698 = code->ext.omp_clauses->safelen_expr;
3699 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LINEAR]
3700 = code->ext.omp_clauses->lists[OMP_LIST_LINEAR];
3701 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_ALIGNED]
3702 = code->ext.omp_clauses->lists[OMP_LIST_ALIGNED];
3703 /* Duplicate collapse. */
3704 clausesa[GFC_OMP_SPLIT_SIMD].collapse
3705 = code->ext.omp_clauses->collapse;
3707 /* Private clause is supported on all constructs but target,
3708 it is enough to put it on the innermost one. For
3709 !$ omp do put it on parallel though,
3710 as that's what we did for OpenMP 3.1. */
3711 clausesa[innermost == GFC_OMP_SPLIT_DO
3712 ? (int) GFC_OMP_SPLIT_PARALLEL
3713 : innermost].lists[OMP_LIST_PRIVATE]
3714 = code->ext.omp_clauses->lists[OMP_LIST_PRIVATE];
3715 /* Firstprivate clause is supported on all constructs but
3716 target and simd. Put it on the outermost of those and
3717 duplicate on parallel. */
3718 if (mask & GFC_OMP_MASK_TEAMS)
3719 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_FIRSTPRIVATE]
3720 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3721 else if (mask & GFC_OMP_MASK_DISTRIBUTE)
3722 clausesa[GFC_OMP_SPLIT_DISTRIBUTE].lists[OMP_LIST_FIRSTPRIVATE]
3723 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3724 if (mask & GFC_OMP_MASK_PARALLEL)
3725 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_FIRSTPRIVATE]
3726 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3727 else if (mask & GFC_OMP_MASK_DO)
3728 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_FIRSTPRIVATE]
3729 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
3730 /* Lastprivate is allowed on do and simd. In
3731 parallel do{, simd} we actually want to put it on
3732 parallel rather than do. */
3733 if (mask & GFC_OMP_MASK_PARALLEL)
3734 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_LASTPRIVATE]
3735 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3736 else if (mask & GFC_OMP_MASK_DO)
3737 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_LASTPRIVATE]
3738 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3739 if (mask & GFC_OMP_MASK_SIMD)
3740 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LASTPRIVATE]
3741 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
3742 /* Reduction is allowed on simd, do, parallel and teams.
3743 Duplicate it on all of them, but omit on do if
3744 parallel is present. */
3745 if (mask & GFC_OMP_MASK_TEAMS)
3746 clausesa[GFC_OMP_SPLIT_TEAMS].lists[OMP_LIST_REDUCTION]
3747 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3748 if (mask & GFC_OMP_MASK_PARALLEL)
3749 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_REDUCTION]
3750 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3751 else if (mask & GFC_OMP_MASK_DO)
3752 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_REDUCTION]
3753 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3754 if (mask & GFC_OMP_MASK_SIMD)
3755 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_REDUCTION]
3756 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
3757 /* FIXME: This is currently being discussed. */
3758 if (mask & GFC_OMP_MASK_PARALLEL)
3759 clausesa[GFC_OMP_SPLIT_PARALLEL].if_expr
3760 = code->ext.omp_clauses->if_expr;
3762 clausesa[GFC_OMP_SPLIT_TARGET].if_expr
3763 = code->ext.omp_clauses->if_expr;
3765 if ((mask & (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
3766 == (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
3767 clausesa[GFC_OMP_SPLIT_DO].nowait = true;
3771 gfc_trans_omp_do_simd (gfc_code *code, stmtblock_t *pblock,
3772 gfc_omp_clauses *clausesa, tree omp_clauses)
3775 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3776 tree stmt, body, omp_do_clauses = NULL_TREE;
3779 gfc_start_block (&block);
3781 gfc_init_block (&block);
3783 if (clausesa == NULL)
3785 clausesa = clausesa_buf;
3786 gfc_split_omp_clauses (code, clausesa);
3790 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DO], code->loc);
3791 body = gfc_trans_omp_do (code, EXEC_OMP_SIMD, pblock ? pblock : &block,
3792 &clausesa[GFC_OMP_SPLIT_SIMD], omp_clauses);
3795 if (TREE_CODE (body) != BIND_EXPR)
3796 body = build3_v (BIND_EXPR, NULL, body, poplevel (1, 0));
3800 else if (TREE_CODE (body) != BIND_EXPR)
3801 body = build3_v (BIND_EXPR, NULL, body, NULL_TREE);
3804 stmt = make_node (OMP_FOR);
3805 TREE_TYPE (stmt) = void_type_node;
3806 OMP_FOR_BODY (stmt) = body;
3807 OMP_FOR_CLAUSES (stmt) = omp_do_clauses;
3811 gfc_add_expr_to_block (&block, stmt);
3812 return gfc_finish_block (&block);
3816 gfc_trans_omp_parallel_do (gfc_code *code, stmtblock_t *pblock,
3817 gfc_omp_clauses *clausesa)
3819 stmtblock_t block, *new_pblock = pblock;
3820 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3821 tree stmt, omp_clauses = NULL_TREE;
3824 gfc_start_block (&block);
3826 gfc_init_block (&block);
3828 if (clausesa == NULL)
3830 clausesa = clausesa_buf;
3831 gfc_split_omp_clauses (code, clausesa);
3834 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
3838 if (!clausesa[GFC_OMP_SPLIT_DO].ordered
3839 && clausesa[GFC_OMP_SPLIT_DO].sched_kind != OMP_SCHED_STATIC)
3840 new_pblock = █
3844 stmt = gfc_trans_omp_do (code, EXEC_OMP_DO, new_pblock,
3845 &clausesa[GFC_OMP_SPLIT_DO], omp_clauses);
3848 if (TREE_CODE (stmt) != BIND_EXPR)
3849 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3853 else if (TREE_CODE (stmt) != BIND_EXPR)
3854 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3855 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3857 OMP_PARALLEL_COMBINED (stmt) = 1;
3858 gfc_add_expr_to_block (&block, stmt);
3859 return gfc_finish_block (&block);
3863 gfc_trans_omp_parallel_do_simd (gfc_code *code, stmtblock_t *pblock,
3864 gfc_omp_clauses *clausesa)
3867 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
3868 tree stmt, omp_clauses = NULL_TREE;
3871 gfc_start_block (&block);
3873 gfc_init_block (&block);
3875 if (clausesa == NULL)
3877 clausesa = clausesa_buf;
3878 gfc_split_omp_clauses (code, clausesa);
3882 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
3886 stmt = gfc_trans_omp_do_simd (code, pblock, clausesa, omp_clauses);
3889 if (TREE_CODE (stmt) != BIND_EXPR)
3890 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3894 else if (TREE_CODE (stmt) != BIND_EXPR)
3895 stmt = build3_v (BIND_EXPR, NULL, stmt, NULL_TREE);
3898 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3900 OMP_PARALLEL_COMBINED (stmt) = 1;
3902 gfc_add_expr_to_block (&block, stmt);
3903 return gfc_finish_block (&block);
3907 gfc_trans_omp_parallel_sections (gfc_code *code)
3910 gfc_omp_clauses section_clauses;
3911 tree stmt, omp_clauses;
3913 memset (§ion_clauses, 0, sizeof (section_clauses));
3914 section_clauses.nowait = true;
3916 gfc_start_block (&block);
3917 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3920 stmt = gfc_trans_omp_sections (code, §ion_clauses);
3921 if (TREE_CODE (stmt) != BIND_EXPR)
3922 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3925 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3927 OMP_PARALLEL_COMBINED (stmt) = 1;
3928 gfc_add_expr_to_block (&block, stmt);
3929 return gfc_finish_block (&block);
3933 gfc_trans_omp_parallel_workshare (gfc_code *code)
3936 gfc_omp_clauses workshare_clauses;
3937 tree stmt, omp_clauses;
3939 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
3940 workshare_clauses.nowait = true;
3942 gfc_start_block (&block);
3943 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
3946 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
3947 if (TREE_CODE (stmt) != BIND_EXPR)
3948 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
3951 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
3953 OMP_PARALLEL_COMBINED (stmt) = 1;
3954 gfc_add_expr_to_block (&block, stmt);
3955 return gfc_finish_block (&block);
3959 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
3961 stmtblock_t block, body;
3962 tree omp_clauses, stmt;
3963 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
3965 gfc_start_block (&block);
3967 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
3969 gfc_init_block (&body);
3970 for (code = code->block; code; code = code->block)
3972 /* Last section is special because of lastprivate, so even if it
3973 is empty, chain it in. */
3974 stmt = gfc_trans_omp_code (code->next,
3975 has_lastprivate && code->block == NULL);
3976 if (! IS_EMPTY_STMT (stmt))
3978 stmt = build1_v (OMP_SECTION, stmt);
3979 gfc_add_expr_to_block (&body, stmt);
3982 stmt = gfc_finish_block (&body);
3984 stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
3986 gfc_add_expr_to_block (&block, stmt);
3988 return gfc_finish_block (&block);
3992 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
3994 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
3995 tree stmt = gfc_trans_omp_code (code->block->next, true);
3996 stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
4002 gfc_trans_omp_task (gfc_code *code)
4005 tree stmt, omp_clauses;
4007 gfc_start_block (&block);
4008 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
4010 stmt = gfc_trans_omp_code (code->block->next, true);
4011 stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
4013 gfc_add_expr_to_block (&block, stmt);
4014 return gfc_finish_block (&block);
4018 gfc_trans_omp_taskgroup (gfc_code *code)
4020 tree stmt = gfc_trans_code (code->block->next);
4021 return build1_loc (input_location, OMP_TASKGROUP, void_type_node, stmt);
4025 gfc_trans_omp_taskwait (void)
4027 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
4028 return build_call_expr_loc (input_location, decl, 0);
4032 gfc_trans_omp_taskyield (void)
4034 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
4035 return build_call_expr_loc (input_location, decl, 0);
4039 gfc_trans_omp_distribute (gfc_code *code, gfc_omp_clauses *clausesa)
4042 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
4043 tree stmt, omp_clauses = NULL_TREE;
4045 gfc_start_block (&block);
4046 if (clausesa == NULL)
4048 clausesa = clausesa_buf;
4049 gfc_split_omp_clauses (code, clausesa);
4053 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
4057 case EXEC_OMP_DISTRIBUTE:
4058 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
4059 case EXEC_OMP_TEAMS_DISTRIBUTE:
4060 /* This is handled in gfc_trans_omp_do. */
4063 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
4064 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
4065 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
4066 stmt = gfc_trans_omp_parallel_do (code, &block, clausesa);
4067 if (TREE_CODE (stmt) != BIND_EXPR)
4068 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
4072 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
4073 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4074 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4075 stmt = gfc_trans_omp_parallel_do_simd (code, &block, clausesa);
4076 if (TREE_CODE (stmt) != BIND_EXPR)
4077 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
4081 case EXEC_OMP_DISTRIBUTE_SIMD:
4082 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
4083 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
4084 stmt = gfc_trans_omp_do (code, EXEC_OMP_SIMD, &block,
4085 &clausesa[GFC_OMP_SPLIT_SIMD], NULL_TREE);
4086 if (TREE_CODE (stmt) != BIND_EXPR)
4087 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
4096 tree distribute = make_node (OMP_DISTRIBUTE);
4097 TREE_TYPE (distribute) = void_type_node;
4098 OMP_FOR_BODY (distribute) = stmt;
4099 OMP_FOR_CLAUSES (distribute) = omp_clauses;
4102 gfc_add_expr_to_block (&block, stmt);
4103 return gfc_finish_block (&block);
4107 gfc_trans_omp_teams (gfc_code *code, gfc_omp_clauses *clausesa)
4110 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
4111 tree stmt, omp_clauses = NULL_TREE;
4112 bool combined = true;
4114 gfc_start_block (&block);
4115 if (clausesa == NULL)
4117 clausesa = clausesa_buf;
4118 gfc_split_omp_clauses (code, clausesa);
4122 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TEAMS],
4126 case EXEC_OMP_TARGET_TEAMS:
4127 case EXEC_OMP_TEAMS:
4128 stmt = gfc_trans_omp_code (code->block->next, true);
4131 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
4132 case EXEC_OMP_TEAMS_DISTRIBUTE:
4133 stmt = gfc_trans_omp_do (code, EXEC_OMP_DISTRIBUTE, NULL,
4134 &clausesa[GFC_OMP_SPLIT_DISTRIBUTE],
4138 stmt = gfc_trans_omp_distribute (code, clausesa);
4141 stmt = build2_loc (input_location, OMP_TEAMS, void_type_node, stmt,
4144 OMP_TEAMS_COMBINED (stmt) = 1;
4145 gfc_add_expr_to_block (&block, stmt);
4146 return gfc_finish_block (&block);
4150 gfc_trans_omp_target (gfc_code *code)
4153 gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM];
4154 tree stmt, omp_clauses = NULL_TREE;
4156 gfc_start_block (&block);
4157 gfc_split_omp_clauses (code, clausesa);
4160 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_TARGET],
4162 if (code->op == EXEC_OMP_TARGET)
4163 stmt = gfc_trans_omp_code (code->block->next, true);
4167 stmt = gfc_trans_omp_teams (code, clausesa);
4168 if (TREE_CODE (stmt) != BIND_EXPR)
4169 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
4174 stmt = build2_loc (input_location, OMP_TARGET, void_type_node, stmt,
4176 gfc_add_expr_to_block (&block, stmt);
4177 return gfc_finish_block (&block);
4181 gfc_trans_omp_target_data (gfc_code *code)
4184 tree stmt, omp_clauses;
4186 gfc_start_block (&block);
4187 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
4189 stmt = gfc_trans_omp_code (code->block->next, true);
4190 stmt = build2_loc (input_location, OMP_TARGET_DATA, void_type_node, stmt,
4192 gfc_add_expr_to_block (&block, stmt);
4193 return gfc_finish_block (&block);
4197 gfc_trans_omp_target_update (gfc_code *code)
4200 tree stmt, omp_clauses;
4202 gfc_start_block (&block);
4203 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
4205 stmt = build1_loc (input_location, OMP_TARGET_UPDATE, void_type_node,
4207 gfc_add_expr_to_block (&block, stmt);
4208 return gfc_finish_block (&block);
4212 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
4214 tree res, tmp, stmt;
4215 stmtblock_t block, *pblock = NULL;
4216 stmtblock_t singleblock;
4217 int saved_ompws_flags;
4218 bool singleblock_in_progress = false;
4219 /* True if previous gfc_code in workshare construct is not workshared. */
4220 bool prev_singleunit;
4222 code = code->block->next;
4226 gfc_start_block (&block);
4229 ompws_flags = OMPWS_WORKSHARE_FLAG;
4230 prev_singleunit = false;
4232 /* Translate statements one by one to trees until we reach
4233 the end of the workshare construct. Adjacent gfc_codes that
4234 are a single unit of work are clustered and encapsulated in a
4235 single OMP_SINGLE construct. */
4236 for (; code; code = code->next)
4238 if (code->here != 0)
4240 res = gfc_trans_label_here (code);
4241 gfc_add_expr_to_block (pblock, res);
4244 /* No dependence analysis, use for clauses with wait.
4245 If this is the last gfc_code, use default omp_clauses. */
4246 if (code->next == NULL && clauses->nowait)
4247 ompws_flags |= OMPWS_NOWAIT;
4249 /* By default, every gfc_code is a single unit of work. */
4250 ompws_flags |= OMPWS_CURR_SINGLEUNIT;
4251 ompws_flags &= ~OMPWS_SCALARIZER_WS;
4260 res = gfc_trans_assign (code);
4263 case EXEC_POINTER_ASSIGN:
4264 res = gfc_trans_pointer_assign (code);
4267 case EXEC_INIT_ASSIGN:
4268 res = gfc_trans_init_assign (code);
4272 res = gfc_trans_forall (code);
4276 res = gfc_trans_where (code);
4279 case EXEC_OMP_ATOMIC:
4280 res = gfc_trans_omp_directive (code);
4283 case EXEC_OMP_PARALLEL:
4284 case EXEC_OMP_PARALLEL_DO:
4285 case EXEC_OMP_PARALLEL_SECTIONS:
4286 case EXEC_OMP_PARALLEL_WORKSHARE:
4287 case EXEC_OMP_CRITICAL:
4288 saved_ompws_flags = ompws_flags;
4290 res = gfc_trans_omp_directive (code);
4291 ompws_flags = saved_ompws_flags;
4295 gfc_internal_error ("gfc_trans_omp_workshare(): Bad statement code");
4298 gfc_set_backend_locus (&code->loc);
4300 if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
4302 if (prev_singleunit)
4304 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
4305 /* Add current gfc_code to single block. */
4306 gfc_add_expr_to_block (&singleblock, res);
4309 /* Finish single block and add it to pblock. */
4310 tmp = gfc_finish_block (&singleblock);
4311 tmp = build2_loc (input_location, OMP_SINGLE,
4312 void_type_node, tmp, NULL_TREE);
4313 gfc_add_expr_to_block (pblock, tmp);
4314 /* Add current gfc_code to pblock. */
4315 gfc_add_expr_to_block (pblock, res);
4316 singleblock_in_progress = false;
4321 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
4323 /* Start single block. */
4324 gfc_init_block (&singleblock);
4325 gfc_add_expr_to_block (&singleblock, res);
4326 singleblock_in_progress = true;
4329 /* Add the new statement to the block. */
4330 gfc_add_expr_to_block (pblock, res);
4332 prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
4336 /* Finish remaining SINGLE block, if we were in the middle of one. */
4337 if (singleblock_in_progress)
4339 /* Finish single block and add it to pblock. */
4340 tmp = gfc_finish_block (&singleblock);
4341 tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
4343 ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
4345 gfc_add_expr_to_block (pblock, tmp);
4348 stmt = gfc_finish_block (pblock);
4349 if (TREE_CODE (stmt) != BIND_EXPR)
4351 if (!IS_EMPTY_STMT (stmt))
4353 tree bindblock = poplevel (1, 0);
4354 stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
4362 if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
4363 stmt = gfc_trans_omp_barrier ();
4370 gfc_trans_oacc_declare (stmtblock_t *block, gfc_namespace *ns)
4373 oacc_clauses = gfc_trans_omp_clauses (block, ns->oacc_declare_clauses,
4374 ns->oacc_declare_clauses->loc);
4375 return build1_loc (ns->oacc_declare_clauses->loc.lb->location,
4376 OACC_DECLARE, void_type_node, oacc_clauses);
4380 gfc_trans_oacc_directive (gfc_code *code)
4384 case EXEC_OACC_PARALLEL_LOOP:
4385 case EXEC_OACC_KERNELS_LOOP:
4386 return gfc_trans_oacc_combined_directive (code);
4387 case EXEC_OACC_PARALLEL:
4388 case EXEC_OACC_KERNELS:
4389 case EXEC_OACC_DATA:
4390 case EXEC_OACC_HOST_DATA:
4391 return gfc_trans_oacc_construct (code);
4392 case EXEC_OACC_LOOP:
4393 return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
4395 case EXEC_OACC_UPDATE:
4396 case EXEC_OACC_CACHE:
4397 case EXEC_OACC_ENTER_DATA:
4398 case EXEC_OACC_EXIT_DATA:
4399 return gfc_trans_oacc_executable_directive (code);
4400 case EXEC_OACC_WAIT:
4401 return gfc_trans_oacc_wait_directive (code);
4408 gfc_trans_omp_directive (gfc_code *code)
4412 case EXEC_OMP_ATOMIC:
4413 return gfc_trans_omp_atomic (code);
4414 case EXEC_OMP_BARRIER:
4415 return gfc_trans_omp_barrier ();
4416 case EXEC_OMP_CANCEL:
4417 return gfc_trans_omp_cancel (code);
4418 case EXEC_OMP_CANCELLATION_POINT:
4419 return gfc_trans_omp_cancellation_point (code);
4420 case EXEC_OMP_CRITICAL:
4421 return gfc_trans_omp_critical (code);
4422 case EXEC_OMP_DISTRIBUTE:
4425 return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
4427 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO:
4428 case EXEC_OMP_DISTRIBUTE_PARALLEL_DO_SIMD:
4429 case EXEC_OMP_DISTRIBUTE_SIMD:
4430 return gfc_trans_omp_distribute (code, NULL);
4431 case EXEC_OMP_DO_SIMD:
4432 return gfc_trans_omp_do_simd (code, NULL, NULL, NULL_TREE);
4433 case EXEC_OMP_FLUSH:
4434 return gfc_trans_omp_flush ();
4435 case EXEC_OMP_MASTER:
4436 return gfc_trans_omp_master (code);
4437 case EXEC_OMP_ORDERED:
4438 return gfc_trans_omp_ordered (code);
4439 case EXEC_OMP_PARALLEL:
4440 return gfc_trans_omp_parallel (code);
4441 case EXEC_OMP_PARALLEL_DO:
4442 return gfc_trans_omp_parallel_do (code, NULL, NULL);
4443 case EXEC_OMP_PARALLEL_DO_SIMD:
4444 return gfc_trans_omp_parallel_do_simd (code, NULL, NULL);
4445 case EXEC_OMP_PARALLEL_SECTIONS:
4446 return gfc_trans_omp_parallel_sections (code);
4447 case EXEC_OMP_PARALLEL_WORKSHARE:
4448 return gfc_trans_omp_parallel_workshare (code);
4449 case EXEC_OMP_SECTIONS:
4450 return gfc_trans_omp_sections (code, code->ext.omp_clauses);
4451 case EXEC_OMP_SINGLE:
4452 return gfc_trans_omp_single (code, code->ext.omp_clauses);
4453 case EXEC_OMP_TARGET:
4454 case EXEC_OMP_TARGET_TEAMS:
4455 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE:
4456 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO:
4457 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4458 case EXEC_OMP_TARGET_TEAMS_DISTRIBUTE_SIMD:
4459 return gfc_trans_omp_target (code);
4460 case EXEC_OMP_TARGET_DATA:
4461 return gfc_trans_omp_target_data (code);
4462 case EXEC_OMP_TARGET_UPDATE:
4463 return gfc_trans_omp_target_update (code);
4465 return gfc_trans_omp_task (code);
4466 case EXEC_OMP_TASKGROUP:
4467 return gfc_trans_omp_taskgroup (code);
4468 case EXEC_OMP_TASKWAIT:
4469 return gfc_trans_omp_taskwait ();
4470 case EXEC_OMP_TASKYIELD:
4471 return gfc_trans_omp_taskyield ();
4472 case EXEC_OMP_TEAMS:
4473 case EXEC_OMP_TEAMS_DISTRIBUTE:
4474 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO:
4475 case EXEC_OMP_TEAMS_DISTRIBUTE_PARALLEL_DO_SIMD:
4476 case EXEC_OMP_TEAMS_DISTRIBUTE_SIMD:
4477 return gfc_trans_omp_teams (code, NULL);
4478 case EXEC_OMP_WORKSHARE:
4479 return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
4486 gfc_trans_omp_declare_simd (gfc_namespace *ns)
4491 gfc_omp_declare_simd *ods;
4492 for (ods = ns->omp_declare_simd; ods; ods = ods->next)
4494 tree c = gfc_trans_omp_clauses (NULL, ods->clauses, ods->where, true);
4495 tree fndecl = ns->proc_name->backend_decl;
4497 c = tree_cons (NULL_TREE, c, NULL_TREE);
4498 c = build_tree_list (get_identifier ("omp declare simd"), c);
4499 TREE_CHAIN (c) = DECL_ATTRIBUTES (fndecl);
4500 DECL_ATTRIBUTES (fndecl) = c;