1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
26 #include "gimple-expr.h"
27 #include "gimplify.h" /* For create_tmp_var_raw. */
28 #include "stringpool.h"
29 #include "diagnostic-core.h" /* For internal_error. */
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
41 /* True if OpenMP should privatize what this DECL points to rather
42 than the DECL itself. */
/* NOTE(review): the embedded original line numbering in this excerpt is
   non-contiguous, so braces and return statements are missing here.  */
45 gfc_omp_privatize_by_reference (const_tree decl)
47 tree type = TREE_TYPE (decl);
/* REFERENCE_TYPE user decls (and PARM_DECLs) are privatized by reference.  */
49 if (TREE_CODE (type) == REFERENCE_TYPE
50 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL)
53 if (TREE_CODE (type) == POINTER_TYPE)
55 /* Array POINTER/ALLOCATABLE have aggregate types, all user variables
56 that have POINTER_TYPE type and don't have GFC_POINTER_TYPE_P
57 set are supposed to be privatized by reference. */
58 if (GFC_POINTER_TYPE_P (type))
/* Non-artificial pointers that are not procedure pointers.  */
61 if (!DECL_ARTIFICIAL (decl)
62 && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
65 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
/* Artificial pointers backed by a saved array descriptor.  */
67 if (DECL_LANG_SPECIFIC (decl)
68 && GFC_DECL_SAVED_DESCRIPTOR (decl))
75 /* True if OpenMP sharing attribute of DECL is predetermined. */
/* NOTE(review): excerpt — original line numbering is non-contiguous, so a
   few lines (e.g. part of the assumed-size condition) are missing.  */
77 enum omp_clause_default_kind
78 gfc_omp_predetermined_sharing (tree decl)
/* Artificial decls that are neither function results nor saved-descriptor
   backed are predetermined shared.  */
80 if (DECL_ARTIFICIAL (decl)
81 && ! GFC_DECL_RESULT (decl)
82 && ! (DECL_LANG_SPECIFIC (decl)
83 && GFC_DECL_SAVED_DESCRIPTOR (decl)))
84 return OMP_CLAUSE_DEFAULT_SHARED;
86 /* Cray pointees shouldn't be listed in any clauses and should be
87 gimplified to dereference of the corresponding Cray pointer.
88 Make them all private, so that they are emitted in the debug
90 if (GFC_DECL_CRAY_POINTEE (decl))
91 return OMP_CLAUSE_DEFAULT_PRIVATE;
93 /* Assumed-size arrays are predetermined shared. */
94 if (TREE_CODE (decl) == PARM_DECL
95 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
96 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
97 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
98 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
100 return OMP_CLAUSE_DEFAULT_SHARED;
102 /* Dummy procedures aren't considered variables by OpenMP, thus are
103 disallowed in OpenMP clauses. They are represented as PARM_DECLs
104 in the middle-end, so return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE here
105 to avoid complaining about their uses with default(none). */
106 if (TREE_CODE (decl) == PARM_DECL
107 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
108 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == FUNCTION_TYPE)
109 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
111 /* COMMON and EQUIVALENCE decls are shared. They
112 are only referenced through DECL_VALUE_EXPR of the variables
113 contained in them. If those are privatized, they will not be
114 gimplified to the COMMON or EQUIVALENCE decls. */
115 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
116 return OMP_CLAUSE_DEFAULT_SHARED;
118 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
119 return OMP_CLAUSE_DEFAULT_SHARED;
/* Otherwise the sharing attribute is not predetermined.  */
121 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
124 /* Return decl that should be used when reporting DEFAULT(NONE)
/* For artificial decls that carry a saved array descriptor, report the
   user-visible descriptor decl instead of the artificial one.  */
128 gfc_omp_report_decl (tree decl)
130 if (DECL_ARTIFICIAL (decl)
131 && DECL_LANG_SPECIFIC (decl)
132 && GFC_DECL_SAVED_DESCRIPTOR (decl))
133 return GFC_DECL_SAVED_DESCRIPTOR (decl);
138 /* Return true if DECL in private clause needs
139 OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause. */
141 gfc_omp_private_outer_ref (tree decl)
143 tree type = TREE_TYPE (decl);
/* Allocatable arrays need the outer reference so the private copy can
   mirror the outer variable's allocation status.  */
145 if (GFC_DESCRIPTOR_TYPE_P (type)
146 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
152 /* Return code to initialize DECL with its default constructor, or
153 NULL if there's nothing to do. */
/* NOTE(review): excerpt — non-contiguous original numbering; early
   returns and some closing braces are not visible here.  */
156 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
158 tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
159 stmtblock_t block, cond_block;
/* Only allocatable descriptor arrays need explicit construction.  */
161 if (! GFC_DESCRIPTOR_TYPE_P (type)
162 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
165 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION)
168 gcc_assert (outer != NULL);
169 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
170 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);
172 /* Allocatable arrays in PRIVATE clauses need to be set to
173 "not currently allocated" allocation status if outer
174 array is "not currently allocated", otherwise should be allocated. */
175 gfc_start_block (&block);
177 gfc_init_block (&cond_block);
/* THEN branch: copy the outer descriptor, compute the byte size from the
   descriptor bounds/stride, then malloc a fresh data area.  */
179 gfc_add_modify (&cond_block, decl, outer);
180 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
181 size = gfc_conv_descriptor_ubound_get (decl, rank);
182 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
183 size, gfc_conv_descriptor_lbound_get (decl, rank));
184 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
185 size, gfc_index_one_node);
186 if (GFC_TYPE_ARRAY_RANK (type) > 1)
187 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
188 size, gfc_conv_descriptor_stride_get (decl, rank));
189 esize = fold_convert (gfc_array_index_type,
190 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
191 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
193 size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
195 ptr = gfc_create_var (pvoid_type_node, NULL);
196 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
197 gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
199 then_b = gfc_finish_block (&cond_block);
/* ELSE branch: outer is unallocated, so mark the private copy
   unallocated too.  */
201 gfc_init_block (&cond_block);
202 gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
203 else_b = gfc_finish_block (&cond_block);
/* Branch on whether the outer array's data pointer is non-NULL.  */
205 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
206 fold_convert (pvoid_type_node,
207 gfc_conv_descriptor_data_get (outer)),
209 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
210 void_type_node, cond, then_b, else_b));
212 return gfc_finish_block (&block);
215 /* Build and return code for a copy constructor from SRC to DEST. */
/* NOTE(review): excerpt — non-contiguous original numbering; a few
   argument lines of the memcpy call are missing from this view.  */
218 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
220 tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
221 tree cond, then_b, else_b;
222 stmtblock_t block, cond_block;
/* Non-allocatable data is copied with a plain assignment.  */
224 if (! GFC_DESCRIPTOR_TYPE_P (type)
225 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
226 return build2_v (MODIFY_EXPR, dest, src);
228 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);
230 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
231 and copied from SRC. */
232 gfc_start_block (&block);
234 gfc_init_block (&cond_block);
/* THEN branch: copy descriptor, compute byte size from bounds/stride,
   allocate and memcpy the payload from SRC.  */
236 gfc_add_modify (&cond_block, dest, src);
237 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
238 size = gfc_conv_descriptor_ubound_get (dest, rank);
239 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
240 size, gfc_conv_descriptor_lbound_get (dest, rank));
241 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
242 size, gfc_index_one_node);
243 if (GFC_TYPE_ARRAY_RANK (type) > 1)
244 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
245 size, gfc_conv_descriptor_stride_get (dest, rank));
246 esize = fold_convert (gfc_array_index_type,
247 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
248 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
250 size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
252 ptr = gfc_create_var (pvoid_type_node, NULL);
253 gfc_allocate_using_malloc (&cond_block, ptr, size, NULL_TREE);
254 gfc_conv_descriptor_data_set (&cond_block, dest, ptr);
256 call = build_call_expr_loc (input_location,
257 builtin_decl_explicit (BUILT_IN_MEMCPY),
259 fold_convert (pvoid_type_node,
260 gfc_conv_descriptor_data_get (src)),
262 gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
263 then_b = gfc_finish_block (&cond_block);
/* ELSE branch: SRC unallocated, leave DEST unallocated as well.  */
265 gfc_init_block (&cond_block);
266 gfc_conv_descriptor_data_set (&cond_block, dest, null_pointer_node);
267 else_b = gfc_finish_block (&cond_block);
/* Branch on whether SRC's data pointer is non-NULL.  */
269 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
270 fold_convert (pvoid_type_node,
271 gfc_conv_descriptor_data_get (src)),
273 gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
274 void_type_node, cond, then_b, else_b));
276 return gfc_finish_block (&block);
279 /* Similarly, except use an assignment operator instead. */
/* Unlike the copy ctor, DEST is assumed already allocated here: the data
   is memcpy'd in place, with no conditional allocation.  */
282 gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
284 tree type = TREE_TYPE (dest), rank, size, esize, call;
287 if (! GFC_DESCRIPTOR_TYPE_P (type)
288 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
289 return build2_v (MODIFY_EXPR, dest, src);
291 /* Handle copying allocatable arrays. */
292 gfc_start_block (&block);
/* Byte size = (ubound - lbound + 1) [* stride for rank > 1] * elem size.  */
294 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
295 size = gfc_conv_descriptor_ubound_get (dest, rank);
296 size = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
297 size, gfc_conv_descriptor_lbound_get (dest, rank));
298 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
299 size, gfc_index_one_node);
300 if (GFC_TYPE_ARRAY_RANK (type) > 1)
301 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
302 size, gfc_conv_descriptor_stride_get (dest, rank));
303 esize = fold_convert (gfc_array_index_type,
304 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
305 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
307 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
308 call = build_call_expr_loc (input_location,
309 builtin_decl_explicit (BUILT_IN_MEMCPY), 3,
310 fold_convert (pvoid_type_node,
311 gfc_conv_descriptor_data_get (dest)),
312 fold_convert (pvoid_type_node,
313 gfc_conv_descriptor_data_get (src)),
315 gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
317 return gfc_finish_block (&block);
320 /* Build and return code destructing DECL. Return NULL if nothing
324 gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
326 tree type = TREE_TYPE (decl);
/* Only allocatable descriptor arrays need destruction.  */
328 if (! GFC_DESCRIPTOR_TYPE_P (type)
329 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
332 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_REDUCTION)
335 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
336 to be deallocated if they were allocated. */
337 return gfc_trans_dealloc_allocated (decl, false, NULL);
341 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
342 disregarded in OpenMP construct, because it is going to be
343 remapped during OpenMP lowering. SHARED is true if DECL
344 is going to be shared, false if it is going to be privatized. */
347 gfc_omp_disregard_value_expr (tree decl, bool shared)
/* COMMON/EQUIVALENCE members have a DECL_VALUE_EXPR that is a
   COMPONENT_REF into the COMMON/EQUIVALENCE VAR_DECL.  */
349 if (GFC_DECL_COMMON_OR_EQUIV (decl)
350 && DECL_HAS_VALUE_EXPR_P (decl))
352 tree value = DECL_VALUE_EXPR (decl);
354 if (TREE_CODE (value) == COMPONENT_REF
355 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
356 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
358 /* If variable in COMMON or EQUIVALENCE is privatized, return
359 true, as just that variable is supposed to be privatized,
360 not the whole COMMON or whole EQUIVALENCE.
361 For shared variables in COMMON or EQUIVALENCE, let them be
362 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
363 from the same COMMON or EQUIVALENCE just one sharing of the
364 whole COMMON or EQUIVALENCE is enough. */
/* Function results with a value expr are also handled specially.  */
369 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
375 /* Return true if DECL that is shared iff SHARED is true should
376 be put into OMP_CLAUSE_PRIVATE with OMP_CLAUSE_PRIVATE_DEBUG
380 gfc_omp_private_debug_clause (tree decl, bool shared)
/* Cray pointees are emitted as private-debug clauses.  */
382 if (GFC_DECL_CRAY_POINTEE (decl))
/* COMMON/EQUIVALENCE members — same COMPONENT_REF shape as checked in
   gfc_omp_disregard_value_expr above.  */
385 if (GFC_DECL_COMMON_OR_EQUIV (decl)
386 && DECL_HAS_VALUE_EXPR_P (decl))
388 tree value = DECL_VALUE_EXPR (decl);
390 if (TREE_CODE (value) == COMPONENT_REF
391 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
392 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
399 /* Register language specific type size variables as potentially OpenMP
400 firstprivate variables. */
403 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
405 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
409 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
/* Per-dimension bounds and strides, then the scalar size/offset.  */
410 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
412 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
413 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
414 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
416 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
417 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
/* Prepend clause NODE to the clause chain TAIL.  */
423 gfc_trans_add_clause (tree node, tree tail)
425 OMP_CLAUSE_CHAIN (node) = tail;
/* Translate SYM into the tree decl to use inside an OpenMP clause,
   handling entry masters, function results and alternate entry points.
   NOTE(review): excerpt — non-contiguous original numbering; several
   braces, declarations and returns are missing from this view.  */
430 gfc_trans_omp_variable (gfc_symbol *sym, bool declare_simd)
435 gfc_symbol *proc_sym;
436 gfc_formal_arglist *f;
/* Dummy argument of an ENTRY-master procedure: compute its position
   among the dummy args (cnt) and return it as an integer constant.  */
438 gcc_assert (sym->attr.dummy);
439 proc_sym = sym->ns->proc_name;
440 if (proc_sym->attr.entry_master)
442 if (gfc_return_by_reference (proc_sym))
445 if (proc_sym->ts.type == BT_CHARACTER)
448 for (f = gfc_sym_get_dummy_args (proc_sym); f; f = f->next)
454 return build_int_cst (integer_type_node, cnt);
457 tree t = gfc_get_symbol_decl (sym);
461 bool alternate_entry;
/* Classify the symbol: plain function result, alternate-entry result,
   or result of an entry-master procedure.  */
464 return_value = sym->attr.function && sym->result == sym;
465 alternate_entry = sym->attr.function && sym->attr.entry
466 && sym->result == sym;
467 entry_master = sym->attr.result
468 && sym->ns->proc_name->attr.entry_master
469 && !gfc_return_by_reference (sym->ns->proc_name);
470 parent_decl = current_function_decl
471 ? DECL_CONTEXT (current_function_decl) : NULL_TREE;
/* Detect whether the symbol belongs to the parent function (e.g. when
   translating inside a contained procedure).  */
473 if ((t == parent_decl && return_value)
474 || (sym->ns && sym->ns->proc_name
475 && sym->ns->proc_name->backend_decl == parent_decl
476 && (alternate_entry || entry_master)))
481 /* Special case for assigning the return value of a function.
482 Self recursive functions must have an explicit return value. */
483 if (return_value && (t == current_function_decl || parent_flag))
484 t = gfc_get_fake_result_decl (sym, parent_flag);
486 /* Similarly for alternate entry points. */
487 else if (alternate_entry
488 && (sym->ns->proc_name->backend_decl == current_function_decl
491 gfc_entry_list *el = NULL;
493 for (el = sym->ns->entries; el; el = el->next)
496 t = gfc_get_fake_result_decl (sym, parent_flag);
/* And for results of entry-master procedures.  */
501 else if (entry_master
502 && (sym->ns->proc_name->backend_decl == current_function_decl
504 t = gfc_get_fake_result_decl (sym, parent_flag);
/* Build an OMP clause of kind CODE for every referenced symbol in
   NAMELIST and chain the clauses onto LIST, which is returned.  */
510 gfc_trans_omp_variable_list (enum omp_clause_code code,
511 gfc_omp_namelist *namelist, tree list,
/* declare_simd also admits unreferenced symbols.  */
514 for (; namelist != NULL; namelist = namelist->next)
515 if (namelist->sym->attr.referenced || declare_simd)
517 tree t = gfc_trans_omp_variable (namelist->sym, declare_simd);
518 if (t != error_mark_node)
520 tree node = build_omp_clause (input_location, code);
521 OMP_CLAUSE_DECL (node) = t;
522 list = gfc_trans_add_clause (node, list);
/* Callback payload for omp_udr_find_orig: the UDR being searched plus a
   flag (omp_orig_seen, declared on a line not visible in this excerpt).  */
528 struct omp_udr_find_orig_data
530 gfc_omp_udr *omp_udr;
/* Expression walker callback: record whether the UDR's omp_orig symbol
   is referenced anywhere in the walked expression.  */
535 omp_udr_find_orig (gfc_expr **e, int *walk_subtrees ATTRIBUTE_UNUSED,
538 struct omp_udr_find_orig_data *cd = (struct omp_udr_find_orig_data *) data;
539 if ((*e)->expr_type == EXPR_VARIABLE
540 && (*e)->symtree->n.sym == cd->omp_udr->omp_orig)
541 cd->omp_orig_seen = true;
/* Scalarize a user-defined-reduction initializer or combiner over array
   operands SYME/OUTERE, producing the element-wise loop as a tree.
   IS_INITIALIZER selects the UDR's initializer_ns vs. combiner_ns.
   NOTE(review): excerpt — non-contiguous original numbering; some
   declarations and branch lines are missing from this view.  */
547 gfc_trans_omp_udr_expr (gfc_omp_namelist *n, bool is_initializer,
548 gfc_expr *syme, gfc_expr *outere)
550 gfc_se symse, outerse;
551 gfc_ss *symss, *outerss;
553 stmtblock_t block, body;
556 gfc_namespace *ns = (is_initializer
557 ? n->udr->initializer_ns : n->udr->combiner_ns);
/* Work on copies so the caller's expressions are not mutated.  */
559 syme = gfc_copy_expr (syme);
560 outere = gfc_copy_expr (outere);
561 gfc_init_se (&symse, NULL);
562 gfc_init_se (&outerse, NULL);
563 gfc_start_block (&block);
/* Set up a scalarization loop over both array expressions.  */
564 gfc_init_loopinfo (&loop);
565 symss = gfc_walk_expr (syme);
566 outerss = gfc_walk_expr (outere);
567 gfc_add_ss_to_loop (&loop, symss);
568 gfc_add_ss_to_loop (&loop, outerss);
569 gfc_conv_ss_startstride (&loop);
570 /* Enable loop reversal. */
571 for (i = 0; i < GFC_MAX_DIMENSIONS; i++)
572 loop.reverse[i] = GFC_ENABLE_REVERSE;
573 gfc_conv_loop_setup (&loop, &ns->code->loc);
574 gfc_copy_loopinfo_to_se (&symse, &loop);
575 gfc_copy_loopinfo_to_se (&outerse, &loop);
577 outerse.ss = outerss;
578 gfc_mark_ss_chain_used (symss, 1);
579 gfc_mark_ss_chain_used (outerss, 1);
580 gfc_start_scalarized_body (&loop, &body);
581 gfc_conv_expr (&symse, syme);
582 gfc_conv_expr (&outerse, outere);
/* Point the UDR placeholder symbols at the per-element values:
   priv/orig for the initializer, out/in for the combiner.  */
586 n->udr->omp_priv->backend_decl = symse.expr;
587 n->udr->omp_orig->backend_decl = outerse.expr;
591 n->udr->omp_out->backend_decl = outerse.expr;
592 n->udr->omp_in->backend_decl = symse.expr;
/* Translate the UDR body: either a plain assignment or a call.  */
595 if (ns->code->op == EXEC_ASSIGN)
596 tem = gfc_trans_assignment (ns->code->expr1, ns->code->expr2,
599 tem = gfc_trans_call (ns->code, false, NULL_TREE, NULL_TREE, false);
600 gfc_add_expr_to_block (&body, tem);
602 gcc_assert (symse.ss == gfc_ss_terminator
603 && outerse.ss == gfc_ss_terminator);
604 /* Generate the copying loops. */
605 gfc_trans_scalarizing_loops (&loop, &body);
607 /* Wrap the whole thing up. */
608 gfc_add_block_to_block (&block, &loop.pre);
609 gfc_add_block_to_block (&block, &loop.post);
611 gfc_cleanup_loop (&loop);
612 gfc_free_expr (syme);
613 gfc_free_expr (outere);
615 return gfc_finish_block (&block);
/* Fill in OMP_CLAUSE_REDUCTION_INIT/MERGE/PLACEHOLDER of clause C for an
   array reduction or a user-defined reduction on namelist entry N.
   Builds fake gfc symbols/symtrees so the init and merge statements can
   be expressed and resolved as ordinary Fortran expressions.
   NOTE(review): excerpt — non-contiguous original numbering; case
   labels, braces and some statements are missing from this view.  */
619 gfc_trans_omp_array_reduction_or_udr (tree c, gfc_omp_namelist *n, locus where)
621 gfc_symbol *sym = n->sym;
622 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
623 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
624 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
625 gfc_symbol omp_var_copy[4];
626 gfc_expr *e1, *e2, *e3, *e4;
628 tree decl, backend_decl, stmt, type, outer_decl;
629 locus old_loc = gfc_current_locus;
633 decl = OMP_CLAUSE_DECL (c);
/* Temporarily switch the current locus; restored near the end.  */
634 gfc_current_locus = where;
635 type = TREE_TYPE (decl);
636 outer_decl = create_tmp_var_raw (type, NULL);
/* Allocatable array dummy passed by reference: strip the REFERENCE.  */
637 if (TREE_CODE (decl) == PARM_DECL
638 && TREE_CODE (type) == REFERENCE_TYPE
639 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (type))
640 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (type)) == GFC_ARRAY_ALLOCATABLE)
642 decl = build_fold_indirect_ref (decl);
643 type = TREE_TYPE (type);
646 /* Create a fake symbol for init value. */
647 memset (&init_val_sym, 0, sizeof (init_val_sym));
648 init_val_sym.ns = sym->ns;
649 init_val_sym.name = sym->name;
650 init_val_sym.ts = sym->ts;
651 init_val_sym.attr.referenced = 1;
652 init_val_sym.declared_at = where;
653 init_val_sym.attr.flavor = FL_VARIABLE;
/* For built-in reductions take the standard identity value; UDRs get it
   from the initializer namespace (or zero/NULL fallbacks below).  */
654 if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
655 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
656 else if (n->udr->initializer_ns)
659 switch (sym->ts.type)
665 backend_decl = build_zero_cst (gfc_sym_type (&init_val_sym));
668 backend_decl = NULL_TREE;
671 init_val_sym.backend_decl = backend_decl;
673 /* Create a fake symbol for the outer array reference. */
676 outer_sym.as = gfc_copy_array_spec (sym->as);
677 outer_sym.attr.dummy = 0;
678 outer_sym.attr.result = 0;
679 outer_sym.attr.flavor = FL_VARIABLE;
680 outer_sym.backend_decl = outer_decl;
681 if (decl != OMP_CLAUSE_DECL (c))
682 outer_sym.backend_decl = build_fold_indirect_ref (outer_decl);
684 /* Create fake symtrees for it. */
685 symtree1 = gfc_new_symtree (&root1, sym->name);
686 symtree1->n.sym = sym;
687 gcc_assert (symtree1 == root1);
689 symtree2 = gfc_new_symtree (&root2, sym->name);
690 symtree2->n.sym = &init_val_sym;
691 gcc_assert (symtree2 == root2);
693 symtree3 = gfc_new_symtree (&root3, sym->name);
694 symtree3->n.sym = &outer_sym;
695 gcc_assert (symtree3 == root3);
/* Save the UDR placeholder symbols so they can be restored at the end
   (see the omp_var_copy restores at the bottom of this function).  */
697 memset (omp_var_copy, 0, sizeof omp_var_copy);
700 omp_var_copy[0] = *n->udr->omp_out;
701 omp_var_copy[1] = *n->udr->omp_in;
702 if (sym->attr.dimension)
704 n->udr->omp_out->ts = sym->ts;
705 n->udr->omp_in->ts = sym->ts;
709 *n->udr->omp_out = outer_sym;
710 *n->udr->omp_in = *sym;
712 if (n->udr->initializer_ns)
714 omp_var_copy[2] = *n->udr->omp_priv;
715 omp_var_copy[3] = *n->udr->omp_orig;
716 if (sym->attr.dimension)
718 n->udr->omp_priv->ts = sym->ts;
719 n->udr->omp_orig->ts = sym->ts;
723 *n->udr->omp_priv = *sym;
724 *n->udr->omp_orig = outer_sym;
729 /* Create expressions. */
/* e1: reference to the reduction variable itself (full array if
   dimensioned).  */
730 e1 = gfc_get_expr ();
731 e1->expr_type = EXPR_VARIABLE;
733 e1->symtree = symtree1;
735 if (sym->attr.dimension)
737 e1->ref = ref = gfc_get_ref ();
738 ref->type = REF_ARRAY;
739 ref->u.ar.where = where;
740 ref->u.ar.as = sym->as;
741 ref->u.ar.type = AR_FULL;
744 t = gfc_resolve_expr (e1);
/* e2: the initialization value, from the fake init symbol, a derived
   type default initializer, or the UDR initializer assignment.  */
748 if (backend_decl != NULL_TREE)
750 e2 = gfc_get_expr ();
751 e2->expr_type = EXPR_VARIABLE;
753 e2->symtree = symtree2;
755 t = gfc_resolve_expr (e2);
758 else if (n->udr->initializer_ns == NULL)
760 gcc_assert (sym->ts.type == BT_DERIVED);
761 e2 = gfc_default_initializer (&sym->ts);
763 t = gfc_resolve_expr (e2);
766 else if (n->udr->initializer_ns->code->op == EXEC_ASSIGN)
768 if (!sym->attr.dimension)
770 e2 = gfc_copy_expr (n->udr->initializer_ns->code->expr2);
771 t = gfc_resolve_expr (e2);
/* Mark the clause if the UDR initializer references omp_orig.  */
775 if (n->udr && n->udr->initializer_ns)
777 struct omp_udr_find_orig_data cd;
779 cd.omp_orig_seen = false;
780 gfc_code_walker (&n->udr->initializer_ns->code,
781 gfc_dummy_code_callback, omp_udr_find_orig, &cd);
782 if (cd.omp_orig_seen)
783 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1;
/* e3: same reference as e1 but through the outer (placeholder) symbol.  */
786 e3 = gfc_copy_expr (e1);
787 e3->symtree = symtree3;
788 t = gfc_resolve_expr (e3);
/* e4: the combining expression outer OP inner, per reduction code.  */
793 switch (OMP_CLAUSE_REDUCTION_CODE (c))
797 e4 = gfc_add (e3, e1);
800 e4 = gfc_multiply (e3, e1);
802 case TRUTH_ANDIF_EXPR:
803 e4 = gfc_and (e3, e1);
805 case TRUTH_ORIF_EXPR:
806 e4 = gfc_or (e3, e1);
809 e4 = gfc_eqv (e3, e1);
812 e4 = gfc_neqv (e3, e1);
/* User-defined combiner expressed as an assignment.  */
830 if (n->udr->combiner_ns->code->op == EXEC_ASSIGN)
832 if (!sym->attr.dimension)
835 e3 = gfc_copy_expr (n->udr->combiner_ns->code->expr1);
836 e4 = gfc_copy_expr (n->udr->combiner_ns->code->expr2);
837 t = gfc_resolve_expr (e3);
839 t = gfc_resolve_expr (e4);
/* MIN/MAX/IAND/IOR/IEOR reductions combine via the intrinsic named
   iname, called through a fake intrinsic symbol.  */
849 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
850 intrinsic_sym.ns = sym->ns;
851 intrinsic_sym.name = iname;
852 intrinsic_sym.ts = sym->ts;
853 intrinsic_sym.attr.referenced = 1;
854 intrinsic_sym.attr.intrinsic = 1;
855 intrinsic_sym.attr.function = 1;
856 intrinsic_sym.result = &intrinsic_sym;
857 intrinsic_sym.declared_at = where;
859 symtree4 = gfc_new_symtree (&root4, iname);
860 symtree4->n.sym = &intrinsic_sym;
861 gcc_assert (symtree4 == root4);
863 e4 = gfc_get_expr ();
864 e4->expr_type = EXPR_FUNCTION;
866 e4->symtree = symtree4;
867 e4->value.function.isym = gfc_find_function (iname);
868 e4->value.function.actual = gfc_get_actual_arglist ();
869 e4->value.function.actual->expr = e3;
870 e4->value.function.actual->next = gfc_get_actual_arglist ();
871 e4->value.function.actual->next->expr = e1;
873 if (OMP_CLAUSE_REDUCTION_CODE (c) != ERROR_MARK)
875 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
876 e1 = gfc_copy_expr (e1);
877 e3 = gfc_copy_expr (e3);
878 t = gfc_resolve_expr (e4);
882 /* Create the init statement list. */
884 if (sym->attr.dimension
885 && GFC_DESCRIPTOR_TYPE_P (type)
886 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
888 /* If decl is an allocatable array, it needs to be allocated
889 with the same bounds as the outer var. */
890 tree rank, size, esize, ptr;
893 gfc_start_block (&block);
/* Same bounds/stride byte-size computation as the clause ctors above.  */
895 gfc_add_modify (&block, decl, outer_sym.backend_decl);
896 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
897 size = gfc_conv_descriptor_ubound_get (decl, rank);
898 size = fold_build2_loc (input_location, MINUS_EXPR,
899 gfc_array_index_type, size,
900 gfc_conv_descriptor_lbound_get (decl, rank));
901 size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
902 size, gfc_index_one_node);
903 if (GFC_TYPE_ARRAY_RANK (type) > 1)
904 size = fold_build2_loc (input_location, MULT_EXPR,
905 gfc_array_index_type, size,
906 gfc_conv_descriptor_stride_get (decl, rank));
907 esize = fold_convert (gfc_array_index_type,
908 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
909 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
911 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
913 ptr = gfc_create_var (pvoid_type_node, NULL);
914 gfc_allocate_using_malloc (&block, ptr, size, NULL_TREE);
915 gfc_conv_descriptor_data_set (&block, decl, ptr);
918 stmt = gfc_trans_assignment (e1, e2, false, false);
920 stmt = gfc_trans_omp_udr_expr (n, true, e1, e3);
921 gfc_add_expr_to_block (&block, stmt);
922 stmt = gfc_finish_block (&block);
925 stmt = gfc_trans_assignment (e1, e2, false, false);
926 else if (sym->attr.dimension)
927 stmt = gfc_trans_omp_udr_expr (n, true, e1, e3);
929 stmt = gfc_trans_call (n->udr->initializer_ns->code, false,
930 NULL_TREE, NULL_TREE, false);
931 if (TREE_CODE (stmt) != BIND_EXPR)
932 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
935 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
937 /* Create the merge statement list. */
939 if (sym->attr.dimension
940 && GFC_DESCRIPTOR_TYPE_P (type)
941 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
943 /* If decl is an allocatable array, it needs to be deallocated
947 gfc_start_block (&block);
949 stmt = gfc_trans_assignment (e3, e4, false, true);
951 stmt = gfc_trans_omp_udr_expr (n, false, e1, e3);
952 gfc_add_expr_to_block (&block, stmt);
953 gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl, false,
955 stmt = gfc_finish_block (&block);
958 stmt = gfc_trans_assignment (e3, e4, false, true);
959 else if (sym->attr.dimension)
960 stmt = gfc_trans_omp_udr_expr (n, false, e1, e3);
962 stmt = gfc_trans_call (n->udr->combiner_ns->code, false,
963 NULL_TREE, NULL_TREE, false);
964 if (TREE_CODE (stmt) != BIND_EXPR)
965 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
968 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
970 /* And stick the placeholder VAR_DECL into the clause as well. */
971 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_decl;
/* Restore global state and the clobbered UDR placeholder symbols.  */
973 gfc_current_locus = old_loc;
986 gfc_free_array_spec (outer_sym.as);
990 *n->udr->omp_out = omp_var_copy[0];
991 *n->udr->omp_in = omp_var_copy[1];
992 if (n->udr->initializer_ns)
994 *n->udr->omp_priv = omp_var_copy[2];
995 *n->udr->omp_orig = omp_var_copy[3];
/* Build OMP_CLAUSE_REDUCTION clauses for every referenced symbol in
   NAMELIST, mapping the Fortran reduction operator to a tree code, and
   chain them onto LIST.  NOTE(review): excerpt — break statements and
   some case labels are not visible here.  */
1001 gfc_trans_omp_reduction_list (gfc_omp_namelist *namelist, tree list,
1004 for (; namelist != NULL; namelist = namelist->next)
1005 if (namelist->sym->attr.referenced)
1007 tree t = gfc_trans_omp_variable (namelist->sym, false);
1008 if (t != error_mark_node)
1010 tree node = build_omp_clause (where.lb->location,
1011 OMP_CLAUSE_REDUCTION);
1012 OMP_CLAUSE_DECL (node) = t;
/* Map the gfortran reduction operator to the middle-end tree code;
   OMP_REDUCTION_USER is flagged with ERROR_MARK.  */
1013 switch (namelist->rop)
1015 case OMP_REDUCTION_PLUS:
1016 OMP_CLAUSE_REDUCTION_CODE (node) = PLUS_EXPR;
1018 case OMP_REDUCTION_MINUS:
1019 OMP_CLAUSE_REDUCTION_CODE (node) = MINUS_EXPR;
1021 case OMP_REDUCTION_TIMES:
1022 OMP_CLAUSE_REDUCTION_CODE (node) = MULT_EXPR;
1024 case OMP_REDUCTION_AND:
1025 OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ANDIF_EXPR;
1027 case OMP_REDUCTION_OR:
1028 OMP_CLAUSE_REDUCTION_CODE (node) = TRUTH_ORIF_EXPR;
1030 case OMP_REDUCTION_EQV:
1031 OMP_CLAUSE_REDUCTION_CODE (node) = EQ_EXPR;
1033 case OMP_REDUCTION_NEQV:
1034 OMP_CLAUSE_REDUCTION_CODE (node) = NE_EXPR;
1036 case OMP_REDUCTION_MAX:
1037 OMP_CLAUSE_REDUCTION_CODE (node) = MAX_EXPR;
1039 case OMP_REDUCTION_MIN:
1040 OMP_CLAUSE_REDUCTION_CODE (node) = MIN_EXPR;
1042 case OMP_REDUCTION_IAND:
1043 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_AND_EXPR;
1045 case OMP_REDUCTION_IOR:
1046 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_IOR_EXPR;
1048 case OMP_REDUCTION_IEOR:
1049 OMP_CLAUSE_REDUCTION_CODE (node) = BIT_XOR_EXPR;
1051 case OMP_REDUCTION_USER:
1052 OMP_CLAUSE_REDUCTION_CODE (node) = ERROR_MARK;
/* Array and user-defined reductions need INIT/MERGE statements.  */
1057 if (namelist->sym->attr.dimension
1058 || namelist->rop == OMP_REDUCTION_USER)
1059 gfc_trans_omp_array_reduction_or_udr (node, namelist, where);
1060 list = gfc_trans_add_clause (node, list);
1067 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
1068 locus where, bool declare_simd = false)
1070 tree omp_clauses = NULL_TREE, chunk_size, c;
1072 enum omp_clause_code clause_code;
1075 if (clauses == NULL)
1078 for (list = 0; list < OMP_LIST_NUM; list++)
1080 gfc_omp_namelist *n = clauses->lists[list];
1086 case OMP_LIST_REDUCTION:
1087 omp_clauses = gfc_trans_omp_reduction_list (n, omp_clauses, where);
1089 case OMP_LIST_PRIVATE:
1090 clause_code = OMP_CLAUSE_PRIVATE;
1092 case OMP_LIST_SHARED:
1093 clause_code = OMP_CLAUSE_SHARED;
1095 case OMP_LIST_FIRSTPRIVATE:
1096 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
1098 case OMP_LIST_LASTPRIVATE:
1099 clause_code = OMP_CLAUSE_LASTPRIVATE;
1101 case OMP_LIST_COPYIN:
1102 clause_code = OMP_CLAUSE_COPYIN;
1104 case OMP_LIST_COPYPRIVATE:
1105 clause_code = OMP_CLAUSE_COPYPRIVATE;
1107 case OMP_LIST_UNIFORM:
1108 clause_code = OMP_CLAUSE_UNIFORM;
1112 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses,
1115 case OMP_LIST_ALIGNED:
1116 for (; n != NULL; n = n->next)
1117 if (n->sym->attr.referenced || declare_simd)
1119 tree t = gfc_trans_omp_variable (n->sym, declare_simd);
1120 if (t != error_mark_node)
1122 tree node = build_omp_clause (input_location,
1123 OMP_CLAUSE_ALIGNED);
1124 OMP_CLAUSE_DECL (node) = t;
1130 alignment_var = gfc_conv_constant_to_tree (n->expr);
1133 gfc_init_se (&se, NULL);
1134 gfc_conv_expr (&se, n->expr);
1135 gfc_add_block_to_block (block, &se.pre);
1136 alignment_var = gfc_evaluate_now (se.expr, block);
1137 gfc_add_block_to_block (block, &se.post);
1139 OMP_CLAUSE_ALIGNED_ALIGNMENT (node) = alignment_var;
1141 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1145 case OMP_LIST_LINEAR:
1147 gfc_expr *last_step_expr = NULL;
1148 tree last_step = NULL_TREE;
1150 for (; n != NULL; n = n->next)
1154 last_step_expr = n->expr;
1155 last_step = NULL_TREE;
1157 if (n->sym->attr.referenced || declare_simd)
1159 tree t = gfc_trans_omp_variable (n->sym, declare_simd);
1160 if (t != error_mark_node)
1162 tree node = build_omp_clause (input_location,
1164 OMP_CLAUSE_DECL (node) = t;
1165 if (last_step_expr && last_step == NULL_TREE)
1169 = gfc_conv_constant_to_tree (last_step_expr);
1172 gfc_init_se (&se, NULL);
1173 gfc_conv_expr (&se, last_step_expr);
1174 gfc_add_block_to_block (block, &se.pre);
1175 last_step = gfc_evaluate_now (se.expr, block);
1176 gfc_add_block_to_block (block, &se.post);
1179 OMP_CLAUSE_LINEAR_STEP (node) = last_step;
1180 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1186 case OMP_LIST_DEPEND_IN:
1187 case OMP_LIST_DEPEND_OUT:
1188 for (; n != NULL; n = n->next)
1190 if (!n->sym->attr.referenced)
1193 tree node = build_omp_clause (input_location, OMP_CLAUSE_DEPEND);
1194 if (n->expr == NULL || n->expr->ref->u.ar.type == AR_FULL)
1196 OMP_CLAUSE_DECL (node) = gfc_get_symbol_decl (n->sym);
1197 if (DECL_P (OMP_CLAUSE_DECL (node)))
1198 TREE_ADDRESSABLE (OMP_CLAUSE_DECL (node)) = 1;
1203 gfc_init_se (&se, NULL);
1204 if (n->expr->ref->u.ar.type == AR_ELEMENT)
1206 gfc_conv_expr_reference (&se, n->expr);
1211 gfc_conv_expr_descriptor (&se, n->expr);
1212 ptr = gfc_conv_array_data (se.expr);
1214 gfc_add_block_to_block (block, &se.pre);
1215 gfc_add_block_to_block (block, &se.post);
1216 OMP_CLAUSE_DECL (node)
1217 = fold_build1_loc (input_location, INDIRECT_REF,
1218 TREE_TYPE (TREE_TYPE (ptr)), ptr);
1220 OMP_CLAUSE_DEPEND_KIND (node)
1221 = ((list == OMP_LIST_DEPEND_IN)
1222 ? OMP_CLAUSE_DEPEND_IN : OMP_CLAUSE_DEPEND_OUT);
1223 omp_clauses = gfc_trans_add_clause (node, omp_clauses);
1231 if (clauses->if_expr)
1235 gfc_init_se (&se, NULL);
1236 gfc_conv_expr (&se, clauses->if_expr);
1237 gfc_add_block_to_block (block, &se.pre);
1238 if_var = gfc_evaluate_now (se.expr, block);
1239 gfc_add_block_to_block (block, &se.post);
1241 c = build_omp_clause (where.lb->location, OMP_CLAUSE_IF);
1242 OMP_CLAUSE_IF_EXPR (c) = if_var;
1243 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1246 if (clauses->final_expr)
1250 gfc_init_se (&se, NULL);
1251 gfc_conv_expr (&se, clauses->final_expr);
1252 gfc_add_block_to_block (block, &se.pre);
1253 final_var = gfc_evaluate_now (se.expr, block);
1254 gfc_add_block_to_block (block, &se.post);
1256 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FINAL);
1257 OMP_CLAUSE_FINAL_EXPR (c) = final_var;
1258 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1261 if (clauses->num_threads)
1265 gfc_init_se (&se, NULL);
1266 gfc_conv_expr (&se, clauses->num_threads);
1267 gfc_add_block_to_block (block, &se.pre);
1268 num_threads = gfc_evaluate_now (se.expr, block);
1269 gfc_add_block_to_block (block, &se.post);
1271 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NUM_THREADS);
1272 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
1273 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1276 chunk_size = NULL_TREE;
1277 if (clauses->chunk_size)
1279 gfc_init_se (&se, NULL);
1280 gfc_conv_expr (&se, clauses->chunk_size);
1281 gfc_add_block_to_block (block, &se.pre);
1282 chunk_size = gfc_evaluate_now (se.expr, block);
1283 gfc_add_block_to_block (block, &se.post);
1286 if (clauses->sched_kind != OMP_SCHED_NONE)
1288 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SCHEDULE);
1289 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
1290 switch (clauses->sched_kind)
1292 case OMP_SCHED_STATIC:
1293 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
1295 case OMP_SCHED_DYNAMIC:
1296 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
1298 case OMP_SCHED_GUIDED:
1299 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
1301 case OMP_SCHED_RUNTIME:
1302 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
1304 case OMP_SCHED_AUTO:
1305 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
1310 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1313 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
1315 c = build_omp_clause (where.lb->location, OMP_CLAUSE_DEFAULT);
1316 switch (clauses->default_sharing)
1318 case OMP_DEFAULT_NONE:
1319 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
1321 case OMP_DEFAULT_SHARED:
1322 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
1324 case OMP_DEFAULT_PRIVATE:
1325 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
1327 case OMP_DEFAULT_FIRSTPRIVATE:
1328 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
1333 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1336 if (clauses->nowait)
1338 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOWAIT);
1339 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1342 if (clauses->ordered)
1344 c = build_omp_clause (where.lb->location, OMP_CLAUSE_ORDERED);
1345 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1348 if (clauses->untied)
1350 c = build_omp_clause (where.lb->location, OMP_CLAUSE_UNTIED);
1351 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1354 if (clauses->mergeable)
1356 c = build_omp_clause (where.lb->location, OMP_CLAUSE_MERGEABLE);
1357 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1360 if (clauses->collapse)
1362 c = build_omp_clause (where.lb->location, OMP_CLAUSE_COLLAPSE);
1363 OMP_CLAUSE_COLLAPSE_EXPR (c)
1364 = build_int_cst (integer_type_node, clauses->collapse);
1365 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1368 if (clauses->inbranch)
1370 c = build_omp_clause (where.lb->location, OMP_CLAUSE_INBRANCH);
1371 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1374 if (clauses->notinbranch)
1376 c = build_omp_clause (where.lb->location, OMP_CLAUSE_NOTINBRANCH);
1377 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1380 switch (clauses->cancel)
1382 case OMP_CANCEL_UNKNOWN:
1384 case OMP_CANCEL_PARALLEL:
1385 c = build_omp_clause (where.lb->location, OMP_CLAUSE_PARALLEL);
1386 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1388 case OMP_CANCEL_SECTIONS:
1389 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SECTIONS);
1390 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1393 c = build_omp_clause (where.lb->location, OMP_CLAUSE_FOR);
1394 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1396 case OMP_CANCEL_TASKGROUP:
1397 c = build_omp_clause (where.lb->location, OMP_CLAUSE_TASKGROUP);
1398 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1402 if (clauses->proc_bind != OMP_PROC_BIND_UNKNOWN)
1404 c = build_omp_clause (where.lb->location, OMP_CLAUSE_PROC_BIND);
1405 switch (clauses->proc_bind)
1407 case OMP_PROC_BIND_MASTER:
1408 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_MASTER;
1410 case OMP_PROC_BIND_SPREAD:
1411 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_SPREAD;
1413 case OMP_PROC_BIND_CLOSE:
1414 OMP_CLAUSE_PROC_BIND_KIND (c) = OMP_CLAUSE_PROC_BIND_CLOSE;
1419 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1422 if (clauses->safelen_expr)
1426 gfc_init_se (&se, NULL);
1427 gfc_conv_expr (&se, clauses->safelen_expr);
1428 gfc_add_block_to_block (block, &se.pre);
1429 safelen_var = gfc_evaluate_now (se.expr, block);
1430 gfc_add_block_to_block (block, &se.post);
1432 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SAFELEN);
1433 OMP_CLAUSE_SAFELEN_EXPR (c) = safelen_var;
1434 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1437 if (clauses->simdlen_expr)
1439 c = build_omp_clause (where.lb->location, OMP_CLAUSE_SIMDLEN);
1440 OMP_CLAUSE_SIMDLEN_EXPR (c)
1441 = gfc_conv_constant_to_tree (clauses->simdlen_expr);
1442 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
1448 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
1451 gfc_trans_omp_code (gfc_code *code, bool force_empty)
1456 stmt = gfc_trans_code (code);
1457 if (TREE_CODE (stmt) != BIND_EXPR)
1459 if (!IS_EMPTY_STMT (stmt) || force_empty)
1461 tree block = poplevel (1, 0);
1462 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
1473 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
1474 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
/* Translate an !$omp atomic construct (READ, WRITE, UPDATE or CAPTURE,
   optionally SEQ_CST) into OMP_ATOMIC* tree nodes.
   NOTE(review): this extract is elided -- braces, 'break' statements and
   several case labels are missing from the visible text.  Comments below
   describe only what the visible lines establish.  */
1477 gfc_trans_omp_atomic (gfc_code *code)
1479 gfc_code *atomic_code = code;
1483 gfc_expr *expr2, *e;
1486 tree lhsaddr, type, rhs, x;
1487 enum tree_code op = ERROR_MARK;
1488 enum tree_code aop = OMP_ATOMIC;
1489 bool var_on_left = false;
/* SEQ_CST on the directive makes every emitted OMP_ATOMIC* node
   sequentially consistent (OMP_ATOMIC_SEQ_CST set below).  */
1490 bool seq_cst = (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SEQ_CST) != 0;
/* The construct's body is a single assignment; VAR is its LHS symbol.  */
1492 code = code->block->next;
1493 gcc_assert (code->op == EXEC_ASSIGN);
1494 var = code->expr1->symtree->n.sym;
1496 gfc_init_se (&lse, NULL);
1497 gfc_init_se (&rse, NULL);
1498 gfc_init_se (&vse, NULL);
1499 gfc_start_block (&block);
/* Strip a front-end-inserted conversion wrapper off the RHS.  */
1501 expr2 = code->expr2;
1502 if (expr2->expr_type == EXPR_FUNCTION
1503 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
1504 expr2 = expr2->value.function.actual->expr;
/* Dispatch on the atomic kind encoded in the directive.  */
1506 switch (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
/* atomic read: v = x -- load x atomically, then assign into v.  */
1508 case GFC_OMP_ATOMIC_READ:
1509 gfc_conv_expr (&vse, code->expr1);
1510 gfc_add_block_to_block (&block, &vse.pre);
1512 gfc_conv_expr (&lse, expr2);
1513 gfc_add_block_to_block (&block, &lse.pre);
1514 type = TREE_TYPE (lse.expr);
1515 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
1517 x = build1 (OMP_ATOMIC_READ, type, lhsaddr);
1518 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
1519 x = convert (TREE_TYPE (vse.expr), x);
1520 gfc_add_modify (&block, vse.expr, x);
1522 gfc_add_block_to_block (&block, &lse.pre);
1523 gfc_add_block_to_block (&block, &rse.pre);
1525 return gfc_finish_block (&block);
/* atomic capture: v = x update -- when the first statement is a plain
   v = x, the old value is captured; then fall through to translate the
   update statement with VAR/EXPR2 re-derived from that statement.  */
1526 case GFC_OMP_ATOMIC_CAPTURE:
1527 aop = OMP_ATOMIC_CAPTURE_NEW;
1528 if (expr2->expr_type == EXPR_VARIABLE)
1530 aop = OMP_ATOMIC_CAPTURE_OLD;
1531 gfc_conv_expr (&vse, code->expr1);
1532 gfc_add_block_to_block (&block, &vse.pre);
1534 gfc_conv_expr (&lse, expr2);
1535 gfc_add_block_to_block (&block, &lse.pre);
1536 gfc_init_se (&lse, NULL);
1538 var = code->expr1->symtree->n.sym;
1539 expr2 = code->expr2;
1540 if (expr2->expr_type == EXPR_FUNCTION
1541 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
1542 expr2 = expr2->value.function.actual->expr;
/* Common path: translate the LHS of the update and take its address.  */
1549 gfc_conv_expr (&lse, code->expr1);
1550 gfc_add_block_to_block (&block, &lse.pre);
1551 type = TREE_TYPE (lse.expr);
1552 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
/* atomic write (or capture-swap): the RHS is taken as-is.  */
1554 if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
1555 == GFC_OMP_ATOMIC_WRITE)
1556 || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
1558 gfc_conv_expr (&rse, expr2);
1559 gfc_add_block_to_block (&block, &rse.pre);
/* atomic update of the form x = x op expr or x = expr op x: map the
   Fortran intrinsic operator to a tree code.  */
1561 else if (expr2->expr_type == EXPR_OP)
1564 switch (expr2->value.op.op)
1566 case INTRINSIC_PLUS:
1569 case INTRINSIC_TIMES:
1572 case INTRINSIC_MINUS:
1575 case INTRINSIC_DIVIDE:
1576 if (expr2->ts.type == BT_INTEGER)
1577 op = TRUNC_DIV_EXPR;
1582 op = TRUTH_ANDIF_EXPR;
1585 op = TRUTH_ORIF_EXPR;
1590 case INTRINSIC_NEQV:
/* Locate which operand is VAR (x); the other becomes the RHS.  */
1596 e = expr2->value.op.op1;
1597 if (e->expr_type == EXPR_FUNCTION
1598 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1599 e = e->value.function.actual->expr;
1600 if (e->expr_type == EXPR_VARIABLE
1601 && e->symtree != NULL
1602 && e->symtree->n.sym == var)
1604 expr2 = expr2->value.op.op2;
1609 e = expr2->value.op.op2;
1610 if (e->expr_type == EXPR_FUNCTION
1611 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1612 e = e->value.function.actual->expr;
1613 gcc_assert (e->expr_type == EXPR_VARIABLE
1614 && e->symtree != NULL
1615 && e->symtree->n.sym == var);
1616 expr2 = expr2->value.op.op1;
1617 var_on_left = false;
1619 gfc_conv_expr (&rse, expr2);
1620 gfc_add_block_to_block (&block, &rse.pre);
/* atomic update via an intrinsic (MIN/MAX/IAND/... -- case labels
   elided from this extract); first argument must be VAR itself.  */
1624 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
1625 switch (expr2->value.function.isym->id)
1645 e = expr2->value.function.actual->expr;
1646 gcc_assert (e->expr_type == EXPR_VARIABLE
1647 && e->symtree != NULL
1648 && e->symtree->n.sym == var);
1650 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
1651 gfc_add_block_to_block (&block, &rse.pre);
/* More than two intrinsic arguments: fold the extra ones into an
   accumulator outside the atomic region.  */
1652 if (expr2->value.function.actual->next->next != NULL)
1654 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
1655 gfc_actual_arglist *arg;
1657 gfc_add_modify (&block, accum, rse.expr);
1658 for (arg = expr2->value.function.actual->next->next; arg;
1661 gfc_init_block (&rse.pre);
1662 gfc_conv_expr (&rse, arg->expr);
1663 gfc_add_block_to_block (&block, &rse.pre);
1664 x = fold_build2_loc (input_location, op, TREE_TYPE (accum),
1666 gfc_add_modify (&block, accum, x);
1672 expr2 = expr2->value.function.actual->next->expr;
/* Build the update expression x' = x op rhs (or rhs op x), keeping
   LHSADDR shared between the load and the store via save_expr.  */
1675 lhsaddr = save_expr (lhsaddr);
1676 rhs = gfc_evaluate_now (rse.expr, &block);
1678 if (((atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_MASK)
1679 == GFC_OMP_ATOMIC_WRITE)
1680 || (atomic_code->ext.omp_atomic & GFC_OMP_ATOMIC_SWAP))
1684 x = convert (TREE_TYPE (rhs),
1685 build_fold_indirect_ref_loc (input_location, lhsaddr))
1687 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), x, rhs);
1689 x = fold_build2_loc (input_location, op, TREE_TYPE (rhs), rhs, x);
/* A complex RHS combined with a real x needs its real part only.  */
1692 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
1693 && TREE_CODE (type) != COMPLEX_TYPE)
1694 x = fold_build1_loc (input_location, REALPART_EXPR,
1695 TREE_TYPE (TREE_TYPE (rhs)), x);
1697 gfc_add_block_to_block (&block, &lse.pre);
1698 gfc_add_block_to_block (&block, &rse.pre);
/* Plain update: emit a statement-level OMP_ATOMIC.  */
1700 if (aop == OMP_ATOMIC)
1702 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
1703 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
1704 gfc_add_expr_to_block (&block, x);
/* Capture-new: the v = x statement follows the update statement, so
   re-read it from CODE and translate its operands now.  */
1708 if (aop == OMP_ATOMIC_CAPTURE_NEW)
1711 expr2 = code->expr2;
1712 if (expr2->expr_type == EXPR_FUNCTION
1713 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
1714 expr2 = expr2->value.function.actual->expr;
1716 gcc_assert (expr2->expr_type == EXPR_VARIABLE);
1717 gfc_conv_expr (&vse, code->expr1);
1718 gfc_add_block_to_block (&block, &vse.pre);
1720 gfc_init_se (&lse, NULL);
1721 gfc_conv_expr (&lse, expr2);
1722 gfc_add_block_to_block (&block, &lse.pre);
/* Capture: the OMP_ATOMIC_CAPTURE_{OLD,NEW} node is a value, stored
   into v.  */
1724 x = build2 (aop, type, lhsaddr, convert (type, x));
1725 OMP_ATOMIC_SEQ_CST (x) = seq_cst;
1726 x = convert (TREE_TYPE (vse.expr), x);
1727 gfc_add_modify (&block, vse.expr, x);
1730 return gfc_finish_block (&block);
1734 gfc_trans_omp_barrier (void)
1736 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
1737 return build_call_expr_loc (input_location, decl, 0);
1741 gfc_trans_omp_cancel (gfc_code *code)
1744 tree ifc = boolean_true_node;
1746 switch (code->ext.omp_clauses->cancel)
1748 case OMP_CANCEL_PARALLEL: mask = 1; break;
1749 case OMP_CANCEL_DO: mask = 2; break;
1750 case OMP_CANCEL_SECTIONS: mask = 4; break;
1751 case OMP_CANCEL_TASKGROUP: mask = 8; break;
1752 default: gcc_unreachable ();
1754 gfc_start_block (&block);
1755 if (code->ext.omp_clauses->if_expr)
1760 gfc_init_se (&se, NULL);
1761 gfc_conv_expr (&se, code->ext.omp_clauses->if_expr);
1762 gfc_add_block_to_block (&block, &se.pre);
1763 if_var = gfc_evaluate_now (se.expr, &block);
1764 gfc_add_block_to_block (&block, &se.post);
1765 tree type = TREE_TYPE (if_var);
1766 ifc = fold_build2_loc (input_location, NE_EXPR,
1767 boolean_type_node, if_var,
1768 build_zero_cst (type));
1770 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
1771 tree c_bool_type = TREE_TYPE (TREE_TYPE (decl));
1772 ifc = fold_convert (c_bool_type, ifc);
1773 gfc_add_expr_to_block (&block,
1774 build_call_expr_loc (input_location, decl, 2,
1775 build_int_cst (integer_type_node,
1777 return gfc_finish_block (&block);
1781 gfc_trans_omp_cancellation_point (gfc_code *code)
1784 switch (code->ext.omp_clauses->cancel)
1786 case OMP_CANCEL_PARALLEL: mask = 1; break;
1787 case OMP_CANCEL_DO: mask = 2; break;
1788 case OMP_CANCEL_SECTIONS: mask = 4; break;
1789 case OMP_CANCEL_TASKGROUP: mask = 8; break;
1790 default: gcc_unreachable ();
1792 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT);
1793 return build_call_expr_loc (input_location, decl, 1,
1794 build_int_cst (integer_type_node, mask));
1798 gfc_trans_omp_critical (gfc_code *code)
1800 tree name = NULL_TREE, stmt;
1801 if (code->ext.omp_name != NULL)
1802 name = get_identifier (code->ext.omp_name);
1803 stmt = gfc_trans_code (code->block->next);
1804 return build2_loc (input_location, OMP_CRITICAL, void_type_node, stmt, name);
1807 typedef struct dovar_init_d {
/* Translate a (possibly collapsed) !$omp do / !$omp simd loop nest into
   an OMP_FOR or OMP_SIMD node with per-loop init/cond/incr vectors.
   PBLOCK receives clause side effects; PAR_CLAUSES are the clauses of an
   enclosing combined parallel, used to fix up lastprivate.
   NOTE(review): this extract is elided -- braces, 'break's and several
   statements are missing from the visible text; comments describe only
   what the visible lines establish.  */
1814 gfc_trans_omp_do (gfc_code *code, gfc_exec_op op, stmtblock_t *pblock,
1815 gfc_omp_clauses *do_clauses, tree par_clauses)
1818 tree dovar, stmt, from, to, step, type, init, cond, incr;
1819 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
1822 gfc_omp_clauses *clauses = code->ext.omp_clauses;
1823 int i, collapse = clauses->collapse;
1824 vec<dovar_init> inits = vNULL;
1831 code = code->block->next;
1832 gcc_assert (code->op == EXEC_DO);
/* One init/cond/incr slot per collapsed loop level.  */
1834 init = make_tree_vec (collapse);
1835 cond = make_tree_vec (collapse);
1836 incr = make_tree_vec (collapse);
1840 gfc_start_block (&block);
1844 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
1846 for (i = 0; i < collapse; i++)
1849 int dovar_found = 0;
/* Check whether the do-variable already appears on a linear/
   lastprivate clause (or on private, for non-simd).  */
1854 gfc_omp_namelist *n;
1855 for (n = clauses->lists[(op == EXEC_OMP_SIMD && collapse == 1)
1856 ? OMP_LIST_LINEAR : OMP_LIST_LASTPRIVATE];
1857 n != NULL; n = n->next)
1858 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1862 else if (n == NULL && op != EXEC_OMP_SIMD)
1863 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
1864 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1870 /* Evaluate all the expressions in the iterator. */
1871 gfc_init_se (&se, NULL);
1872 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1873 gfc_add_block_to_block (pblock, &se.pre);
1875 type = TREE_TYPE (dovar);
1876 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
1878 gfc_init_se (&se, NULL);
1879 gfc_conv_expr_val (&se, code->ext.iterator->start);
1880 gfc_add_block_to_block (pblock, &se.pre);
1881 from = gfc_evaluate_now (se.expr, pblock);
1883 gfc_init_se (&se, NULL);
1884 gfc_conv_expr_val (&se, code->ext.iterator->end);
1885 gfc_add_block_to_block (pblock, &se.pre);
1886 to = gfc_evaluate_now (se.expr, pblock);
1888 gfc_init_se (&se, NULL);
1889 gfc_conv_expr_val (&se, code->ext.iterator->step);
1890 gfc_add_block_to_block (pblock, &se.pre);
1891 step = gfc_evaluate_now (se.expr, pblock);
1894 /* Special case simple loops. */
1895 if (TREE_CODE (dovar) == VAR_DECL)
1897 if (integer_onep (step))
1899 else if (tree_int_cst_equal (step, integer_minus_one_node))
1904 = gfc_trans_omp_variable (code->ext.iterator->var->symtree->n.sym,
/* Simple case (step 1 or -1): iterate dovar directly.  */
1910 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
1911 /* The condition should not be folded. */
1912 TREE_VEC_ELT (cond, i) = build2_loc (input_location, simple > 0
1913 ? LE_EXPR : GE_EXPR,
1914 boolean_type_node, dovar, to);
1915 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
1917 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
1920 TREE_VEC_ELT (incr, i));
1924 /* STEP is not 1 or -1. Use:
1925 for (count = 0; count < (to + step - from) / step; count++)
1927 dovar = from + count * step;
1931 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, step, from);
1932 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, to, tmp);
1933 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, type, tmp,
1935 tmp = gfc_evaluate_now (tmp, pblock);
1936 count = gfc_create_var (type, "count");
1937 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
1938 build_int_cst (type, 0));
1939 /* The condition should not be folded. */
1940 TREE_VEC_ELT (cond, i) = build2_loc (input_location, LT_EXPR,
1943 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location, PLUS_EXPR,
1945 build_int_cst (type, 1));
1946 TREE_VEC_ELT (incr, i) = fold_build2_loc (input_location,
1947 MODIFY_EXPR, type, count,
1948 TREE_VEC_ELT (incr, i));
1950 /* Initialize DOVAR. */
1951 tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
1952 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
1953 dovar_init e = {dovar, tmp};
1954 inits.safe_push (e);
/* The do-variable was not on any clause: add an implicit linear
   (simd) or private clause for it.  */
1959 if (op == EXEC_OMP_SIMD)
1963 tmp = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
1964 OMP_CLAUSE_LINEAR_STEP (tmp) = step;
1967 tmp = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
1972 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
1973 OMP_CLAUSE_DECL (tmp) = dovar_decl;
1974 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1976 if (dovar_found == 2)
1983 /* If dovar is lastprivate, but different counter is used,
1984 dovar += step needs to be added to
1985 OMP_CLAUSE_LASTPRIVATE_STMT, otherwise the copied dovar
1986 will have the value on entry of the last loop, rather
1987 than value after iterator increment. */
1988 tmp = gfc_evaluate_now (step, pblock);
1989 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, dovar,
1991 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type,
1993 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1994 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1995 && OMP_CLAUSE_DECL (c) == dovar_decl)
1997 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
2000 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
2001 && OMP_CLAUSE_DECL (c) == dovar_decl)
2003 OMP_CLAUSE_LINEAR_STMT (c) = tmp;
/* Not found on this construct: look on the enclosing parallel's
   clauses and move the lastprivate handling there.  */
2007 if (c == NULL && op == EXEC_OMP_DO && par_clauses != NULL)
2009 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2010 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
2011 && OMP_CLAUSE_DECL (c) == dovar_decl)
2013 tree l = build_omp_clause (input_location,
2014 OMP_CLAUSE_LASTPRIVATE);
2015 OMP_CLAUSE_DECL (l) = dovar_decl;
2016 OMP_CLAUSE_CHAIN (l) = omp_clauses;
2017 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
2019 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
2023 gcc_assert (simple || c != NULL);
/* Non-simple loops use a synthesized COUNT variable; privatize it
   (or make it linear for a single simd loop).  */
2027 if (op != EXEC_OMP_SIMD)
2028 tmp = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
2029 else if (collapse == 1)
2031 tmp = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
2032 OMP_CLAUSE_LINEAR_STEP (tmp) = step;
2033 OMP_CLAUSE_LINEAR_NO_COPYIN (tmp) = 1;
2034 OMP_CLAUSE_LINEAR_NO_COPYOUT (tmp) = 1;
2037 tmp = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
2038 OMP_CLAUSE_DECL (tmp) = count;
2039 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
/* Advance to the next nested DO for the following collapse level.  */
2042 if (i + 1 < collapse)
2043 code = code->block->next;
2046 if (pblock != &block)
2049 gfc_start_block (&block);
/* Assemble the loop body: dovar initializations first, then the
   translated Fortran body.  */
2052 gfc_start_block (&body);
2054 FOR_EACH_VEC_ELT (inits, ix, di)
2055 gfc_add_modify (&body, di->var, di->init);
2058 /* Cycle statement is implemented with a goto. Exit statement must not be
2059 present for this loop. */
2060 cycle_label = gfc_build_label_decl (NULL_TREE);
2062 /* Put these labels where they can be found later. */
2064 code->cycle_label = cycle_label;
2065 code->exit_label = NULL_TREE;
2067 /* Main loop body. */
2068 tmp = gfc_trans_omp_code (code->block->next, true);
2069 gfc_add_expr_to_block (&body, tmp);
2071 /* Label for cycle statements (if needed). */
2072 if (TREE_USED (cycle_label))
2074 tmp = build1_v (LABEL_EXPR, cycle_label);
2075 gfc_add_expr_to_block (&body, tmp);
2078 /* End of loop body. */
/* Emit OMP_SIMD for !$omp simd, OMP_FOR otherwise, and attach the
   per-level init/cond/incr vectors built above.  */
2079 stmt = make_node (op == EXEC_OMP_SIMD ? OMP_SIMD : OMP_FOR);
2081 TREE_TYPE (stmt) = void_type_node;
2082 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
2083 OMP_FOR_CLAUSES (stmt) = omp_clauses;
2084 OMP_FOR_INIT (stmt) = init;
2085 OMP_FOR_COND (stmt) = cond;
2086 OMP_FOR_INCR (stmt) = incr;
2087 gfc_add_expr_to_block (&block, stmt);
2089 return gfc_finish_block (&block);
2093 gfc_trans_omp_flush (void)
2095 tree decl = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
2096 return build_call_expr_loc (input_location, decl, 0);
2100 gfc_trans_omp_master (gfc_code *code)
2102 tree stmt = gfc_trans_code (code->block->next);
2103 if (IS_EMPTY_STMT (stmt))
2105 return build1_v (OMP_MASTER, stmt);
2109 gfc_trans_omp_ordered (gfc_code *code)
2111 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
2115 gfc_trans_omp_parallel (gfc_code *code)
2118 tree stmt, omp_clauses;
2120 gfc_start_block (&block);
2121 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
2123 stmt = gfc_trans_omp_code (code->block->next, true);
2124 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
2126 gfc_add_expr_to_block (&block, stmt);
2127 return gfc_finish_block (&block);
2134 GFC_OMP_SPLIT_PARALLEL,
2140 GFC_OMP_MASK_SIMD = (1 << GFC_OMP_SPLIT_SIMD),
2141 GFC_OMP_MASK_DO = (1 << GFC_OMP_SPLIT_DO),
2142 GFC_OMP_MASK_PARALLEL = (1 << GFC_OMP_SPLIT_PARALLEL)
/* Distribute the clauses of a combined construct (do simd, parallel do,
   parallel do simd) onto the constituent constructs in CLAUSESA, indexed
   by GFC_OMP_SPLIT_*.
   NOTE(review): this extract is elided -- the switch head, 'break's and
   braces are missing from the visible text; comments describe only what
   the visible lines establish.  */
2146 gfc_split_omp_clauses (gfc_code *code,
2147 gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM])
2149 int mask = 0, innermost = 0;
2150 memset (clausesa, 0, GFC_OMP_SPLIT_NUM * sizeof (gfc_omp_clauses));
/* MASK records which constituent constructs exist; INNERMOST is the
   one closest to the loop.  */
2153 case EXEC_OMP_DO_SIMD:
2154 mask = GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
2155 innermost = GFC_OMP_SPLIT_SIMD;
2157 case EXEC_OMP_PARALLEL_DO:
2158 mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO;
2159 innermost = GFC_OMP_SPLIT_DO;
2161 case EXEC_OMP_PARALLEL_DO_SIMD:
2162 mask = GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO | GFC_OMP_MASK_SIMD;
2163 innermost = GFC_OMP_SPLIT_SIMD;
2168 if (code->ext.omp_clauses != NULL)
2170 if (mask & GFC_OMP_MASK_PARALLEL)
2172 /* First the clauses that are unique to some constructs. */
2173 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_COPYIN]
2174 = code->ext.omp_clauses->lists[OMP_LIST_COPYIN]
2175 clausesa[GFC_OMP_SPLIT_PARALLEL].num_threads
2176 = code->ext.omp_clauses->num_threads;
2177 clausesa[GFC_OMP_SPLIT_PARALLEL].proc_bind
2178 = code->ext.omp_clauses->proc_bind;
2179 /* Shared and default clauses are allowed on parallel and teams. */
2180 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_SHARED]
2181 = code->ext.omp_clauses->lists[OMP_LIST_SHARED];
2182 clausesa[GFC_OMP_SPLIT_PARALLEL].default_sharing
2183 = code->ext.omp_clauses->default_sharing;
2184 /* FIXME: This is currently being discussed. */
2185 clausesa[GFC_OMP_SPLIT_PARALLEL].if_expr
2186 = code->ext.omp_clauses->if_expr;
2188 if (mask & GFC_OMP_MASK_DO)
2190 /* First the clauses that are unique to some constructs. */
2191 clausesa[GFC_OMP_SPLIT_DO].ordered
2192 = code->ext.omp_clauses->ordered;
2193 clausesa[GFC_OMP_SPLIT_DO].sched_kind
2194 = code->ext.omp_clauses->sched_kind;
2195 clausesa[GFC_OMP_SPLIT_DO].chunk_size
2196 = code->ext.omp_clauses->chunk_size;
2197 clausesa[GFC_OMP_SPLIT_DO].nowait
2198 = code->ext.omp_clauses->nowait;
2199 /* Duplicate collapse. */
2200 clausesa[GFC_OMP_SPLIT_DO].collapse
2201 = code->ext.omp_clauses->collapse;
2203 if (mask & GFC_OMP_MASK_SIMD)
2205 clausesa[GFC_OMP_SPLIT_SIMD].safelen_expr
2206 = code->ext.omp_clauses->safelen_expr;
2207 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LINEAR]
2208 = code->ext.omp_clauses->lists[OMP_LIST_LINEAR];
2209 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_ALIGNED]
2210 = code->ext.omp_clauses->lists[OMP_LIST_ALIGNED];
2211 /* Duplicate collapse. */
2212 clausesa[GFC_OMP_SPLIT_SIMD].collapse
2213 = code->ext.omp_clauses->collapse;
2215 /* Private clause is supported on all constructs but target,
2216 it is enough to put it on the innermost one. For
2217 !$ omp do put it on parallel though,
2218 as that's what we did for OpenMP 3.1. */
2219 clausesa[innermost == GFC_OMP_SPLIT_DO
2220 ? (int) GFC_OMP_SPLIT_PARALLEL
2221 : innermost].lists[OMP_LIST_PRIVATE]
2222 = code->ext.omp_clauses->lists[OMP_LIST_PRIVATE];
2223 /* Firstprivate clause is supported on all constructs but
2224 target and simd. Put it on the outermost of those and
2225 duplicate on parallel. */
2226 if (mask & GFC_OMP_MASK_PARALLEL)
2227 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_FIRSTPRIVATE]
2228 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
2229 else if (mask & GFC_OMP_MASK_DO)
2230 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_FIRSTPRIVATE]
2231 = code->ext.omp_clauses->lists[OMP_LIST_FIRSTPRIVATE];
2232 /* Lastprivate is allowed on do and simd. In
2233 parallel do{, simd} we actually want to put it on
2234 parallel rather than do. */
2235 if (mask & GFC_OMP_MASK_PARALLEL)
2236 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_LASTPRIVATE]
2237 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
2238 else if (mask & GFC_OMP_MASK_DO)
2239 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_LASTPRIVATE]
2240 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
2241 if (mask & GFC_OMP_MASK_SIMD)
2242 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_LASTPRIVATE]
2243 = code->ext.omp_clauses->lists[OMP_LIST_LASTPRIVATE];
2244 /* Reduction is allowed on simd, do, parallel and teams.
2245 Duplicate it on all of them, but omit on do if
2246 parallel is present. */
2247 if (mask & GFC_OMP_MASK_PARALLEL)
2248 clausesa[GFC_OMP_SPLIT_PARALLEL].lists[OMP_LIST_REDUCTION]
2249 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
2250 else if (mask & GFC_OMP_MASK_DO)
2251 clausesa[GFC_OMP_SPLIT_DO].lists[OMP_LIST_REDUCTION]
2252 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
2253 if (mask & GFC_OMP_MASK_SIMD)
2254 clausesa[GFC_OMP_SPLIT_SIMD].lists[OMP_LIST_REDUCTION]
2255 = code->ext.omp_clauses->lists[OMP_LIST_REDUCTION];
/* In a combined parallel do, the worksharing loop must not have its
   own barrier -- the parallel region's join barrier suffices.  */
2257 if ((mask & (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
2258 == (GFC_OMP_MASK_PARALLEL | GFC_OMP_MASK_DO))
2259 clausesa[GFC_OMP_SPLIT_DO].nowait = true;
/* Translate a combined !$omp do simd: the inner part becomes an
   OMP_SIMD loop (via gfc_trans_omp_do) and is wrapped in an OMP_FOR
   carrying the do-specific clauses.  CLAUSESA may be pre-split by the
   caller (combined parallel do simd) or NULL, in which case the clauses
   are split locally.
   NOTE(review): this extract is elided -- braces and some conditionals
   around the OMP_FOR creation are missing from the visible text.  */
2263 gfc_trans_omp_do_simd (gfc_code *code, gfc_omp_clauses *clausesa,
2266 stmtblock_t block, *pblock = NULL;
2267 gfc_omp_clauses clausesa_buf[GFC_OMP_SPLIT_NUM];
2268 tree stmt, body, omp_do_clauses = NULL_TREE;
2270 gfc_start_block (&block);
/* Standalone do simd: split the combined clause set ourselves.  */
2272 if (clausesa == NULL)
2274 clausesa = clausesa_buf;
2275 gfc_split_omp_clauses (code, clausesa);
2278 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_DO], code->loc);
/* Build the inner simd loop; OMP_CLAUSES are the enclosing parallel's
   clauses (for lastprivate fix-ups), if any.  */
2280 body = gfc_trans_omp_do (code, EXEC_OMP_SIMD, pblock,
2281 &clausesa[GFC_OMP_SPLIT_SIMD], omp_clauses);
2282 if (TREE_CODE (body) != BIND_EXPR)
2283 body = build3_v (BIND_EXPR, NULL, body, poplevel (1, 0));
/* Wrap the simd loop in the worksharing OMP_FOR node.  */
2286 stmt = make_node (OMP_FOR);
2287 TREE_TYPE (stmt) = void_type_node;
2288 OMP_FOR_BODY (stmt) = body;
2289 OMP_FOR_CLAUSES (stmt) = omp_do_clauses;
2290 gfc_add_expr_to_block (&block, stmt);
2291 return gfc_finish_block (&block);
2295 gfc_trans_omp_parallel_do (gfc_code *code)
2297 stmtblock_t block, *pblock = NULL;
2298 gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM];
2299 tree stmt, omp_clauses = NULL_TREE;
2301 gfc_start_block (&block);
2303 gfc_split_omp_clauses (code, clausesa);
2305 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
2307 if (!clausesa[GFC_OMP_SPLIT_DO].ordered
2308 && clausesa[GFC_OMP_SPLIT_DO].sched_kind != OMP_SCHED_STATIC)
2312 stmt = gfc_trans_omp_do (code, EXEC_OMP_DO, pblock,
2313 &clausesa[GFC_OMP_SPLIT_DO], omp_clauses);
2314 if (TREE_CODE (stmt) != BIND_EXPR)
2315 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
2318 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
2320 OMP_PARALLEL_COMBINED (stmt) = 1;
2321 gfc_add_expr_to_block (&block, stmt);
2322 return gfc_finish_block (&block);
2326 gfc_trans_omp_parallel_do_simd (gfc_code *code)
2329 gfc_omp_clauses clausesa[GFC_OMP_SPLIT_NUM];
2330 tree stmt, omp_clauses = NULL_TREE;
2332 gfc_start_block (&block);
2334 gfc_split_omp_clauses (code, clausesa);
2336 = gfc_trans_omp_clauses (&block, &clausesa[GFC_OMP_SPLIT_PARALLEL],
2339 stmt = gfc_trans_omp_do_simd (code, clausesa, omp_clauses);
2340 if (TREE_CODE (stmt) != BIND_EXPR)
2341 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
2344 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
2346 OMP_PARALLEL_COMBINED (stmt) = 1;
2347 gfc_add_expr_to_block (&block, stmt);
2348 return gfc_finish_block (&block);
2352 gfc_trans_omp_parallel_sections (gfc_code *code)
2355 gfc_omp_clauses section_clauses;
2356 tree stmt, omp_clauses;
2358 memset (§ion_clauses, 0, sizeof (section_clauses));
2359 section_clauses.nowait = true;
2361 gfc_start_block (&block);
2362 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
2365 stmt = gfc_trans_omp_sections (code, §ion_clauses);
2366 if (TREE_CODE (stmt) != BIND_EXPR)
2367 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
2370 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
2372 OMP_PARALLEL_COMBINED (stmt) = 1;
2373 gfc_add_expr_to_block (&block, stmt);
2374 return gfc_finish_block (&block);
2378 gfc_trans_omp_parallel_workshare (gfc_code *code)
2381 gfc_omp_clauses workshare_clauses;
2382 tree stmt, omp_clauses;
2384 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
2385 workshare_clauses.nowait = true;
2387 gfc_start_block (&block);
2388 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
2391 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
2392 if (TREE_CODE (stmt) != BIND_EXPR)
2393 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0));
2396 stmt = build2_loc (input_location, OMP_PARALLEL, void_type_node, stmt,
2398 OMP_PARALLEL_COMBINED (stmt) = 1;
2399 gfc_add_expr_to_block (&block, stmt);
2400 return gfc_finish_block (&block);
2404 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
2406 stmtblock_t block, body;
2407 tree omp_clauses, stmt;
2408 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
2410 gfc_start_block (&block);
2412 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
2414 gfc_init_block (&body);
2415 for (code = code->block; code; code = code->block)
2417 /* Last section is special because of lastprivate, so even if it
2418 is empty, chain it in. */
2419 stmt = gfc_trans_omp_code (code->next,
2420 has_lastprivate && code->block == NULL);
2421 if (! IS_EMPTY_STMT (stmt))
2423 stmt = build1_v (OMP_SECTION, stmt);
2424 gfc_add_expr_to_block (&body, stmt);
2427 stmt = gfc_finish_block (&body);
2429 stmt = build2_loc (input_location, OMP_SECTIONS, void_type_node, stmt,
2431 gfc_add_expr_to_block (&block, stmt);
2433 return gfc_finish_block (&block);
2437 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
2439 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
2440 tree stmt = gfc_trans_omp_code (code->block->next, true);
2441 stmt = build2_loc (input_location, OMP_SINGLE, void_type_node, stmt,
2447 gfc_trans_omp_task (gfc_code *code)
2450 tree stmt, omp_clauses;
2452 gfc_start_block (&block);
2453 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
2455 stmt = gfc_trans_omp_code (code->block->next, true);
2456 stmt = build2_loc (input_location, OMP_TASK, void_type_node, stmt,
2458 gfc_add_expr_to_block (&block, stmt);
2459 return gfc_finish_block (&block);
2463 gfc_trans_omp_taskgroup (gfc_code *code)
2465 tree stmt = gfc_trans_code (code->block->next);
2466 return build1_loc (input_location, OMP_TASKGROUP, void_type_node, stmt);
2470 gfc_trans_omp_taskwait (void)
2472 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
2473 return build_call_expr_loc (input_location, decl, 0);
2477 gfc_trans_omp_taskyield (void)
2479 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
2480 return build_call_expr_loc (input_location, decl, 0);
2484 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
2486 tree res, tmp, stmt;
2487 stmtblock_t block, *pblock = NULL;
2488 stmtblock_t singleblock;
2489 int saved_ompws_flags;
2490 bool singleblock_in_progress = false;
2491 /* True if previous gfc_code in workshare construct is not workshared. */
2492 bool prev_singleunit;
2494 code = code->block->next;
2498 gfc_start_block (&block);
2501 ompws_flags = OMPWS_WORKSHARE_FLAG;
2502 prev_singleunit = false;
2504 /* Translate statements one by one to trees until we reach
2505 the end of the workshare construct. Adjacent gfc_codes that
2506 are a single unit of work are clustered and encapsulated in a
2507 single OMP_SINGLE construct. */
2508 for (; code; code = code->next)
2510 if (code->here != 0)
2512 res = gfc_trans_label_here (code);
2513 gfc_add_expr_to_block (pblock, res);
2516 /* No dependence analysis, use for clauses with wait.
2517 If this is the last gfc_code, use default omp_clauses. */
2518 if (code->next == NULL && clauses->nowait)
2519 ompws_flags |= OMPWS_NOWAIT;
2521 /* By default, every gfc_code is a single unit of work. */
2522 ompws_flags |= OMPWS_CURR_SINGLEUNIT;
2523 ompws_flags &= ~OMPWS_SCALARIZER_WS;
2532 res = gfc_trans_assign (code);
2535 case EXEC_POINTER_ASSIGN:
2536 res = gfc_trans_pointer_assign (code);
2539 case EXEC_INIT_ASSIGN:
2540 res = gfc_trans_init_assign (code);
2544 res = gfc_trans_forall (code);
2548 res = gfc_trans_where (code);
2551 case EXEC_OMP_ATOMIC:
2552 res = gfc_trans_omp_directive (code);
2555 case EXEC_OMP_PARALLEL:
2556 case EXEC_OMP_PARALLEL_DO:
2557 case EXEC_OMP_PARALLEL_SECTIONS:
2558 case EXEC_OMP_PARALLEL_WORKSHARE:
2559 case EXEC_OMP_CRITICAL:
2560 saved_ompws_flags = ompws_flags;
2562 res = gfc_trans_omp_directive (code);
2563 ompws_flags = saved_ompws_flags;
2567 internal_error ("gfc_trans_omp_workshare(): Bad statement code");
2570 gfc_set_backend_locus (&code->loc);
2572 if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
2574 if (prev_singleunit)
2576 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
2577 /* Add current gfc_code to single block. */
2578 gfc_add_expr_to_block (&singleblock, res);
2581 /* Finish single block and add it to pblock. */
2582 tmp = gfc_finish_block (&singleblock);
2583 tmp = build2_loc (input_location, OMP_SINGLE,
2584 void_type_node, tmp, NULL_TREE);
2585 gfc_add_expr_to_block (pblock, tmp);
2586 /* Add current gfc_code to pblock. */
2587 gfc_add_expr_to_block (pblock, res);
2588 singleblock_in_progress = false;
2593 if (ompws_flags & OMPWS_CURR_SINGLEUNIT)
2595 /* Start single block. */
2596 gfc_init_block (&singleblock);
2597 gfc_add_expr_to_block (&singleblock, res);
2598 singleblock_in_progress = true;
2601 /* Add the new statement to the block. */
2602 gfc_add_expr_to_block (pblock, res);
2604 prev_singleunit = (ompws_flags & OMPWS_CURR_SINGLEUNIT) != 0;
2608 /* Finish remaining SINGLE block, if we were in the middle of one. */
2609 if (singleblock_in_progress)
2611 /* Finish single block and add it to pblock. */
2612 tmp = gfc_finish_block (&singleblock);
2613 tmp = build2_loc (input_location, OMP_SINGLE, void_type_node, tmp,
2615 ? build_omp_clause (input_location, OMP_CLAUSE_NOWAIT)
2617 gfc_add_expr_to_block (pblock, tmp);
2620 stmt = gfc_finish_block (pblock);
2621 if (TREE_CODE (stmt) != BIND_EXPR)
2623 if (!IS_EMPTY_STMT (stmt))
2625 tree bindblock = poplevel (1, 0);
2626 stmt = build3_v (BIND_EXPR, NULL, stmt, bindblock);
2634 if (IS_EMPTY_STMT (stmt) && !clauses->nowait)
2635 stmt = gfc_trans_omp_barrier ();
2642 gfc_trans_omp_directive (gfc_code *code)
2646 case EXEC_OMP_ATOMIC:
2647 return gfc_trans_omp_atomic (code);
2648 case EXEC_OMP_BARRIER:
2649 return gfc_trans_omp_barrier ();
2650 case EXEC_OMP_CANCEL:
2651 return gfc_trans_omp_cancel (code);
2652 case EXEC_OMP_CANCELLATION_POINT:
2653 return gfc_trans_omp_cancellation_point (code);
2654 case EXEC_OMP_CRITICAL:
2655 return gfc_trans_omp_critical (code);
2658 return gfc_trans_omp_do (code, code->op, NULL, code->ext.omp_clauses,
2660 case EXEC_OMP_DO_SIMD:
2661 return gfc_trans_omp_do_simd (code, NULL, NULL_TREE);
2662 case EXEC_OMP_FLUSH:
2663 return gfc_trans_omp_flush ();
2664 case EXEC_OMP_MASTER:
2665 return gfc_trans_omp_master (code);
2666 case EXEC_OMP_ORDERED:
2667 return gfc_trans_omp_ordered (code);
2668 case EXEC_OMP_PARALLEL:
2669 return gfc_trans_omp_parallel (code);
2670 case EXEC_OMP_PARALLEL_DO:
2671 return gfc_trans_omp_parallel_do (code);
2672 case EXEC_OMP_PARALLEL_DO_SIMD:
2673 return gfc_trans_omp_parallel_do_simd (code);
2674 case EXEC_OMP_PARALLEL_SECTIONS:
2675 return gfc_trans_omp_parallel_sections (code);
2676 case EXEC_OMP_PARALLEL_WORKSHARE:
2677 return gfc_trans_omp_parallel_workshare (code);
2678 case EXEC_OMP_SECTIONS:
2679 return gfc_trans_omp_sections (code, code->ext.omp_clauses);
2680 case EXEC_OMP_SINGLE:
2681 return gfc_trans_omp_single (code, code->ext.omp_clauses);
2683 return gfc_trans_omp_task (code);
2684 case EXEC_OMP_TASKGROUP:
2685 return gfc_trans_omp_taskgroup (code);
2686 case EXEC_OMP_TASKWAIT:
2687 return gfc_trans_omp_taskwait ();
2688 case EXEC_OMP_TASKYIELD:
2689 return gfc_trans_omp_taskyield ();
2690 case EXEC_OMP_WORKSHARE:
2691 return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
2698 gfc_trans_omp_declare_simd (gfc_namespace *ns)
2703 gfc_omp_declare_simd *ods;
2704 for (ods = ns->omp_declare_simd; ods; ods = ods->next)
2706 tree c = gfc_trans_omp_clauses (NULL, ods->clauses, ods->where, true);
2707 tree fndecl = ns->proc_name->backend_decl;
2709 c = tree_cons (NULL_TREE, c, NULL_TREE);
2710 c = build_tree_list (get_identifier ("omp declare simd"), c);
2711 TREE_CHAIN (c) = DECL_ATTRIBUTES (fndecl);
2712 DECL_ATTRIBUTES (fndecl) = c;