1 /* Nested function decomposition for GIMPLE.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "tree-dump.h"
28 #include "tree-inline.h"
30 #include "tree-iterator.h"
33 #include "expr.h" /* FIXME: For STACK_SAVEAREA_MODE and SAVE_NONLOCAL. */
34 #include "langhooks.h"
35 #include "pointer-set.h"
36 #include "gimple-low.h"
39 /* The object of this pass is to lower the representation of a set of nested
40 functions in order to expose all of the gory details of the various
41 nonlocal references. We want to do this sooner rather than later, in
42 order to give us more freedom in emitting all of the functions in question.
44 Back in olden times, when gcc was young, we developed an insanely
45 complicated scheme whereby variables which were referenced nonlocally
46 were forced to live in the stack of the declaring function, and then
47 the nested functions magically discovered where these variables were
48 placed. In order for this scheme to function properly, it required
49 that the outer function be partially expanded, then we switch to
50 compiling the inner function, and once done with those we switch back
51 to compiling the outer function. Such delicate ordering requirements
52 makes it difficult to do whole translation unit optimizations
53 involving such functions.
55 The implementation here is much more direct. Everything that can be
56 referenced by an inner function is a member of an explicitly created
57 structure herein called the "nonlocal frame struct". The incoming
58 static chain for a nested function is a pointer to this struct in
59 the parent. In this way, we settle on known offsets from a known
60 base, and so are decoupled from the logic that places objects in the
61 function's stack frame. More importantly, we don't have to wait for
62 that to happen -- since the compilation of the inner function is no
63 longer tied to a real stack frame, the nonlocal frame struct can be
64 allocated anywhere.  Which means that the outer function is now inlinable.
67 Theory of operation here is very simple. Iterate over all the
68 statements in all the functions (depth first) several times,
69 allocating structures and fields on demand. In general we want to
70 examine inner functions first, so that we can avoid making changes
71 to outer functions which are unnecessary.
73 The order of the passes matters a bit, in that later passes will be
74 skipped if it is discovered that the functions don't actually interact
75 at all. That is, they're nested in the lexical sense but could have
76 been written as independent functions without change. */
81 struct nesting_info *outer;
82 struct nesting_info *inner;
83 struct nesting_info *next;
85 struct pointer_map_t *field_map;
86 struct pointer_map_t *var_map;
87 struct pointer_set_t *mem_refs;
88 bitmap suppress_expansion;
91 tree new_local_var_chain;
99 bool any_parm_remapped;
100 bool any_tramp_created;
101 char static_chain_added;
105 /* Iterate over the nesting tree, starting with ROOT, depth first. */
107 static inline struct nesting_info *
108 iter_nestinfo_start (struct nesting_info *root)
115 static inline struct nesting_info *
116 iter_nestinfo_next (struct nesting_info *node)
119 return iter_nestinfo_start (node->next);
123 #define FOR_EACH_NEST_INFO(I, ROOT) \
124 for ((I) = iter_nestinfo_start (ROOT); (I); (I) = iter_nestinfo_next (I))
126 /* Obstack used for the bitmaps in the struct above. */
127 static struct bitmap_obstack nesting_info_bitmap_obstack;
130 /* We're working in so many different function contexts simultaneously,
131 that create_tmp_var is dangerous. Prevent mishap. */
132 #define create_tmp_var cant_use_create_tmp_var_here_dummy
134 /* Like create_tmp_var, except record the variable for registration at
135 the given nesting level. */
138 create_tmp_var_for (struct nesting_info *info, tree type, const char *prefix)
142 /* If the type is of variable size or a type which must be created by the
143 frontend, something is wrong. Note that we explicitly allow
144 incomplete types here, since we create them ourselves here. */
145 gcc_assert (!TREE_ADDRESSABLE (type));
146 gcc_assert (!TYPE_SIZE_UNIT (type)
147 || TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
149 tmp_var = create_tmp_var_raw (type, prefix);
150 DECL_CONTEXT (tmp_var) = info->context;
151 DECL_CHAIN (tmp_var) = info->new_local_var_chain;
152 DECL_SEEN_IN_BIND_EXPR_P (tmp_var) = 1;
153 if (TREE_CODE (type) == COMPLEX_TYPE
154 || TREE_CODE (type) == VECTOR_TYPE)
155 DECL_GIMPLE_REG_P (tmp_var) = 1;
157 info->new_local_var_chain = tmp_var;
162 /* Take the address of EXP to be used within function CONTEXT.
163 Mark it for addressability as necessary. */
166 build_addr (tree exp, tree context)
172 while (handled_component_p (base))
173 base = TREE_OPERAND (base, 0);
176 TREE_ADDRESSABLE (base) = 1;
178 /* Building the ADDR_EXPR will compute a set of properties for
179 that ADDR_EXPR. Those properties are unfortunately context
180 specific, i.e., they are dependent on CURRENT_FUNCTION_DECL.
182 Temporarily set CURRENT_FUNCTION_DECL to the desired context,
183 build the ADDR_EXPR, then restore CURRENT_FUNCTION_DECL. That
184 way the properties are for the ADDR_EXPR are computed properly. */
185 save_context = current_function_decl;
186 current_function_decl = context;
187 retval = build_fold_addr_expr (exp);
188 current_function_decl = save_context;
192 /* Insert FIELD into TYPE, sorted by alignment requirements. */
195 insert_field_into_struct (tree type, tree field)
199 DECL_CONTEXT (field) = type;
201 for (p = &TYPE_FIELDS (type); *p ; p = &DECL_CHAIN (*p))
202 if (DECL_ALIGN (field) >= DECL_ALIGN (*p))
205 DECL_CHAIN (field) = *p;
208 /* Set correct alignment for frame struct type. */
209 if (TYPE_ALIGN (type) < DECL_ALIGN (field))
210 TYPE_ALIGN (type) = DECL_ALIGN (field);
213 /* Build or return the RECORD_TYPE that describes the frame state that is
214 shared between INFO->CONTEXT and its nested functions. This record will
215 not be complete until finalize_nesting_tree; up until that point we'll
216 be adding fields as necessary.
218 We also build the DECL that represents this frame in the function. */
221 get_frame_type (struct nesting_info *info)
223 tree type = info->frame_type;
228 type = make_node (RECORD_TYPE);
230 name = concat ("FRAME.",
231 IDENTIFIER_POINTER (DECL_NAME (info->context)),
233 TYPE_NAME (type) = get_identifier (name);
236 info->frame_type = type;
237 info->frame_decl = create_tmp_var_for (info, type, "FRAME");
238 DECL_NONLOCAL_FRAME (info->frame_decl) = 1;
240 /* ??? Always make it addressable for now, since it is meant to
241 be pointed to by the static chain pointer. This pessimizes
242 when it turns out that no static chains are needed because
243 the nested functions referencing non-local variables are not
244 reachable, but the true pessimization is to create the non-
245 local frame structure in the first place. */
246 TREE_ADDRESSABLE (info->frame_decl) = 1;
251 /* Return true if DECL should be referenced by pointer in the non-local
255 use_pointer_in_frame (tree decl)
257 if (TREE_CODE (decl) == PARM_DECL)
259 /* It's illegal to copy TREE_ADDRESSABLE, impossible to copy variable
260 sized decls, and inefficient to copy large aggregates. Don't bother
261 moving anything but scalar variables. */
262 return AGGREGATE_TYPE_P (TREE_TYPE (decl));
266 /* Variable sized types make things "interesting" in the frame. */
267 return DECL_SIZE (decl) == NULL || !TREE_CONSTANT (DECL_SIZE (decl));
271 /* Given DECL, a non-locally accessed variable, find or create a field
272 in the non-local frame structure for the given nesting context. */
275 lookup_field_for_decl (struct nesting_info *info, tree decl,
276 enum insert_option insert)
280 if (insert == NO_INSERT)
282 slot = pointer_map_contains (info->field_map, decl);
283 return slot ? (tree) *slot : NULL_TREE;
286 slot = pointer_map_insert (info->field_map, decl);
289 tree field = make_node (FIELD_DECL);
290 DECL_NAME (field) = DECL_NAME (decl);
292 if (use_pointer_in_frame (decl))
294 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
295 DECL_ALIGN (field) = TYPE_ALIGN (TREE_TYPE (field));
296 DECL_NONADDRESSABLE_P (field) = 1;
300 TREE_TYPE (field) = TREE_TYPE (decl);
301 DECL_SOURCE_LOCATION (field) = DECL_SOURCE_LOCATION (decl);
302 DECL_ALIGN (field) = DECL_ALIGN (decl);
303 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
304 TREE_ADDRESSABLE (field) = TREE_ADDRESSABLE (decl);
305 DECL_NONADDRESSABLE_P (field) = !TREE_ADDRESSABLE (decl);
306 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
309 insert_field_into_struct (get_frame_type (info), field);
312 if (TREE_CODE (decl) == PARM_DECL)
313 info->any_parm_remapped = true;
319 /* Build or return the variable that holds the static chain within
320 INFO->CONTEXT. This variable may only be used within INFO->CONTEXT. */
323 get_chain_decl (struct nesting_info *info)
325 tree decl = info->chain_decl;
331 type = get_frame_type (info->outer);
332 type = build_pointer_type (type);
334 /* Note that this variable is *not* entered into any BIND_EXPR;
335 the construction of this variable is handled specially in
336 expand_function_start and initialize_inlined_parameters.
337 Note also that it's represented as a parameter. This is more
338 close to the truth, since the initial value does come from
340 decl = build_decl (DECL_SOURCE_LOCATION (info->context),
341 PARM_DECL, create_tmp_var_name ("CHAIN"), type);
342 DECL_ARTIFICIAL (decl) = 1;
343 DECL_IGNORED_P (decl) = 1;
344 TREE_USED (decl) = 1;
345 DECL_CONTEXT (decl) = info->context;
346 DECL_ARG_TYPE (decl) = type;
348 /* Tell tree-inline.c that we never write to this variable, so
349 it can copy-prop the replacement value immediately. */
350 TREE_READONLY (decl) = 1;
352 info->chain_decl = decl;
355 && (dump_flags & TDF_DETAILS)
356 && !DECL_STATIC_CHAIN (info->context))
357 fprintf (dump_file, "Setting static-chain for %s\n",
358 lang_hooks.decl_printable_name (info->context, 2));
360 DECL_STATIC_CHAIN (info->context) = 1;
365 /* Build or return the field within the non-local frame state that holds
366 the static chain for INFO->CONTEXT. This is the way to walk back up
367 multiple nesting levels. */
370 get_chain_field (struct nesting_info *info)
372 tree field = info->chain_field;
376 tree type = build_pointer_type (get_frame_type (info->outer));
378 field = make_node (FIELD_DECL);
379 DECL_NAME (field) = get_identifier ("__chain");
380 TREE_TYPE (field) = type;
381 DECL_ALIGN (field) = TYPE_ALIGN (type);
382 DECL_NONADDRESSABLE_P (field) = 1;
384 insert_field_into_struct (get_frame_type (info), field);
386 info->chain_field = field;
389 && (dump_flags & TDF_DETAILS)
390 && !DECL_STATIC_CHAIN (info->context))
391 fprintf (dump_file, "Setting static-chain for %s\n",
392 lang_hooks.decl_printable_name (info->context, 2));
394 DECL_STATIC_CHAIN (info->context) = 1;
399 /* Initialize a new temporary with the GIMPLE_CALL STMT. */
402 init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
407 t = create_tmp_var_for (info, gimple_call_return_type (call), NULL);
408 gimple_call_set_lhs (call, t);
409 if (! gsi_end_p (*gsi))
410 gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
411 gsi_insert_before (gsi, call, GSI_SAME_STMT);
417 /* Copy EXP into a temporary. Allocate the temporary in the context of
418 INFO and insert the initialization statement before GSI. */
421 init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
426 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
427 stmt = gimple_build_assign (t, exp);
428 if (! gsi_end_p (*gsi))
429 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
430 gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
436 /* Similarly, but only do so to force EXP to satisfy is_gimple_val. */
439 gsi_gimplify_val (struct nesting_info *info, tree exp,
440 gimple_stmt_iterator *gsi)
442 if (is_gimple_val (exp))
445 return init_tmp_var (info, exp, gsi);
448 /* Similarly, but copy from the temporary and insert the statement
449 after the iterator. */
452 save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
457 t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
458 stmt = gimple_build_assign (exp, t);
459 if (! gsi_end_p (*gsi))
460 gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
461 gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
466 /* Build or return the type used to represent a nested function trampoline. */
468 static GTY(()) tree trampoline_type;
471 get_trampoline_type (struct nesting_info *info)
473 unsigned align, size;
477 return trampoline_type;
479 align = TRAMPOLINE_ALIGNMENT;
480 size = TRAMPOLINE_SIZE;
482 /* If we won't be able to guarantee alignment simply via TYPE_ALIGN,
483 then allocate extra space so that we can do dynamic alignment. */
484 if (align > STACK_BOUNDARY)
486 size += ((align/BITS_PER_UNIT) - 1) & -(STACK_BOUNDARY/BITS_PER_UNIT);
487 align = STACK_BOUNDARY;
490 t = build_index_type (size_int (size - 1));
491 t = build_array_type (char_type_node, t);
492 t = build_decl (DECL_SOURCE_LOCATION (info->context),
493 FIELD_DECL, get_identifier ("__data"), t);
494 DECL_ALIGN (t) = align;
495 DECL_USER_ALIGN (t) = 1;
497 trampoline_type = make_node (RECORD_TYPE);
498 TYPE_NAME (trampoline_type) = get_identifier ("__builtin_trampoline");
499 TYPE_FIELDS (trampoline_type) = t;
500 layout_type (trampoline_type);
501 DECL_CONTEXT (t) = trampoline_type;
503 return trampoline_type;
506 /* Given DECL, a nested function, find or create a field in the non-local
507 frame structure for a trampoline for this function. */
510 lookup_tramp_for_decl (struct nesting_info *info, tree decl,
511 enum insert_option insert)
515 if (insert == NO_INSERT)
517 slot = pointer_map_contains (info->var_map, decl);
518 return slot ? (tree) *slot : NULL_TREE;
521 slot = pointer_map_insert (info->var_map, decl);
524 tree field = make_node (FIELD_DECL);
525 DECL_NAME (field) = DECL_NAME (decl);
526 TREE_TYPE (field) = get_trampoline_type (info);
527 TREE_ADDRESSABLE (field) = 1;
529 insert_field_into_struct (get_frame_type (info), field);
532 info->any_tramp_created = true;
538 /* Build or return the field within the non-local frame state that holds
539 the non-local goto "jmp_buf". The buffer itself is maintained by the
540 rtl middle-end as dynamic stack space is allocated. */
543 get_nl_goto_field (struct nesting_info *info)
545 tree field = info->nl_goto_field;
551 /* For __builtin_nonlocal_goto, we need N words. The first is the
552 frame pointer, the rest is for the target's stack pointer save
553 area. The number of words is controlled by STACK_SAVEAREA_MODE;
554 not the best interface, but it'll do for now. */
555 if (Pmode == ptr_mode)
556 type = ptr_type_node;
558 type = lang_hooks.types.type_for_mode (Pmode, 1);
560 size = GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL));
561 size = size / GET_MODE_SIZE (Pmode);
564 type = build_array_type
565 (type, build_index_type (size_int (size)));
567 field = make_node (FIELD_DECL);
568 DECL_NAME (field) = get_identifier ("__nl_goto_buf");
569 TREE_TYPE (field) = type;
570 DECL_ALIGN (field) = TYPE_ALIGN (type);
571 TREE_ADDRESSABLE (field) = 1;
573 insert_field_into_struct (get_frame_type (info), field);
575 info->nl_goto_field = field;
581 /* Invoke CALLBACK on all statements of GIMPLE sequence *PSEQ. */
584 walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
585 struct nesting_info *info, gimple_seq *pseq)
587 struct walk_stmt_info wi;
589 memset (&wi, 0, sizeof (wi));
592 walk_gimple_seq_mod (pseq, callback_stmt, callback_op, &wi);
596 /* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT. */
599 walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
600 struct nesting_info *info)
602 gimple_seq body = gimple_body (info->context);
603 walk_body (callback_stmt, callback_op, info, &body);
604 gimple_set_body (info->context, body);
607 /* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
610 walk_gimple_omp_for (gimple for_stmt,
611 walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
612 struct nesting_info *info)
614 struct walk_stmt_info wi;
619 walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body_ptr (for_stmt));
622 memset (&wi, 0, sizeof (wi));
624 wi.gsi = gsi_last (seq);
626 for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
629 walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
633 walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
638 walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
641 t = gimple_omp_for_incr (for_stmt, i);
642 gcc_assert (BINARY_CLASS_P (t));
644 walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
647 walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
650 seq = gsi_seq (wi.gsi);
651 if (!gimple_seq_empty_p (seq))
653 gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
654 annotate_all_with_location (seq, gimple_location (for_stmt));
655 gimple_seq_add_seq (&pre_body, seq);
656 gimple_omp_for_set_pre_body (for_stmt, pre_body);
660 /* Similarly for ROOT and all functions nested underneath, depth first. */
663 walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
664 struct nesting_info *root)
666 struct nesting_info *n;
667 FOR_EACH_NEST_INFO (n, root)
668 walk_function (callback_stmt, callback_op, n);
672 /* We have to check for a fairly pathological case. The operands of function
673 nested function are to be interpreted in the context of the enclosing
674 function. So if any are variably-sized, they will get remapped when the
675 enclosing function is inlined. But that remapping would also have to be
676 done in the types of the PARM_DECLs of the nested function, meaning the
677 argument types of that function will disagree with the arguments in the
678 calls to that function. So we'd either have to make a copy of the nested
679 function corresponding to each time the enclosing function was inlined or
680 add a VIEW_CONVERT_EXPR to each such operand for each call to the nested
681 function. The former is not practical. The latter would still require
682 detecting this case to know when to add the conversions. So, for now at
683 least, we don't inline such an enclosing function.
685 We have to do that check recursively, so here return indicating whether
686 FNDECL has such a nested function. ORIG_FN is the function we were
687 trying to inline to use for checking whether any argument is variably
688 modified by anything in it.
690 It would be better to do this in tree-inline.c so that we could give
691 the appropriate warning for why a function can't be inlined, but that's
692 too late since the nesting structure has already been flattened and
693 adding a flag just to record this fact seems a waste of a flag. */
696 check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
698 struct cgraph_node *cgn = cgraph_get_node (fndecl);
701 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
703 for (arg = DECL_ARGUMENTS (cgn->symbol.decl); arg; arg = DECL_CHAIN (arg))
704 if (variably_modified_type_p (TREE_TYPE (arg), orig_fndecl))
707 if (check_for_nested_with_variably_modified (cgn->symbol.decl,
715 /* Construct our local datastructure describing the function nesting
716 tree rooted by CGN. */
718 static struct nesting_info *
719 create_nesting_tree (struct cgraph_node *cgn)
721 struct nesting_info *info = XCNEW (struct nesting_info);
722 info->field_map = pointer_map_create ();
723 info->var_map = pointer_map_create ();
724 info->mem_refs = pointer_set_create ();
725 info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
726 info->context = cgn->symbol.decl;
728 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
730 struct nesting_info *sub = create_nesting_tree (cgn);
732 sub->next = info->inner;
736 /* See discussion at check_for_nested_with_variably_modified for a
737 discussion of why this has to be here. */
738 if (check_for_nested_with_variably_modified (info->context, info->context))
739 DECL_UNINLINABLE (info->context) = true;
744 /* Return an expression computing the static chain for TARGET_CONTEXT
745 from INFO->CONTEXT. Insert any necessary computations before TSI. */
748 get_static_chain (struct nesting_info *info, tree target_context,
749 gimple_stmt_iterator *gsi)
751 struct nesting_info *i;
754 if (info->context == target_context)
756 x = build_addr (info->frame_decl, target_context);
760 x = get_chain_decl (info);
762 for (i = info->outer; i->context != target_context; i = i->outer)
764 tree field = get_chain_field (i);
766 x = build_simple_mem_ref (x);
767 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
768 x = init_tmp_var (info, x, gsi);
776 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
777 frame as seen from INFO->CONTEXT. Insert any necessary computations
781 get_frame_field (struct nesting_info *info, tree target_context,
782 tree field, gimple_stmt_iterator *gsi)
784 struct nesting_info *i;
787 if (info->context == target_context)
789 /* Make sure frame_decl gets created. */
790 (void) get_frame_type (info);
791 x = info->frame_decl;
795 x = get_chain_decl (info);
797 for (i = info->outer; i->context != target_context; i = i->outer)
799 tree field = get_chain_field (i);
801 x = build_simple_mem_ref (x);
802 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
803 x = init_tmp_var (info, x, gsi);
806 x = build_simple_mem_ref (x);
809 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
813 static void note_nonlocal_vla_type (struct nesting_info *info, tree type);
815 /* A subroutine of convert_nonlocal_reference_op. Create a local variable
816 in the nested function with DECL_VALUE_EXPR set to reference the true
817 variable in the parent function. This is used both for debug info
818 and in OpenMP lowering. */
821 get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
824 struct nesting_info *i;
825 tree x, field, new_decl;
828 slot = pointer_map_insert (info->var_map, decl);
833 target_context = decl_function_context (decl);
835 /* A copy of the code in get_frame_field, but without the temporaries. */
836 if (info->context == target_context)
838 /* Make sure frame_decl gets created. */
839 (void) get_frame_type (info);
840 x = info->frame_decl;
845 x = get_chain_decl (info);
846 for (i = info->outer; i->context != target_context; i = i->outer)
848 field = get_chain_field (i);
849 x = build_simple_mem_ref (x);
850 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
852 x = build_simple_mem_ref (x);
855 field = lookup_field_for_decl (i, decl, INSERT);
856 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
857 if (use_pointer_in_frame (decl))
858 x = build_simple_mem_ref (x);
860 /* ??? We should be remapping types as well, surely. */
861 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
862 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
863 DECL_CONTEXT (new_decl) = info->context;
864 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
865 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
866 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
867 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
868 TREE_READONLY (new_decl) = TREE_READONLY (decl);
869 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
870 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
871 if ((TREE_CODE (decl) == PARM_DECL
872 || TREE_CODE (decl) == RESULT_DECL
873 || TREE_CODE (decl) == VAR_DECL)
874 && DECL_BY_REFERENCE (decl))
875 DECL_BY_REFERENCE (new_decl) = 1;
877 SET_DECL_VALUE_EXPR (new_decl, x);
878 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
881 DECL_CHAIN (new_decl) = info->debug_var_chain;
882 info->debug_var_chain = new_decl;
885 && info->context != target_context
886 && variably_modified_type_p (TREE_TYPE (decl), NULL))
887 note_nonlocal_vla_type (info, TREE_TYPE (decl));
893 /* Callback for walk_gimple_stmt, rewrite all references to VAR
894 and PARM_DECLs that belong to outer functions.
896 The rewrite will involve some number of structure accesses back up
897 the static chain. E.g. for a variable FOO up one nesting level it'll
898 be CHAIN->FOO. For two levels it'll be CHAIN->__chain->FOO. Further
899 indirections apply to decls for which use_pointer_in_frame is true. */
902 convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
904 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
905 struct nesting_info *const info = (struct nesting_info *) wi->info;
909 switch (TREE_CODE (t))
912 /* Non-automatic variables are never processed. */
913 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
918 if (decl_function_context (t) != info->context)
923 x = get_nonlocal_debug_decl (info, t);
924 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
926 tree target_context = decl_function_context (t);
927 struct nesting_info *i;
928 for (i = info->outer; i->context != target_context; i = i->outer)
930 x = lookup_field_for_decl (i, t, INSERT);
931 x = get_frame_field (info, target_context, x, &wi->gsi);
932 if (use_pointer_in_frame (t))
934 x = init_tmp_var (info, x, &wi->gsi);
935 x = build_simple_mem_ref (x);
942 x = save_tmp_var (info, x, &wi->gsi);
944 x = init_tmp_var (info, x, &wi->gsi);
952 /* We're taking the address of a label from a parent function, but
953 this is not itself a non-local goto. Mark the label such that it
954 will not be deleted, much as we would with a label address in
956 if (decl_function_context (t) != info->context)
957 FORCED_LABEL (t) = 1;
962 bool save_val_only = wi->val_only;
964 wi->val_only = false;
967 walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
974 /* If we changed anything, we might no longer be directly
975 referencing a decl. */
976 save_context = current_function_decl;
977 current_function_decl = info->context;
978 recompute_tree_invariant_for_addr_expr (t);
979 current_function_decl = save_context;
981 /* If the callback converted the address argument in a context
982 where we only accept variables (and min_invariant, presumably),
983 then compute the address into a temporary. */
985 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
995 case ARRAY_RANGE_REF:
997 /* Go down this entire nest and just look at the final prefix and
998 anything that describes the references. Otherwise, we lose track
999 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1000 wi->val_only = true;
1002 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1004 if (TREE_CODE (t) == COMPONENT_REF)
1005 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
1007 else if (TREE_CODE (t) == ARRAY_REF
1008 || TREE_CODE (t) == ARRAY_RANGE_REF)
1010 walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
1012 walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
1014 walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
1018 wi->val_only = false;
1019 walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
1022 case VIEW_CONVERT_EXPR:
1023 /* Just request to look at the subtrees, leaving val_only and lhs
1024 untouched. This might actually be for !val_only + lhs, in which
1025 case we don't want to force a replacement by a temporary. */
1030 if (!IS_TYPE_OR_DECL_P (t))
1033 wi->val_only = true;
1042 static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
1043 struct walk_stmt_info *);
1045 /* Helper for convert_nonlocal_references, rewrite all references to VAR
1046 and PARM_DECLs that belong to outer functions. */
1049 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1051 struct nesting_info *const info = (struct nesting_info *) wi->info;
1052 bool need_chain = false, need_stmts = false;
1055 bitmap new_suppress;
1057 new_suppress = BITMAP_GGC_ALLOC ();
1058 bitmap_copy (new_suppress, info->suppress_expansion);
1060 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1062 switch (OMP_CLAUSE_CODE (clause))
1064 case OMP_CLAUSE_REDUCTION:
1065 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1067 goto do_decl_clause;
1069 case OMP_CLAUSE_LASTPRIVATE:
1070 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1072 goto do_decl_clause;
1074 case OMP_CLAUSE_PRIVATE:
1075 case OMP_CLAUSE_FIRSTPRIVATE:
1076 case OMP_CLAUSE_COPYPRIVATE:
1077 case OMP_CLAUSE_SHARED:
1079 decl = OMP_CLAUSE_DECL (clause);
1080 if (TREE_CODE (decl) == VAR_DECL
1081 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
1083 if (decl_function_context (decl) != info->context)
1085 bitmap_set_bit (new_suppress, DECL_UID (decl));
1086 OMP_CLAUSE_DECL (clause) = get_nonlocal_debug_decl (info, decl);
1087 if (OMP_CLAUSE_CODE (clause) != OMP_CLAUSE_PRIVATE)
1092 case OMP_CLAUSE_SCHEDULE:
1093 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1096 case OMP_CLAUSE_FINAL:
1098 case OMP_CLAUSE_NUM_THREADS:
1099 wi->val_only = true;
1101 convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
1105 case OMP_CLAUSE_NOWAIT:
1106 case OMP_CLAUSE_ORDERED:
1107 case OMP_CLAUSE_DEFAULT:
1108 case OMP_CLAUSE_COPYIN:
1109 case OMP_CLAUSE_COLLAPSE:
1110 case OMP_CLAUSE_UNTIED:
1111 case OMP_CLAUSE_MERGEABLE:
1119 info->suppress_expansion = new_suppress;
1122 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1123 switch (OMP_CLAUSE_CODE (clause))
1125 case OMP_CLAUSE_REDUCTION:
1126 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1129 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1130 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1132 walk_body (convert_nonlocal_reference_stmt,
1133 convert_nonlocal_reference_op, info,
1134 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1135 walk_body (convert_nonlocal_reference_stmt,
1136 convert_nonlocal_reference_op, info,
1137 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1138 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1143 case OMP_CLAUSE_LASTPRIVATE:
1144 walk_body (convert_nonlocal_reference_stmt,
1145 convert_nonlocal_reference_op, info,
1146 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1156 /* Create nonlocal debug decls for nonlocal VLA array bounds. */
1159 note_nonlocal_vla_type (struct nesting_info *info, tree type)
1161 while (POINTER_TYPE_P (type) && !TYPE_NAME (type))
1162 type = TREE_TYPE (type);
1164 if (TYPE_NAME (type)
1165 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1166 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
1167 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
1169 while (POINTER_TYPE_P (type)
1170 || TREE_CODE (type) == VECTOR_TYPE
1171 || TREE_CODE (type) == FUNCTION_TYPE
1172 || TREE_CODE (type) == METHOD_TYPE)
1173 type = TREE_TYPE (type);
1175 if (TREE_CODE (type) == ARRAY_TYPE)
1179 note_nonlocal_vla_type (info, TREE_TYPE (type));
1180 domain = TYPE_DOMAIN (type);
1183 t = TYPE_MIN_VALUE (domain);
1184 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1185 && decl_function_context (t) != info->context)
1186 get_nonlocal_debug_decl (info, t);
1187 t = TYPE_MAX_VALUE (domain);
1188 if (t && (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
1189 && decl_function_context (t) != info->context)
1190 get_nonlocal_debug_decl (info, t);
1195 /* Create nonlocal debug decls for nonlocal VLA array bounds for VLAs

/* Scan the variables of BLOCK; every variably-modified VAR_DECL that
   has a value expression and whose declaring context is not
   INFO->CONTEXT gets its VLA type noted, so debug decls are created
   for any nonlocal bounds.  */
1199 note_nonlocal_block_vlas (struct nesting_info *info, tree block)
1203 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
1204 if (TREE_CODE (var) == VAR_DECL
1205 && variably_modified_type_p (TREE_TYPE (var), NULL)
1206 && DECL_HAS_VALUE_EXPR_P (var)
1207 && decl_function_context (var) != info->context)
1208 note_nonlocal_vla_type (info, TREE_TYPE (var));
1211 /* Callback for walk_gimple_stmt. Rewrite all references to VAR and
1212 PARM_DECLs that belong to outer functions. This handles statements
1213 that are not handled via the standard recursion done in
1214 walk_gimple_stmt. STMT is the statement to examine, DATA is as in
1215 convert_nonlocal_reference_op. Set *HANDLED_OPS_P to true if all the
1216 operands of STMT have been handled by this function. */
1219 convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1220 struct walk_stmt_info *wi)
1222 struct nesting_info *info = (struct nesting_info *) wi->info;
1223 tree save_local_var_chain;
1224 bitmap save_suppress;
1225 gimple stmt = gsi_stmt (*gsi);
1227 switch (gimple_code (stmt))
1230 /* Don't walk non-local gotos for now. */
1231 if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
1233 wi->val_only = true;
1235 *handled_ops_p = true;
/* OMP parallel/task: convert the clauses; if any clause required the
   static chain, add a firstprivate clause for the chain decl so the
   outlined region receives it.  */
1240 case GIMPLE_OMP_PARALLEL:
1241 case GIMPLE_OMP_TASK:
1242 save_suppress = info->suppress_expansion;
1243 if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1247 decl = get_chain_decl (info);
1248 c = build_omp_clause (gimple_location (stmt),
1249 OMP_CLAUSE_FIRSTPRIVATE);
1250 OMP_CLAUSE_DECL (c) = decl;
1251 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1252 gimple_omp_taskreg_set_clauses (stmt, c);
/* Walk the body with a fresh local-var chain so temporaries created
   inside the OMP region are declared inside it, then restore.  */
1255 save_local_var_chain = info->new_local_var_chain;
1256 info->new_local_var_chain = NULL;
1258 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1259 info, gimple_omp_body_ptr (stmt));
1261 if (info->new_local_var_chain)
1262 declare_vars (info->new_local_var_chain,
1263 gimple_seq_first_stmt (gimple_omp_body (stmt)),
1265 info->new_local_var_chain = save_local_var_chain;
1266 info->suppress_expansion = save_suppress;
/* OMP_FOR also needs its loop bounds/steps walked explicitly.  */
1269 case GIMPLE_OMP_FOR:
1270 save_suppress = info->suppress_expansion;
1271 convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1272 walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
1273 convert_nonlocal_reference_op, info);
1274 walk_body (convert_nonlocal_reference_stmt,
1275 convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
1276 info->suppress_expansion = save_suppress;
/* The remaining clause-bearing OMP constructs: convert clauses, then
   walk the body, saving/restoring suppress_expansion around each.  */
1279 case GIMPLE_OMP_SECTIONS:
1280 save_suppress = info->suppress_expansion;
1281 convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1282 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1283 info, gimple_omp_body_ptr (stmt));
1284 info->suppress_expansion = save_suppress;
1287 case GIMPLE_OMP_SINGLE:
1288 save_suppress = info->suppress_expansion;
1289 convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1290 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1291 info, gimple_omp_body_ptr (stmt));
1292 info->suppress_expansion = save_suppress;
1295 case GIMPLE_OMP_TARGET:
1296 save_suppress = info->suppress_expansion;
1297 convert_nonlocal_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1298 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1299 info, gimple_omp_body_ptr (stmt));
1300 info->suppress_expansion = save_suppress;
1303 case GIMPLE_OMP_TEAMS:
1304 save_suppress = info->suppress_expansion;
1305 convert_nonlocal_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1306 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1307 info, gimple_omp_body_ptr (stmt));
1308 info->suppress_expansion = save_suppress;
/* Clause-less OMP constructs: just walk the body.  */
1311 case GIMPLE_OMP_SECTION:
1312 case GIMPLE_OMP_MASTER:
1313 case GIMPLE_OMP_TASKGROUP:
1314 case GIMPLE_OMP_ORDERED:
1315 walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
1316 info, gimple_omp_body_ptr (stmt));
/* At -O0, note VLA bounds referenced from outer functions so their
   debug decls get created (GIMPLE_BIND handling).  */
1320 if (!optimize && gimple_bind_block (stmt))
1321 note_nonlocal_block_vlas (info, gimple_bind_block (stmt));
1323 *handled_ops_p = false;
1327 wi->val_only = true;
1329 *handled_ops_p = false;
1333 /* For every other statement that we are not interested in
1334 handling here, let the walker traverse the operands. */
1335 *handled_ops_p = false;
1339 /* We have handled all of STMT operands, no need to traverse the operands. */
1340 *handled_ops_p = true;
1345 /* A subroutine of convert_local_reference. Create a local variable
1346 in the parent function with DECL_VALUE_EXPR set to reference the
1347 field in FRAME. This is used both for debug info and in OpenMP

/* Return (creating and caching in INFO->var_map on first use) a debug
   VAR_DECL in INFO->CONTEXT whose value expression is the COMPONENT_REF
   FRAME.FIELD standing in for DECL.  */
1351 get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
/* Reuse a previously created mapping if one exists.  */
1356 slot = pointer_map_insert (info->var_map, decl);
1358 return (tree) *slot;
1360 /* Make sure frame_decl gets created. */
1361 (void) get_frame_type (info);
1362 x = info->frame_decl;
1363 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
/* Build the new decl and copy the flags relevant to debug/codegen
   from the original.  */
1365 new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
1366 VAR_DECL, DECL_NAME (decl), TREE_TYPE (decl));
1367 DECL_CONTEXT (new_decl) = info->context;
1368 DECL_ARTIFICIAL (new_decl) = DECL_ARTIFICIAL (decl);
1369 DECL_IGNORED_P (new_decl) = DECL_IGNORED_P (decl);
1370 TREE_THIS_VOLATILE (new_decl) = TREE_THIS_VOLATILE (decl);
1371 TREE_SIDE_EFFECTS (new_decl) = TREE_SIDE_EFFECTS (decl);
1372 TREE_READONLY (new_decl) = TREE_READONLY (decl);
1373 TREE_ADDRESSABLE (new_decl) = TREE_ADDRESSABLE (decl);
1374 DECL_SEEN_IN_BIND_EXPR_P (new_decl) = 1;
1375 if ((TREE_CODE (decl) == PARM_DECL
1376 || TREE_CODE (decl) == RESULT_DECL
1377 || TREE_CODE (decl) == VAR_DECL)
1378 && DECL_BY_REFERENCE (decl))
1379 DECL_BY_REFERENCE (new_decl) = 1;
/* The new decl evaluates to the frame field.  */
1381 SET_DECL_VALUE_EXPR (new_decl, x)
1382 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
/* Chain onto the list of debug decls to be declared later.  */
1385 DECL_CHAIN (new_decl) = info->debug_var_chain;
1386 info->debug_var_chain = new_decl;
1388 /* Do not emit debug info twice. */
1389 DECL_IGNORED_P (decl) = 1;
1395 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1396 and PARM_DECLs that were referenced by inner nested functions.
1397 The rewrite will be a structure reference to the local frame variable. */
1399 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
1402 convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
1404 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1405 struct nesting_info *const info = (struct nesting_info *) wi->info;
1406 tree t = *tp, field, x;
1410 switch (TREE_CODE (t))
1413 /* Non-automatic variables are never processed. */
1414 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
1419 if (decl_function_context (t) == info->context)
1421 /* If we copied a pointer to the frame, then the original decl
1422 is used unchanged in the parent function. */
1423 if (use_pointer_in_frame (t))
1426 /* No need to transform anything if no child references the
1428 field = lookup_field_for_decl (info, t, NO_INSERT)
/* Replace the decl either with its debug stand-in (when expansion is
   suppressed, e.g. inside OMP clauses) or with FRAME.FIELD.  */
1433 x = get_local_debug_decl (info, t, field);
1434 if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
1435 x = get_frame_field (info, info->context, field, &wi->gsi);
/* In value context, load the field into a temporary; for an LHS use
   save_tmp_var so the store happens after.  */
1440 x = save_tmp_var (info, x, &wi->gsi);
1442 x = init_tmp_var (info, x, &wi->gsi);
/* ADDR_EXPR handling: walk the operand in address context.  */
1450 save_val_only = wi->val_only;
1451 wi->val_only = false;
1453 wi->changed = false;
1454 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
1455 wi->val_only = save_val_only;
1457 /* If we converted anything ... */
1462 /* Then the frame decl is now addressable. */
1463 TREE_ADDRESSABLE (info->frame_decl) = 1;
/* Recompute TREE_INVARIANT etc. in the context of the function the
   address now belongs to.  */
1465 save_context = current_function_decl;
1466 current_function_decl = info->context;
1467 recompute_tree_invariant_for_addr_expr (t);
1468 current_function_decl = save_context;
1470 /* If we are in a context where we only accept values, then
1471 compute the address into a temporary. */
1473 *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
1482 case ARRAY_RANGE_REF:
1484 /* Go down this entire nest and just look at the final prefix and
1485 anything that describes the references. Otherwise, we lose track
1486 of whether a NOP_EXPR or VIEW_CONVERT_EXPR needs a simple value. */
1487 save_val_only = wi->val_only;
1488 wi->val_only = true;
1490 for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
1492 if (TREE_CODE (t) == COMPONENT_REF)
1493 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1495 else if (TREE_CODE (t) == ARRAY_REF
1496 || TREE_CODE (t) == ARRAY_RANGE_REF)
1498 walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
1500 walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
1502 walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
/* Finally walk the base object of the reference chain.  */
1506 wi->val_only = false;
1507 walk_tree (tp, convert_local_reference_op, wi, NULL);
1508 wi->val_only = save_val_only;
/* MEM_REF: walk the address operand in value context.  */
1512 save_val_only = wi->val_only;
1513 wi->val_only = true;
1515 walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
1517 /* We need to re-fold the MEM_REF as component references as
1518 part of a ADDR_EXPR address are not allowed. But we cannot
1519 fold here, as the chain record type is not yet finalized. */
1520 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
1521 && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
1522 pointer_set_insert (info->mem_refs, tp);
1523 wi->val_only = save_val_only;
1526 case VIEW_CONVERT_EXPR:
1527 /* Just request to look at the subtrees, leaving val_only and lhs
1528 untouched. This might actually be for !val_only + lhs, in which
1529 case we don't want to force a replacement by a temporary. */
1534 if (!IS_TYPE_OR_DECL_P (t))
1537 wi->val_only = true;
1546 static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
1547 struct walk_stmt_info *);
1549 /* Helper for convert_local_reference. Convert all the references in
1550 the chain of clauses at *PCLAUSES. WI is as in convert_local_reference. */
1553 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
1555 struct nesting_info *const info = (struct nesting_info *) wi->info;
1556 bool need_frame = false, need_stmts = false;
1559 bitmap new_suppress;
/* Work on a copy of the suppression bitmap; decls converted to debug
   decls here must not be expanded again inside the region body.  */
1561 new_suppress = BITMAP_GGC_ALLOC ();
1562 bitmap_copy (new_suppress, info->suppress_expansion);
/* First pass: handle the decl operand of each clause.  */
1564 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1566 switch (OMP_CLAUSE_CODE (clause))
1568 case OMP_CLAUSE_REDUCTION:
1569 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1571 goto do_decl_clause;
1573 case OMP_CLAUSE_LASTPRIVATE:
1574 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
1576 goto do_decl_clause;
1578 case OMP_CLAUSE_PRIVATE:
1579 case OMP_CLAUSE_FIRSTPRIVATE:
1580 case OMP_CLAUSE_COPYPRIVATE:
1581 case OMP_CLAUSE_SHARED:
1583 decl = OMP_CLAUSE_DECL (clause);
1584 if (TREE_CODE (decl) == VAR_DECL
1585 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
/* A local decl that lives in the frame: substitute its debug decl
   in the clause and suppress further expansion of the original.  */
1587 if (decl_function_context (decl) == info->context
1588 && !use_pointer_in_frame (decl))
1590 tree field = lookup_field_for_decl (info, decl, NO_INSERT);
1593 bitmap_set_bit (new_suppress, DECL_UID (decl));
1594 OMP_CLAUSE_DECL (clause)
1595 = get_local_debug_decl (info, decl, field);
/* Clauses whose operand is an expression, not a decl: convert the
   operand in value context.  */
1601 case OMP_CLAUSE_SCHEDULE:
1602 if (OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (clause) == NULL)
1605 case OMP_CLAUSE_FINAL:
1607 case OMP_CLAUSE_NUM_THREADS:
1608 wi->val_only = true;
1610 convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
/* Operand-less clauses need no conversion.  */
1614 case OMP_CLAUSE_NOWAIT:
1615 case OMP_CLAUSE_ORDERED:
1616 case OMP_CLAUSE_DEFAULT:
1617 case OMP_CLAUSE_COPYIN:
1618 case OMP_CLAUSE_COLLAPSE:
1619 case OMP_CLAUSE_UNTIED:
1620 case OMP_CLAUSE_MERGEABLE:
1628 info->suppress_expansion = new_suppress;
/* Second pass: walk the GIMPLE sequences attached to reduction and
   lastprivate clauses, temporarily re-homing the reduction
   placeholder's context.  */
1631 for (clause = *pclauses; clause ; clause = OMP_CLAUSE_CHAIN (clause))
1632 switch (OMP_CLAUSE_CODE (clause))
1634 case OMP_CLAUSE_REDUCTION:
1635 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1638 = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
1639 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1641 walk_body (convert_local_reference_stmt,
1642 convert_local_reference_op, info,
1643 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
1644 walk_body (convert_local_reference_stmt,
1645 convert_local_reference_op, info,
1646 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
1647 DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
1652 case OMP_CLAUSE_LASTPRIVATE:
1653 walk_body (convert_local_reference_stmt,
1654 convert_local_reference_op, info,
1655 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
1666 /* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
1667 and PARM_DECLs that were referenced by inner nested functions.
1668 The rewrite will be a structure reference to the local frame variable. */
1671 convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1672 struct walk_stmt_info *wi)
1674 struct nesting_info *info = (struct nesting_info *) wi->info;
1675 tree save_local_var_chain;
1676 bitmap save_suppress;
1677 gimple stmt = gsi_stmt (*gsi);
1679 switch (gimple_code (stmt))
/* OMP parallel/task: convert clauses; if the frame is needed inside
   the region, share the frame decl with it via a clause.  */
1681 case GIMPLE_OMP_PARALLEL:
1682 case GIMPLE_OMP_TASK:
1683 save_suppress = info->suppress_expansion;
1684 if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
1688 (void) get_frame_type (info);
1689 c = build_omp_clause (gimple_location (stmt),
1691 OMP_CLAUSE_DECL (c) = info->frame_decl;
1692 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1693 gimple_omp_taskreg_set_clauses (stmt, c);
/* Walk the region body with a fresh local-var chain so temporaries
   are declared inside it; then restore the saved state.  */
1696 save_local_var_chain = info->new_local_var_chain;
1697 info->new_local_var_chain = NULL;
1699 walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
1700 gimple_omp_body_ptr (stmt));
1702 if (info->new_local_var_chain)
1703 declare_vars (info->new_local_var_chain,
1704 gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
1705 info->new_local_var_chain = save_local_var_chain;
1706 info->suppress_expansion = save_suppress;
/* OMP_FOR also needs its loop bounds/steps converted.  */
1709 case GIMPLE_OMP_FOR:
1710 save_suppress = info->suppress_expansion;
1711 convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
1712 walk_gimple_omp_for (stmt, convert_local_reference_stmt,
1713 convert_local_reference_op, info);
1714 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1715 info, gimple_omp_body_ptr (stmt));
1716 info->suppress_expansion = save_suppress;
/* Remaining clause-bearing OMP constructs: clauses, then body.  */
1719 case GIMPLE_OMP_SECTIONS:
1720 save_suppress = info->suppress_expansion;
1721 convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
1722 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1723 info, gimple_omp_body_ptr (stmt));
1724 info->suppress_expansion = save_suppress;
1727 case GIMPLE_OMP_SINGLE:
1728 save_suppress = info->suppress_expansion;
1729 convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
1730 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1731 info, gimple_omp_body_ptr (stmt));
1732 info->suppress_expansion = save_suppress;
1735 case GIMPLE_OMP_TARGET:
1736 save_suppress = info->suppress_expansion;
1737 convert_local_omp_clauses (gimple_omp_target_clauses_ptr (stmt), wi);
1738 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1739 info, gimple_omp_body_ptr (stmt));
1740 info->suppress_expansion = save_suppress;
1743 case GIMPLE_OMP_TEAMS:
1744 save_suppress = info->suppress_expansion;
1745 convert_local_omp_clauses (gimple_omp_teams_clauses_ptr (stmt), wi);
1746 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1747 info, gimple_omp_body_ptr (stmt));
1748 info->suppress_expansion = save_suppress;
/* Clause-less OMP constructs: just walk the body.  */
1751 case GIMPLE_OMP_SECTION:
1752 case GIMPLE_OMP_MASTER:
1753 case GIMPLE_OMP_TASKGROUP:
1754 case GIMPLE_OMP_ORDERED:
1755 walk_body (convert_local_reference_stmt, convert_local_reference_op,
1756 info, gimple_omp_body_ptr (stmt));
1760 wi->val_only = true;
1762 *handled_ops_p = false;
/* Drop clobbers of decls that moved into the frame: the frame decl
   outlives the original variable's scope.  */
1766 if (gimple_clobber_p (stmt))
1768 tree lhs = gimple_assign_lhs (stmt);
1769 if (!use_pointer_in_frame (lhs)
1770 && lookup_field_for_decl (info, lhs, NO_INSERT))
1772 gsi_replace (gsi, gimple_build_nop (), true);
1776 *handled_ops_p = false;
1780 /* For every other statement that we are not interested in
1781 handling here, let the walker traverse the operands. */
1782 *handled_ops_p = false;
1786 /* Indicate that we have handled all the operands ourselves. */
1787 *handled_ops_p = true;
1792 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
1793 that reference labels from outer functions. The rewrite will be a
1794 call to __builtin_nonlocal_goto. */
1797 convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1798 struct walk_stmt_info *wi)
1800 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1801 tree label, new_label, target_context, x, field;
1804 gimple stmt = gsi_stmt (*gsi);
/* Only GIMPLE_GOTO statements are of interest.  */
1806 if (gimple_code (stmt) != GIMPLE_GOTO)
1808 *handled_ops_p = false;
/* Computed gotos (non-LABEL_DECL destinations) are left alone.  */
1812 label = gimple_goto_dest (stmt);
1813 if (TREE_CODE (label) != LABEL_DECL)
1815 *handled_ops_p = false;
/* A goto to a label in this same function is an ordinary goto.  */
1819 target_context = decl_function_context (label);
1820 if (target_context == info->context)
1822 *handled_ops_p = false;
/* Find the nesting_info of the function that owns the label.  */
1826 for (i = info->outer; target_context != i->context; i = i->outer)
1829 /* The original user label may also be use for a normal goto, therefore
1830 we must create a new label that will actually receive the abnormal
1831 control transfer. This new label will be marked LABEL_NONLOCAL; this
1832 mark will trigger proper behavior in the cfg, as well as cause the
1833 (hairy target-specific) non-local goto receiver code to be generated
1834 when we expand rtl. Enter this association into var_map so that we
1835 can insert the new label into the IL during a second pass. */
1836 slot = pointer_map_insert (i->var_map, label);
1839 new_label = create_artificial_label (UNKNOWN_LOCATION);
1840 DECL_NONLOCAL (new_label) = 1;
1844 new_label = (tree) *slot;
1846 /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field). */
1847 field = get_nl_goto_field (i);
1848 x = get_frame_field (info, target_context, field, gsi);
1849 x = build_addr (x, target_context);
1850 x = gsi_gimplify_val (info, x, gsi);
1851 call = gimple_build_call (builtin_decl_implicit (BUILT_IN_NONLOCAL_GOTO),
1852 2, build_addr (new_label, target_context), x);
1853 gsi_replace (gsi, call, false);
1855 /* We have handled all of STMT's operands, no need to keep going. */
1856 *handled_ops_p = true;
1861 /* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
1862 are referenced via nonlocal goto from a nested function. The rewrite
1863 will involve installing a newly generated DECL_NONLOCAL label, and
1864 (potentially) a branch around the rtl gunk that is assumed to be
1865 attached to such a label. */
1868 convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1869 struct walk_stmt_info *wi)
1871 struct nesting_info *const info = (struct nesting_info *) wi->info;
1872 tree label, new_label;
1873 gimple_stmt_iterator tmp_gsi;
1875 gimple stmt = gsi_stmt (*gsi);
/* Only label statements are of interest.  */
1877 if (gimple_code (stmt) != GIMPLE_LABEL)
1879 *handled_ops_p = false;
/* Only labels recorded by convert_nl_goto_reference need a receiver.  */
1883 label = gimple_label_label (stmt);
1885 slot = pointer_map_contains (info->var_map, label);
1888 *handled_ops_p = false;
1892 /* If there's any possibility that the previous statement falls through,
1893 then we must branch around the new non-local label. */
1895 gsi_prev (&tmp_gsi);
1896 if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
1898 gimple stmt = gimple_build_goto (label);
1899 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
/* Install the nonlocal receiver label just before the user label.  */
1902 new_label = (tree) *slot;
1903 stmt = gimple_build_label (new_label);
1904 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1906 *handled_ops_p = true;
1911 /* Called via walk_function+walk_stmt, rewrite all references to addresses
1912 of nested functions that require the use of trampolines. The rewrite
1913 will involve a reference a trampoline generated for the occasion. */
1916 convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
1918 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1919 struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
1920 tree t = *tp, decl, target_context, x, builtin;
1924 switch (TREE_CODE (t))
1928 T.1 = &CHAIN->tramp;
1929 T.2 = __builtin_adjust_trampoline (T.1);
1930 T.3 = (func_type)T.2;
1933 decl = TREE_OPERAND (t, 0);
1934 if (TREE_CODE (decl) != FUNCTION_DECL)
1937 /* Only need to process nested functions. */
1938 target_context = decl_function_context (decl);
1939 if (!target_context)
1942 /* If the nested function doesn't use a static chain, then
1943 it doesn't need a trampoline. */
1944 if (!DECL_STATIC_CHAIN (decl))
1947 /* If we don't want a trampoline, then don't build one. */
1948 if (TREE_NO_TRAMPOLINE (t))
1951 /* Lookup the immediate parent of the callee, as that's where
1952 we need to insert the trampoline. */
1953 for (i = info; i->context != target_context; i = i->outer)
1955 x = lookup_tramp_for_decl (i, decl, INSERT);
1957 /* Compute the address of the field holding the trampoline. */
1958 x = get_frame_field (info, target_context, x, &wi->gsi);
1959 x = build_addr (x, target_context);
1960 x = gsi_gimplify_val (info, x, &wi->gsi);
1962 /* Do machine-specific ugliness. Normally this will involve
1963 computing extra alignment, but it can really be anything. */
1964 builtin = builtin_decl_implicit (BUILT_IN_ADJUST_TRAMPOLINE);
1965 call = gimple_build_call (builtin, 1, x);
1966 x = init_tmp_var_with_call (info, &wi->gsi, call);
1968 /* Cast back to the proper function type. */
1969 x = build1 (NOP_EXPR, TREE_TYPE (t), x);
1970 x = init_tmp_var (info, x, &wi->gsi);
/* For non-decl, non-type trees, keep walking subtrees.  */
1976 if (!IS_TYPE_OR_DECL_P (t))
1985 /* Called via walk_function+walk_gimple_stmt, rewrite all references
1986 to addresses of nested functions that require the use of
1987 trampolines. The rewrite will involve a reference a trampoline
1988 generated for the occasion. */
1991 convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1992 struct walk_stmt_info *wi)
1994 struct nesting_info *info = (struct nesting_info *) wi->info;
1995 gimple stmt = gsi_stmt (*gsi);
1997 switch (gimple_code (stmt))
2001 /* Only walk call arguments, lest we generate trampolines for
2003 unsigned long i, nargs = gimple_call_num_args (stmt);
2004 for (i = 0; i < nargs; i++)
2005 walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
/* OMP parallel/task: walk operands, then the region body with a
   fresh local-var chain so new temporaries are declared inside it.  */
2010 case GIMPLE_OMP_PARALLEL:
2011 case GIMPLE_OMP_TASK:
2013 tree save_local_var_chain;
2014 walk_gimple_op (stmt, convert_tramp_reference_op, wi);
2015 save_local_var_chain = info->new_local_var_chain;
2016 info->new_local_var_chain = NULL;
2017 walk_body (convert_tramp_reference_stmt, convert_tramp_reference_op,
2018 info, gimple_omp_body_ptr (stmt));
2019 if (info->new_local_var_chain)
2020 declare_vars (info->new_local_var_chain,
2021 gimple_seq_first_stmt (gimple_omp_body (stmt)),
2023 info->new_local_var_chain = save_local_var_chain;
2028 *handled_ops_p = false;
2033 *handled_ops_p = true;
2039 /* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
2040 that reference nested functions to make sure that the static chain
2041 is set up properly for the call. */
2044 convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2045 struct walk_stmt_info *wi)
2047 struct nesting_info *const info = (struct nesting_info *) wi->info;
2048 tree decl, target_context;
2049 char save_static_chain_added;
2051 gimple stmt = gsi_stmt (*gsi);
2053 switch (gimple_code (stmt))
/* Direct calls to nested functions that use a static chain get the
   appropriate chain expression attached.  */
2056 if (gimple_call_chain (stmt))
2058 decl = gimple_call_fndecl (stmt);
2061 target_context = decl_function_context (decl);
2062 if (target_context && DECL_STATIC_CHAIN (decl))
2064 gimple_call_set_chain (stmt, get_static_chain (info, target_context,
/* Record whether the chain came from this frame (bit 0) or an outer
   one (bit 1).  */
2066 info->static_chain_added |= (1 << (info->context != target_context));
/* OMP parallel/task: walk the body and, for each chain/frame decl the
   body ended up needing, add a clause so the outlined function
   receives it — unless an equivalent clause already exists.  */
2070 case GIMPLE_OMP_PARALLEL:
2071 case GIMPLE_OMP_TASK:
2072 save_static_chain_added = info->static_chain_added;
2073 info->static_chain_added = 0;
2074 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2075 for (i = 0; i < 2; i++)
2078 if ((info->static_chain_added & (1 << i)) == 0)
2080 decl = i ? get_chain_decl (info) : info->frame_decl;
2081 /* Don't add CHAIN.* or FRAME.* twice. */
2082 for (c = gimple_omp_taskreg_clauses (stmt);
2084 c = OMP_CLAUSE_CHAIN (c))
2085 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
2086 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
2087 && OMP_CLAUSE_DECL (c) == decl)
/* Chain decl is firstprivate (copied in); frame decl is shared.  */
2091 c = build_omp_clause (gimple_location (stmt),
2092 i ? OMP_CLAUSE_FIRSTPRIVATE
2093 : OMP_CLAUSE_SHARED);
2094 OMP_CLAUSE_DECL (c) = decl;
2095 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2096 gimple_omp_taskreg_set_clauses (stmt, c);
2099 info->static_chain_added |= save_static_chain_added;
/* OMP_FOR: also walk the pre-body sequence.  */
2102 case GIMPLE_OMP_FOR:
2103 walk_body (convert_gimple_call, NULL, info,
2104 gimple_omp_for_pre_body_ptr (stmt));
2106 case GIMPLE_OMP_SECTIONS:
2107 case GIMPLE_OMP_SECTION:
2108 case GIMPLE_OMP_SINGLE:
2109 case GIMPLE_OMP_TARGET:
2110 case GIMPLE_OMP_TEAMS:
2111 case GIMPLE_OMP_MASTER:
2112 case GIMPLE_OMP_TASKGROUP:
2113 case GIMPLE_OMP_ORDERED:
2114 case GIMPLE_OMP_CRITICAL:
2115 walk_body (convert_gimple_call, NULL, info, gimple_omp_body_ptr (stmt));
2119 /* Keep looking for other operands. */
2120 *handled_ops_p = false;
2124 *handled_ops_p = true;
2128 /* Walk the nesting tree starting with ROOT. Convert all trampolines and
2129 call expressions. At the same time, determine if a nested function
2130 actually uses its static chain; if not, remember that. */
2133 convert_all_function_calls (struct nesting_info *root)
2135 unsigned int chain_count = 0, old_chain_count, iter_count;
2136 struct nesting_info *n;
2138 /* First, optimistically clear static_chain for all decls that haven't
2139 used the static chain already for variable access. */
2140 FOR_EACH_NEST_INFO (n, root)
2142 tree decl = n->context;
2143 if (!n->outer || (!n->chain_decl && !n->chain_field))
2145 DECL_STATIC_CHAIN (decl) = 0;
2146 if (dump_file && (dump_flags & TDF_DETAILS))
2147 fprintf (dump_file, "Guessing no static-chain for %s\n",
2148 lang_hooks.decl_printable_name (decl, 2));
2151 DECL_STATIC_CHAIN (decl) = 1;
2152 chain_count += DECL_STATIC_CHAIN (decl);
2155 /* Walk the functions and perform transformations. Note that these
2156 transformations can induce new uses of the static chain, which in turn
2157 require re-examining all users of the decl. */
2158 /* ??? It would make sense to try to use the call graph to speed this up,
2159 but the call graph hasn't really been built yet. Even if it did, we
2160 would still need to iterate in this loop since address-of references
2161 wouldn't show up in the callgraph anyway. */
/* Iterate to a fixed point: stop once a pass leaves the number of
   functions needing a static chain unchanged.  */
2165 old_chain_count = chain_count;
2169 if (dump_file && (dump_flags & TDF_DETAILS))
2170 fputc ('\n', dump_file);
2172 FOR_EACH_NEST_INFO (n, root)
2174 tree decl = n->context;
2175 walk_function (convert_tramp_reference_stmt,
2176 convert_tramp_reference_op, n);
2177 walk_function (convert_gimple_call, NULL, n);
2178 chain_count += DECL_STATIC_CHAIN (decl);
2181 while (chain_count != old_chain_count);
2183 if (dump_file && (dump_flags & TDF_DETAILS))
2184 fprintf (dump_file, "convert_all_function_calls iterations: %u\n\n",
/* Copy-body callback data extended with the nesting_info whose var_map
   drives decl remapping (used by nesting_copy_decl below).  */
2188 struct nesting_copy_body_data
2191 struct nesting_info *root;
2194 /* A helper subroutine for debug_var_chain type remapping. */

/* copy_decl hook: return the mapping for DECL recorded in the root's
   var_map if there is one; otherwise copy TYPE_DECLs (remapping their
   original type) and leave variable-like decls unchanged.  */
2197 nesting_copy_decl (tree decl, copy_body_data *id)
2199 struct nesting_copy_body_data *nid = (struct nesting_copy_body_data *) id;
2200 void **slot = pointer_map_contains (nid->root->var_map, decl);
2203 return (tree) *slot;
/* TYPE_DECLs with an original type need that type remapped too.  */
2205 if (TREE_CODE (decl) == TYPE_DECL && DECL_ORIGINAL_TYPE (decl))
2207 tree new_decl = copy_decl_no_change (decl, id);
2208 DECL_ORIGINAL_TYPE (new_decl)
2209 = remap_type (DECL_ORIGINAL_TYPE (decl), id);
2213 if (TREE_CODE (decl) == VAR_DECL
2214 || TREE_CODE (decl) == PARM_DECL
2215 || TREE_CODE (decl) == RESULT_DECL
2218 return copy_decl_no_change (decl, id);
2221 /* A helper function for remap_vla_decls. See if *TP contains
2222 some remapped variables. */

/* walk_tree callback: return the remapped tree if *TP is present in
   ROOT's var_map (terminating the walk), NULL_TREE otherwise.  */
2225 contains_remapped_vars (tree *tp, int *walk_subtrees, void *data)
2227 struct nesting_info *root = (struct nesting_info *) data;
2234 slot = pointer_map_contains (root->var_map, t);
2237 return (tree) *slot;
2242 /* Remap VLA decls in BLOCK and subblocks if remapped variables are
2246 remap_vla_decls (tree block, struct nesting_info *root)
2248 tree var, subblock, val, type;
2249 struct nesting_copy_body_data id;
/* Depth-first over the lexical block tree.  */
2251 for (subblock = BLOCK_SUBBLOCKS (block);
2253 subblock = BLOCK_CHAIN (subblock))
2254 remap_vla_decls (subblock, root);
/* First pass: find whether any VLA value expression in this block
   mentions a remapped variable; bail out early if none does.  */
2256 for (var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
2257 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2259 val = DECL_VALUE_EXPR (var);
2260 type = TREE_TYPE (var);
2262 if (!(TREE_CODE (val) == INDIRECT_REF
2263 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2264 && variably_modified_type_p (type, NULL)))
2267 if (pointer_map_contains (root->var_map, TREE_OPERAND (val, 0))
2268 || walk_tree (&type, contains_remapped_vars, root, NULL))
2272 if (var == NULL_TREE)
/* Set up the copy-body machinery with our decl-remapping hook.  */
2275 memset (&id, 0, sizeof (id));
2276 id.cb.copy_decl = nesting_copy_decl;
2277 id.cb.decl_map = pointer_map_create ();
/* Second pass: actually remap the affected VLA decls.  */
2280 for (; var; var = DECL_CHAIN (var))
2281 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
2283 struct nesting_info *i;
2287 val = DECL_VALUE_EXPR (var);
2288 type = TREE_TYPE (var);
2290 if (!(TREE_CODE (val) == INDIRECT_REF
2291 && TREE_CODE (TREE_OPERAND (val, 0)) == VAR_DECL
2292 && variably_modified_type_p (type, NULL)))
2295 slot = pointer_map_contains (root->var_map, TREE_OPERAND (val, 0));
2296 if (!slot && !walk_tree (&type, contains_remapped_vars, root, NULL))
/* Find the nesting level the VLA decl belongs to.  */
2299 context = decl_function_context (var);
2300 for (i = root; i; i = i->outer)
2301 if (i->context == context)
2307 /* Fully expand value expressions. This avoids having debug variables
2308 only referenced from them and that can be swept during GC. */
2311 tree t = (tree) *slot;
2312 gcc_assert (DECL_P (t) && DECL_HAS_VALUE_EXPR_P (t));
2313 val = build1 (INDIRECT_REF, TREE_TYPE (val), DECL_VALUE_EXPR (t));
2316 id.cb.src_fn = i->context;
2317 id.cb.dst_fn = i->context;
2318 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
/* Remap the type, then — if the pointed-to type kept its name and
   that name needs remapping — remap the TYPE_NAME as well.  */
2320 TREE_TYPE (var) = newt = remap_type (type, &id.cb);
2321 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2323 newt = TREE_TYPE (newt);
2324 type = TREE_TYPE (type);
2326 if (TYPE_NAME (newt)
2327 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2328 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2330 && TYPE_NAME (newt) == TYPE_NAME (type))
2331 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
/* Remap the value expression itself and install it if changed.  */
2333 walk_tree (&val, copy_tree_body_r, &id.cb, NULL);
2334 if (val != DECL_VALUE_EXPR (var))
2335 SET_DECL_VALUE_EXPR (var, val);
2338 pointer_map_destroy (id.cb.decl_map);
2341 /* Fold the MEM_REF *E. */

/* pointer_set traversal callback: fold the MEM_REF recorded at *E in
   place (deferred from convert_local_reference_op, where the chain
   record type was not yet finalized).  DATA is unused.  */
2343 fold_mem_refs (const void *e, void *data ATTRIBUTE_UNUSED)
2345 tree *ref_p = CONST_CAST2 (tree *, const tree *, (const tree *)e);
2346 *ref_p = fold (*ref_p);
2350 /* Do "everything else" to clean up or complete state collected by the
2351 various walking passes -- lay out the types and decls, generate code
2352 to initialize the frame decl, store critical expressions in the
2353 struct function for rtl to find. */
2356 finalize_nesting_tree_1 (struct nesting_info *root)
2358 gimple_seq stmt_list;
2360 tree context = root->context;
2361 struct function *sf;
2365 /* If we created a non-local frame type or decl, we need to lay them
2366 out at this time. */
2367 if (root->frame_type)
2369 /* In some cases the frame type will trigger the -Wpadded warning.
2370 This is not helpful; suppress it. */
2371 int save_warn_padded = warn_padded;
2375 layout_type (root->frame_type);
2376 warn_padded = save_warn_padded;
2377 layout_decl (root->frame_decl, 0);
2379 /* Remove root->frame_decl from root->new_local_var_chain, so
2380 that we can declare it also in the lexical blocks, which
2381 helps ensure virtual regs that end up appearing in its RTL
2382 expression get substituted in instantiate_virtual_regs(). */
2383 for (adjust = &root->new_local_var_chain;
2384 *adjust != root->frame_decl;
2385 adjust = &DECL_CHAIN (*adjust))
2386 gcc_assert (DECL_CHAIN (*adjust));
2387 *adjust = DECL_CHAIN (*adjust);
2389 DECL_CHAIN (root->frame_decl) = NULL_TREE;
2390 declare_vars (root->frame_decl,
2391 gimple_seq_first_stmt (gimple_body (context)), true);
2394 /* If any parameters were referenced non-locally, then we need to
2395 insert a copy. Likewise, if any variables were referenced by
2396 pointer, we need to initialize the address. */
2397 if (root->any_parm_remapped)
2400 for (p = DECL_ARGUMENTS (context); p ; p = DECL_CHAIN (p))
2404 field = lookup_field_for_decl (root, p, NO_INSERT);
2408 if (use_pointer_in_frame (p))
2409 x = build_addr (p, context);
2413 y = build3 (COMPONENT_REF, TREE_TYPE (field),
2414 root->frame_decl, field, NULL_TREE);
2415 stmt = gimple_build_assign (y, x);
2416 gimple_seq_add_stmt (&stmt_list, stmt);
2417 /* If the assignment is from a non-register the stmt is
2418 not valid gimple. Make it so by using a temporary instead. */
2419 if (!is_gimple_reg (x)
2420 && is_gimple_reg_type (TREE_TYPE (x)))
2422 gimple_stmt_iterator gsi = gsi_last (stmt_list);
2423 x = init_tmp_var (root, x, &gsi);
2424 gimple_assign_set_rhs1 (stmt, x);
2429 /* If a chain_field was created, then it needs to be initialized
2431 if (root->chain_field)
2433 tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
2434 root->frame_decl, root->chain_field, NULL_TREE);
2435 stmt = gimple_build_assign (x, get_chain_decl (root));
2436 gimple_seq_add_stmt (&stmt_list, stmt);
2439 /* If trampolines were created, then we need to initialize them. */
2440 if (root->any_tramp_created)
2442 struct nesting_info *i;
2443 for (i = root->inner; i ; i = i->next)
2445 tree arg1, arg2, arg3, x, field;
2447 field = lookup_tramp_for_decl (root, i->context, NO_INSERT);
2451 gcc_assert (DECL_STATIC_CHAIN (i->context));
2452 arg3 = build_addr (root->frame_decl, context);
2454 arg2 = build_addr (i->context, context);
2456 x = build3 (COMPONENT_REF, TREE_TYPE (field),
2457 root->frame_decl, field, NULL_TREE);
2458 arg1 = build_addr (x, context);
2460 x = builtin_decl_implicit (BUILT_IN_INIT_TRAMPOLINE);
2461 stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
2462 gimple_seq_add_stmt (&stmt_list, stmt);
2466 /* If we created initialization statements, insert them. */
2470 annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
2471 bind = gimple_seq_first_stmt (gimple_body (context));
2472 gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
2473 gimple_bind_set_body (bind, stmt_list);
2476 /* If a chain_decl was created, then it needs to be registered with
2477 struct function so that it gets initialized from the static chain
2478 register at the beginning of the function. */
2479 sf = DECL_STRUCT_FUNCTION (root->context);
2480 sf->static_chain_decl = root->chain_decl;
2482 /* Similarly for the non-local goto save area. */
2483 if (root->nl_goto_field)
2485 sf->nonlocal_goto_save_area
2486 = get_frame_field (root, context, root->nl_goto_field, NULL);
2487 sf->has_nonlocal_label = 1;
2490 /* Make sure all new local variables get inserted into the
2491 proper BIND_EXPR. */
2492 if (root->new_local_var_chain)
2493 declare_vars (root->new_local_var_chain,
2494 gimple_seq_first_stmt (gimple_body (root->context)),
2497 if (root->debug_var_chain)
2502 remap_vla_decls (DECL_INITIAL (root->context), root);
2504 for (debug_var = root->debug_var_chain; debug_var;
2505 debug_var = DECL_CHAIN (debug_var))
2506 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2509 /* If there are any debug decls with variable length types,
2510 remap those types using other debug_var_chain variables. */
2513 struct nesting_copy_body_data id;
2515 memset (&id, 0, sizeof (id));
2516 id.cb.copy_decl = nesting_copy_decl;
2517 id.cb.decl_map = pointer_map_create ();
2520 for (; debug_var; debug_var = DECL_CHAIN (debug_var))
2521 if (variably_modified_type_p (TREE_TYPE (debug_var), NULL))
2523 tree type = TREE_TYPE (debug_var);
2524 tree newt, t = type;
2525 struct nesting_info *i;
2527 for (i = root; i; i = i->outer)
2528 if (variably_modified_type_p (type, i->context))
2534 id.cb.src_fn = i->context;
2535 id.cb.dst_fn = i->context;
2536 id.cb.src_cfun = DECL_STRUCT_FUNCTION (root->context);
2538 TREE_TYPE (debug_var) = newt = remap_type (type, &id.cb);
2539 while (POINTER_TYPE_P (newt) && !TYPE_NAME (newt))
2541 newt = TREE_TYPE (newt);
2544 if (TYPE_NAME (newt)
2545 && TREE_CODE (TYPE_NAME (newt)) == TYPE_DECL
2546 && DECL_ORIGINAL_TYPE (TYPE_NAME (newt))
2548 && TYPE_NAME (newt) == TYPE_NAME (t))
2549 TYPE_NAME (newt) = remap_decl (TYPE_NAME (newt), &id.cb);
2552 pointer_map_destroy (id.cb.decl_map);
2555 scope = gimple_seq_first_stmt (gimple_body (root->context));
2556 if (gimple_bind_block (scope))
2557 declare_vars (root->debug_var_chain, scope, true);
2559 BLOCK_VARS (DECL_INITIAL (root->context))
2560 = chainon (BLOCK_VARS (DECL_INITIAL (root->context)),
2561 root->debug_var_chain);
2564 /* Fold the rewritten MEM_REF trees. */
2565 pointer_set_traverse (root->mem_refs, fold_mem_refs, NULL);
2567 /* Dump the translated tree function. */
2570 fputs ("\n\n", dump_file);
2571 dump_function_to_file (root->context, dump_file, dump_flags);
2576 finalize_nesting_tree (struct nesting_info *root)
2578 struct nesting_info *n;
2579 FOR_EACH_NEST_INFO (n, root)
2580 finalize_nesting_tree_1 (n);
2583 /* Unnest the nodes and pass them to cgraph. */
2586 unnest_nesting_tree_1 (struct nesting_info *root)
2588 struct cgraph_node *node = cgraph_get_node (root->context);
2590 /* For nested functions update the cgraph to reflect unnesting.
2591 We also delay finalizing of these functions up to this point. */
2594 cgraph_unnest_node (node);
2595 cgraph_finalize_function (root->context, true);
2600 unnest_nesting_tree (struct nesting_info *root)
2602 struct nesting_info *n;
2603 FOR_EACH_NEST_INFO (n, root)
2604 unnest_nesting_tree_1 (n);
2607 /* Free the data structures allocated during this pass. */
2610 free_nesting_tree (struct nesting_info *root)
2612 struct nesting_info *node, *next;
2614 node = iter_nestinfo_start (root);
2617 next = iter_nestinfo_next (node);
2618 pointer_map_destroy (node->var_map);
2619 pointer_map_destroy (node->field_map);
2620 pointer_set_destroy (node->mem_refs);
2627 /* Gimplify a function and all its nested functions. */
2629 gimplify_all_functions (struct cgraph_node *root)
2631 struct cgraph_node *iter;
2632 if (!gimple_body (root->symbol.decl))
2633 gimplify_function_tree (root->symbol.decl);
2634 for (iter = root->nested; iter; iter = iter->next_nested)
2635 gimplify_all_functions (iter);
2638 /* Main entry point for this pass. Process FNDECL and all of its nested
2639 subroutines and turn them into something less tightly bound. */
2642 lower_nested_functions (tree fndecl)
2644 struct cgraph_node *cgn;
2645 struct nesting_info *root;
2647 /* If there are no nested functions, there's nothing to do. */
2648 cgn = cgraph_get_node (fndecl);
2652 gimplify_all_functions (cgn);
2654 dump_file = dump_begin (TDI_nested, &dump_flags);
2656 fprintf (dump_file, "\n;; Function %s\n\n",
2657 lang_hooks.decl_printable_name (fndecl, 2));
2659 bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
2660 root = create_nesting_tree (cgn);
2662 walk_all_functions (convert_nonlocal_reference_stmt,
2663 convert_nonlocal_reference_op,
2665 walk_all_functions (convert_local_reference_stmt,
2666 convert_local_reference_op,
2668 walk_all_functions (convert_nl_goto_reference, NULL, root);
2669 walk_all_functions (convert_nl_goto_receiver, NULL, root);
2671 convert_all_function_calls (root);
2672 finalize_nesting_tree (root);
2673 unnest_nesting_tree (root);
2675 free_nesting_tree (root);
2676 bitmap_obstack_release (&nesting_info_bitmap_obstack);
2680 dump_end (TDI_nested, dump_file);
2685 #include "gt-tree-nested.h"