/* Language independent return value optimizations
   Copyright (C) 2004-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "internal-fn.h"

/* This file implements return value optimizations for functions which
   return aggregate types.

   Basically this pass searches the function for return statements which
   return a local aggregate.  When converted to RTL such statements will
   generate a copy from the local aggregate to final return value destination
   mandated by the target's ABI.

   That copy can often be avoided by directly constructing the return value
   into the final destination mandated by the target's ABI.

   This is basically a generic equivalent to the C++ front-end's
   Named Return Value optimization.  */

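/* As an illustrative sketch only (this example function is hypothetical and
   not part of GCC itself), consider a C function returning a large
   aggregate:

     struct big { int data[64]; };

     struct big
     make_big (int seed)
     {
       struct big tmp;
       for (int i = 0; i < 64; i++)
         tmp.data[i] = seed + i;
       return tmp;
     }

   Without this pass, TMP is built in its own local slot and then copied
   into the return slot the caller provides; after the optimization TMP is
   constructed directly in that return slot and the copy disappears.  */
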
struct nrv_data_t
{
  /* This is the temporary (a VAR_DECL) which appears in all of
     this function's RETURN_EXPR statements.  */
  tree var;

  /* This is the function's RESULT_DECL.  We will replace all occurrences
     of VAR with RESULT_DECL when we apply this optimization.  */
  tree result;
};

static tree finalize_nrv_r (tree *, int *, void *);

/* Callback for the tree walker.

   If TP refers to a RETURN_EXPR, then set the expression being returned
   to nrv_data->result.

   If TP refers to nrv_data->var, then replace nrv_data->var with
   nrv_data->result.

   If we reach a node where we know all the subtrees are uninteresting,
   then set *WALK_SUBTREES to zero.  */

static tree
finalize_nrv_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nrv_data_t *dp = (struct nrv_data_t *) wi->info;

  /* No need to walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  /* Otherwise replace all occurrences of VAR with RESULT.  */
  else if (*tp == dp->var)
    *tp = dp->result;

  /* Keep iterating.  */
  return NULL_TREE;
}

/* Main entry point for return value optimizations.

   If this function always returns the same local variable, and that
   local variable is an aggregate type, then replace the variable with
   the function's DECL_RESULT.

   This is the equivalent of the C++ named return value optimization
   applied to optimized trees in a language independent form.  If we
   ever encounter languages which prevent this kind of optimization,
   then we could either have the languages register the optimization or
   we could change the gating function to check the current language.  */

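/* A rough sketch of the effect at the GIMPLE level (illustrative only; the
   exact dump depends on the source and on the target ABI).  Before this pass
   a function returning a local aggregate LOCAL looks roughly like

     local.data[0] = 1;
     <retval> = local;
     return <retval>;

   and afterwards LOCAL has been replaced by the RESULT_DECL and the
   now-redundant copy removed:

     <retval>.data[0] = 1;
     return <retval>;  */
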
namespace {

const pass_data pass_data_nrv =
{
  GIMPLE_PASS, /* type */
  "nrv", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_NRV, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nrv : public gimple_opt_pass
{
public:
  pass_nrv (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nrv, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override { return optimize > 0; }

  unsigned int execute (function *) final override;

}; // class pass_nrv

unsigned int
pass_nrv::execute (function *fun)
{
  tree result = DECL_RESULT (current_function_decl);
  tree result_type = TREE_TYPE (result);
  tree found = NULL;
  basic_block bb;
  gimple_stmt_iterator gsi;
  struct nrv_data_t data;

  /* If this function does not return an aggregate type in memory, then
     there is nothing to do.  */
  if (!aggregate_value_p (result, current_function_decl))
    return 0;

  /* If a GIMPLE type is returned in memory, finalize_nrv_r might create
     non-GIMPLE.  */
  if (is_gimple_reg_type (result_type))
    return 0;

  /* If the front end already did something like this, don't do it here.  */
  if (DECL_NAME (result))
    return 0;

  /* If the result has its address taken then it might be modified
     by means not detected in the following loop.  Bail out in this
     case.  */
  if (TREE_ADDRESSABLE (result))
    return 0;

  /* Look through each block for assignments to the RESULT_DECL.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          tree ret_val;

          if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              /* In a function with an aggregate return value, the
                 gimplifier has changed all non-empty RETURN_EXPRs to
                 return the RESULT_DECL.  */
              ret_val = gimple_return_retval (return_stmt);
              if (ret_val)
                gcc_assert (ret_val == result);
            }
          else if (gimple_has_lhs (stmt)
                   && gimple_get_lhs (stmt) == result)
            {
              tree rhs;

              if (!gimple_assign_copy_p (stmt))
                return 0;

              rhs = gimple_assign_rhs1 (stmt);

              /* Now verify that this return statement uses the same value
                 as any previously encountered return statement.  */
              if (found != NULL)
                {
                  /* If we found a return statement using a different variable
                     than previous return statements, then we cannot perform
                     NRV optimizations.  */
                  if (found != rhs)
                    return 0;
                }
              else
                found = rhs;

              /* The returned value must be a local automatic variable of the
                 same type and alignment as the function's result.  */
              if (!VAR_P (found)
                  || TREE_THIS_VOLATILE (found)
                  || !auto_var_in_fn_p (found, current_function_decl)
                  || TREE_ADDRESSABLE (found)
                  || DECL_ALIGN (found) > DECL_ALIGN (result)
                  || !useless_type_conversion_p (result_type,
                                                 TREE_TYPE (found)))
                return 0;
            }
          else if (gimple_has_lhs (stmt))
            {
              tree addr = get_base_address (gimple_get_lhs (stmt));

              /* If there's any MODIFY of a component of RESULT,
                 then bail out.  */
              if (addr && addr == result)
                return 0;
            }
        }
    }

  if (!found)
    return 0;

  /* If dumping details, then note the NRV replacement.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "NRV Replaced: ");
      print_generic_expr (dump_file, found, dump_flags);
      fprintf (dump_file, " with: ");
      print_generic_expr (dump_file, result, dump_flags);
      fprintf (dump_file, "\n");
    }

  TREE_ADDRESSABLE (result) |= TREE_ADDRESSABLE (found);

  /* Now walk through the function changing all references to VAR to be
     RESULT.  */
  data.var = found;
  data.result = result;
  FOR_EACH_BB_FN (bb, fun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple *stmt = gsi_stmt (gsi);
          /* If this is a copy from VAR to RESULT, remove it.  */
          if (gimple_assign_copy_p (stmt)
              && gimple_assign_lhs (stmt) == result
              && gimple_assign_rhs1 (stmt) == found)
            {
              unlink_stmt_vdef (stmt);
              gsi_remove (&gsi, true);
              release_defs (stmt);
            }
          else
            {
              struct walk_stmt_info wi;
              memset (&wi, 0, sizeof (wi));
              wi.info = &data;
              walk_gimple_op (stmt, finalize_nrv_r, &wi);
              gsi_next (&gsi);
            }
        }
    }

  /* FOUND is now just another name for RESULT; record that so any remaining
     references to it (e.g. in debug information) resolve to RESULT.  */
  SET_DECL_VALUE_EXPR (found, result);
  DECL_HAS_VALUE_EXPR_P (found) = 1;

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nrv (gcc::context *ctxt)
{
  return new pass_nrv (ctxt);
}

/* Determine (pessimistically) whether DEST is available for NRV
   optimization, where DEST is expected to be the LHS of a modify
   expression where the RHS is a function returning an aggregate.

   DEST is available if it is not clobbered or used by the call.  */

static bool
dest_safe_for_nrv_p (gcall *call)
{
  tree dest = gimple_call_lhs (call);

  dest = get_base_address (dest);
  if (!dest)
    return false;

  if (TREE_CODE (dest) == SSA_NAME)
    return true;

  if (call_may_clobber_ref_p (call, dest, false)
      || ref_maybe_used_by_stmt_p (call, dest, false))
    return false;

  return true;
}

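/* For illustration only (hypothetical caller code, not part of GCC): given

     struct big x;
     x = make_big_into (&x);

   the address of X is passed to the call, so the call may read or clobber X
   while it runs; dest_safe_for_nrv_p returns false and X does not share the
   return slot.  For a plain

     struct big y;
     y = make_big (1);

   nothing else can touch Y during the call, so the return slot optimization
   below may pass the address of Y as the return slot directly.  */
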
/* Walk through the function looking for GIMPLE_ASSIGNs with calls that
   return in memory on the RHS.  For each of these, determine whether it is
   safe to pass the address of the LHS as the return slot, and mark the
   call appropriately if so.

   The NRV shares the return slot with a local variable in the callee; this
   optimization shares the return slot with the target of the call within
   the caller.  If the NRV is performed (which we can't know in general),
   this optimization is safe if the address of the target has not
   escaped prior to the call.  If it has, modifications to the local
   variable will produce visible changes elsewhere, as in PR c++/19317.  */

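/* A sketch of the unsafe case (illustrative only; this is not the actual
   PR c++/19317 test case):

     struct big x;
     struct big *global_p = &x;   // address of X escapes
     ...
     x = make_big (1);            // callee may inspect *global_p

   If the callee's local NRV variable were constructed directly in X's
   storage, its partially built state would be visible through GLOBAL_P
   during the call, so the return slot must not be shared here.  */
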
namespace {

const pass_data pass_data_return_slot =
{
  GIMPLE_PASS, /* type */
  "retslot", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_return_slot : public gimple_opt_pass
{
public:
  pass_return_slot (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_return_slot, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override;

}; // class pass_return_slot

unsigned int
pass_return_slot::execute (function *fun)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gcall *stmt;
          bool slot_opt_p;

          stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
          if (stmt
              && gimple_call_lhs (stmt)
              && !gimple_call_return_slot_opt_p (stmt)
              /* Ignore internal functions, those are expanded specially
                 and aggregate_value_p on their result might result in
                 undesirable warnings with some backends.  */
              && !gimple_call_internal_p (stmt)
              && aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
                                    gimple_call_fndecl (stmt)))
            {
              /* Check if the location being assigned to is
                 clobbered by the call.  */
              slot_opt_p = dest_safe_for_nrv_p (stmt);
              gimple_call_set_return_slot_opt (stmt, slot_opt_p);
            }
        }
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_return_slot (gcc::context *ctxt)
{
  return new pass_return_slot (ctxt);
}