gcc/tree-nrv.cc
/* Language independent return value optimizations
   Copyright (C) 2004-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-pretty-print.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "internal-fn.h"
/* This file implements return value optimizations for functions which
   return aggregate types.

   Basically this pass searches the function for return statements which
   return a local aggregate.  When converted to RTL such statements will
   generate a copy from the local aggregate to the final return value
   destination mandated by the target's ABI.

   That copy can often be avoided by directly constructing the return value
   into the final destination mandated by the target's ABI.

   This is basically a generic equivalent to the C++ front-end's
   Named Return Value optimization.  */
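/* As an illustrative sketch (not part of the original sources; all names
   are invented for the example), this is the kind of candidate the pass
   looks for:

     struct big { int data[64]; };

     struct big
     make_big (int seed)
     {
       struct big tmp;                 // local aggregate
       for (int i = 0; i < 64; i++)
         tmp.data[i] = seed + i;
       return tmp;                     // every return names TMP
     }

   Without NRV, TMP is copied into the caller-provided return slot at each
   return; with NRV, TMP is replaced by the RESULT_DECL and is constructed
   directly in that slot.  */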
struct nrv_data_t
{
  /* This is the temporary (a VAR_DECL) which appears in all of
     this function's RETURN_EXPR statements.  */
  tree var;

  /* This is the function's RESULT_DECL.  We will replace all occurrences
     of VAR with RESULT_DECL when we apply this optimization.  */
  tree result;
  int modified;
};

static tree finalize_nrv_r (tree *, int *, void *);
/* Callback for the tree walker.

   If TP refers to a RETURN_EXPR, then set the expression being returned
   to nrv_data->result.

   If TP refers to nrv_data->var, then replace nrv_data->var with
   nrv_data->result.

   If we reach a node where we know all the subtrees are uninteresting,
   then set *WALK_SUBTREES to zero.  */
static tree
finalize_nrv_r (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct nrv_data_t *dp = (struct nrv_data_t *) wi->info;

  /* No need to walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  /* Otherwise replace all occurrences of VAR with RESULT.  */
  else if (*tp == dp->var)
    {
      *tp = dp->result;
      dp->modified = 1;
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Main entry point for return value optimizations.

   If this function always returns the same local variable, and that
   local variable is an aggregate type, then replace the variable with
   the function's DECL_RESULT.

   This is the equivalent of the C++ named return value optimization
   applied to optimized trees in a language independent form.  If we
   ever encounter languages which prevent this kind of optimization,
   then we could either have the languages register the optimization or
   we could change the gating function to check the current language.  */
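/* Conversely, a sketch of a function where the optimization must be
   rejected (again illustrative only; the names are invented):

     struct big
     pick (int flag)
     {
       struct big a = { { 1 } }, b = { { 2 } };
       if (flag)
         return a;   // one return copies from A ...
       return b;     // ... the other from B, so no single variable works
     }

   Because the returns do not all name the same local variable, the scan
   loop in pass_nrv::execute below bails out when FOUND != RHS.  */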
namespace {

const pass_data pass_data_nrv =
{
  GIMPLE_PASS, /* type */
  "nrv", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_NRV, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_nrv : public gimple_opt_pass
{
public:
  pass_nrv (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nrv, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override { return optimize > 0; }

  unsigned int execute (function *) final override;

}; // class pass_nrv
unsigned int
pass_nrv::execute (function *fun)
{
  tree result = DECL_RESULT (current_function_decl);
  tree result_type = TREE_TYPE (result);
  tree found = NULL;
  basic_block bb;
  gimple_stmt_iterator gsi;
  struct nrv_data_t data;

  /* If this function does not return an aggregate type in memory, then
     there is nothing to do.  */
  if (!aggregate_value_p (result, current_function_decl))
    return 0;

  /* If a GIMPLE type is returned in memory, finalize_nrv_r might create
     non-GIMPLE.  */
  if (is_gimple_reg_type (result_type))
    return 0;

  /* If the front end already did something like this, don't do it here.  */
  if (DECL_NAME (result))
    return 0;

  /* If the result has its address taken then it might be modified
     by means not detected in the following loop.  Bail out in this
     case.  */
  if (TREE_ADDRESSABLE (result))
    return 0;
  /* Look through each block for assignments to the RESULT_DECL.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          tree ret_val;

          if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
            {
              /* In a function with an aggregate return value, the
                 gimplifier has changed all non-empty RETURN_EXPRs to
                 return the RESULT_DECL.  */
              ret_val = gimple_return_retval (return_stmt);
              if (ret_val)
                gcc_assert (ret_val == result);
            }
          else if (gimple_has_lhs (stmt)
                   && gimple_get_lhs (stmt) == result)
            {
              tree rhs;

              if (!gimple_assign_copy_p (stmt))
                return 0;

              rhs = gimple_assign_rhs1 (stmt);

              /* Now verify that this return statement uses the same value
                 as any previously encountered return statement.  */
              if (found != NULL)
                {
                  /* If we found a return statement using a different variable
                     than previous return statements, then we cannot perform
                     NRV optimizations.  */
                  if (found != rhs)
                    return 0;
                }
              else
                found = rhs;

              /* The returned value must be a local automatic variable of the
                 same type and alignment as the function's result.  */
              if (!VAR_P (found)
                  || TREE_THIS_VOLATILE (found)
                  || !auto_var_in_fn_p (found, current_function_decl)
                  || TREE_ADDRESSABLE (found)
                  || DECL_ALIGN (found) > DECL_ALIGN (result)
                  || !useless_type_conversion_p (result_type,
                                                 TREE_TYPE (found)))
                return 0;
            }
          else if (gimple_has_lhs (stmt))
            {
              tree addr = get_base_address (gimple_get_lhs (stmt));
              /* If there's any MODIFY of a component of RESULT,
                 then bail out.  */
              if (addr && addr == result)
                return 0;
            }
        }
    }
  if (!found)
    return 0;

  /* If dumping details, then note once and only once the NRV replacement.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "NRV Replaced: ");
      print_generic_expr (dump_file, found, dump_flags);
      fprintf (dump_file, "  with: ");
      print_generic_expr (dump_file, result, dump_flags);
      fprintf (dump_file, "\n");
    }
  TREE_ADDRESSABLE (result) |= TREE_ADDRESSABLE (found);

  /* Now walk through the function changing all references to VAR to be
     RESULT.  */
  data.var = found;
  data.result = result;
  FOR_EACH_BB_FN (bb, fun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple *stmt = gsi_stmt (gsi);
          /* If this is a copy from VAR to RESULT, remove it.  */
          if (gimple_assign_copy_p (stmt)
              && gimple_assign_lhs (stmt) == result
              && gimple_assign_rhs1 (stmt) == found)
            {
              unlink_stmt_vdef (stmt);
              gsi_remove (&gsi, true);
              release_defs (stmt);
            }
          else
            {
              struct walk_stmt_info wi;
              memset (&wi, 0, sizeof (wi));
              wi.info = &data;
              data.modified = 0;
              walk_gimple_op (stmt, finalize_nrv_r, &wi);
              if (data.modified)
                {
                  /* If this is a CLOBBER of VAR, remove it.  */
                  if (gimple_clobber_p (stmt))
                    {
                      unlink_stmt_vdef (stmt);
                      gsi_remove (&gsi, true);
                      release_defs (stmt);
                      continue;
                    }
                  update_stmt (stmt);
                }
              gsi_next (&gsi);
            }
        }
    }

  SET_DECL_VALUE_EXPR (found, result);
  DECL_HAS_VALUE_EXPR_P (found) = 1;

  return 0;
}
} // anon namespace

gimple_opt_pass *
make_pass_nrv (gcc::context *ctxt)
{
  return new pass_nrv (ctxt);
}
/* Determine (pessimistically) whether DEST is available for NRV
   optimization, where DEST is expected to be the LHS of a modify
   expression where the RHS is a function returning an aggregate.

   DEST is available if it is not clobbered or used by the call.  */

static bool
dest_safe_for_nrv_p (gcall *call)
{
  tree dest = gimple_call_lhs (call);

  dest = get_base_address (dest);
  if (! dest)
    return false;

  if (TREE_CODE (dest) == SSA_NAME)
    return true;

  if (call_may_clobber_ref_p (call, dest, false)
      || ref_maybe_used_by_stmt_p (call, dest, false))
    return false;

  return true;
}
/* Walk through the function looking for GIMPLE_ASSIGNs with calls that
   return in memory on the RHS.  For each of these, determine whether it is
   safe to pass the address of the LHS as the return slot, and mark the
   call appropriately if so.

   The NRV shares the return slot with a local variable in the callee; this
   optimization shares the return slot with the target of the call within
   the caller.  If the NRV is performed (which we can't know in general),
   this optimization is safe if the address of the target has not
   escaped prior to the call.  If it has, modifications to the local
   variable will produce visible changes elsewhere, as in PR c++/19317.  */
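/* A minimal sketch of that hazard (illustrative only; all names are
   invented for the example):

     struct big *global_p;

     void register_ptr (struct big *p) { global_p = p; }

     void caller (void)
     {
       struct big dest;
       register_ptr (&dest);   // address of DEST escapes here ...
       dest = make_big (1);    // ... so the call must not construct its
                               // NRV'd local directly in DEST: stores to
                               // it would be visible through *GLOBAL_P
                               // before the assignment completes.
     }

   dest_safe_for_nrv_p above catches this by asking the alias oracle
   whether the call may clobber or read DEST.  */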
namespace {

const pass_data pass_data_return_slot =
{
  GIMPLE_PASS, /* type */
  "retslot", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_return_slot : public gimple_opt_pass
{
public:
  pass_return_slot (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_return_slot, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override;

}; // class pass_return_slot
unsigned int
pass_return_slot::execute (function *fun)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gcall *stmt;
          bool slot_opt_p;

          stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
          if (stmt
              && gimple_call_lhs (stmt)
              && !gimple_call_return_slot_opt_p (stmt)
              /* Ignore internal functions, those are expanded specially
                 and aggregate_value_p on their result might result in
                 undesirable warnings with some backends.  */
              && !gimple_call_internal_p (stmt)
              && aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
                                    gimple_call_fndecl (stmt)))
            {
              /* Check if the location being assigned to is
                 clobbered by the call.  */
              slot_opt_p = dest_safe_for_nrv_p (stmt);
              gimple_call_set_return_slot_opt (stmt, slot_opt_p);
            }
        }
    }
  return 0;
}
} // anon namespace

gimple_opt_pass *
make_pass_return_slot (gcc::context *ctxt)
{
  return new pass_return_slot (ctxt);
}