/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "gimple-iterator.h"
#include "gimple-low.h"
#include "predict.h"
#include "gimple-predict.h"
#include "gimple-fold.h"
#include "cgraph.h"
#include "tree-ssa.h"
#include "value-range.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-inline.h"
#include "gimple-walk.h"
#include "attribs.h"
/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.cc:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
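
/* For illustration, a hand-written sketch (not actual gimplifier or
   dump output; the names are invented): a High GIMPLE body such as

       {
         int tmp;
         tmp = a + 1;
         return tmp;
       }

   becomes, after this pass, the flat Low GIMPLE sequence

       tmp = a + 1;
       return tmp;

   with the enclosing GIMPLE_BIND gone, tmp recorded in the function's
   local decls, and the lexical scope kept only in the BLOCK tree.  */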
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;
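
/* For example (an invented sketch, not real dump output), duplicate
   returns of the same value

       if (a) return x;
       ...
       return x;

   are lowered so the duplicates jump to one representative recorded
   in a return_statements_t:

       if (a) goto <retlab>;
       ...
       goto <retlab>;
       <retlab>:
       return x;  */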
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
static void lower_builtin_assume_aligned (gimple_stmt_iterator *);
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);
  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);
  /* If we had begin stmt markers from e.g. PCH, but this compilation
     doesn't want them, lower_stmt will have cleaned them up; we can
     now clear the flag that indicates we had them.  */
  if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
    {
      /* This counter need not be exact, but before lowering it will
         most certainly be.  */
      gcc_assert (cfun->debug_marker_count == 0);
      cfun->debug_nonbind_markers = false;
    }
  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
          || (gimple_return_retval (data.return_statements.last().stmt)
              != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }
  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
        {
          /* Remove the line number from the representative return statement.
             It now fills in for the fallthru too.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (t.stmt, UNKNOWN_LOCATION);
          may_fallthru = false;
        }
    }
  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();

  return 0;
}
namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override
  {
    return lower_function_body ();
  }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}
/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}
/* Create an artificial FUNCTION_DECL for assumption at LOC.  */

static tree
create_assumption_fn (location_t loc)
{
  tree name = clone_function_name_numbered (current_function_decl, "_assume");
  /* Temporarily, until we determine all the arguments.  */
  tree type = build_varargs_function_type_list (boolean_type_node, NULL_TREE);
  tree decl = build_decl (loc, FUNCTION_DECL, name, type);
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NAMELESS (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  tree attributes = DECL_ATTRIBUTES (current_function_decl);
  if (lookup_attribute ("noipa", attributes) == NULL)
    {
      attributes = tree_cons (get_identifier ("noipa"), NULL, attributes);
      if (lookup_attribute ("noinline", attributes) == NULL)
        attributes = tree_cons (get_identifier ("noinline"), NULL, attributes);
      if (lookup_attribute ("noclone", attributes) == NULL)
        attributes = tree_cons (get_identifier ("noclone"), NULL, attributes);
      if (lookup_attribute ("no_icf", attributes) == NULL)
        attributes = tree_cons (get_identifier ("no_icf"), NULL, attributes);
    }
  DECL_ATTRIBUTES (decl) = attributes;
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  tree t = build_decl (DECL_SOURCE_LOCATION (decl),
                       RESULT_DECL, NULL_TREE, boolean_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;
  push_struct_function (decl);
  cfun->function_end_locus = loc;
  init_tree_ssa (cfun);
  return decl;
}
struct lower_assumption_data
{
  copy_body_data id;
  tree return_false_label;
  tree guard_copy;
  auto_vec<tree> decls;
};
/* Helper function for lower_assumption.  Find local vars and labels
   in the assumption sequence and remove debug stmts.  */

static tree
find_assumption_locals_r (gimple_stmt_iterator *gsi_p, bool *,
                          struct walk_stmt_info *wi)
{
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    {
      gcc_assert (SSA_NAME_VAR (lhs) == NULL_TREE);
      data->id.decl_map->put (lhs, NULL_TREE);
      data->decls.safe_push (lhs);
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
           var; var = DECL_CHAIN (var))
        if (VAR_P (var)
            && !DECL_EXTERNAL (var)
            && DECL_CONTEXT (var) == data->id.src_fn)
          {
            data->id.decl_map->put (var, var);
            data->decls.safe_push (var);
          }
      break;
    case GIMPLE_LABEL:
      {
        tree label = gimple_label_label (as_a <glabel *> (stmt));
        data->id.decl_map->put (label, label);
        break;
      }
    case GIMPLE_RETURN:
      /* If something in the assumption tries to return from the parent
         function, reaching it during hypothetical evaluation would be UB,
         so transform such returns into return false;  */
      {
        gimple *g = gimple_build_assign (data->guard_copy,
                                         boolean_false_node);
        gsi_insert_before (gsi_p, g, GSI_SAME_STMT);
        gimple_return_set_retval (as_a <greturn *> (stmt),
                                  data->guard_copy);
        break;
      }
    case GIMPLE_DEBUG:
      /* As assumptions won't be emitted, debug info stmts in them
         are useless.  */
      gsi_remove (gsi_p, true);
      wi->removed_stmt = true;
      break;
    default:
      break;
    }
  return NULL_TREE;
}
/* Create a new PARM_DECL that is identical in all respects to DECL except
   that DECL can be either a VAR_DECL, a PARM_DECL or RESULT_DECL.  The
   original DECL must come from ID->src_fn and the copy will be part of
   ID->dst_fn.  */

static tree
assumption_copy_decl (tree decl, copy_body_data *id)
{
  tree type = TREE_TYPE (decl);

  if (is_global_var (decl))
    return decl;

  gcc_assert (VAR_P (decl)
              || TREE_CODE (decl) == PARM_DECL
              || TREE_CODE (decl) == RESULT_DECL);
  tree copy = build_decl (DECL_SOURCE_LOCATION (decl),
                          PARM_DECL, DECL_NAME (decl), type);
  if (DECL_PT_UID_SET_P (decl))
    SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
  TREE_READONLY (copy) = TREE_READONLY (decl);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
  DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
  DECL_BY_REFERENCE (copy) = DECL_BY_REFERENCE (decl);
  DECL_ARG_TYPE (copy) = type;
  ((lower_assumption_data *) id)->decls.safe_push (decl);
  return copy_decl_for_dup_finish (id, decl, copy);
}
/* Transform gotos out of the assumption into return false;  */

static tree
adjust_assumption_stmt_r (gimple_stmt_iterator *gsi_p, bool *,
                          struct walk_stmt_info *wi)
{
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree lab = NULL_TREE;
  unsigned int idx = 0;
  if (gimple_code (stmt) == GIMPLE_GOTO)
    lab = gimple_goto_dest (stmt);
  else if (gimple_code (stmt) == GIMPLE_COND)
    {
     repeat:
      if (idx == 0)
        lab = gimple_cond_true_label (as_a <gcond *> (stmt));
      else
        lab = gimple_cond_false_label (as_a <gcond *> (stmt));
    }
  else if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree label = gimple_label_label (as_a <glabel *> (stmt));
      DECL_CONTEXT (label) = current_function_decl;
    }
  if (lab)
    {
      if (!data->id.decl_map->get (lab))
        {
          if (!data->return_false_label)
            data->return_false_label
              = create_artificial_label (UNKNOWN_LOCATION);
          if (gimple_code (stmt) == GIMPLE_GOTO)
            gimple_goto_set_dest (as_a <ggoto *> (stmt),
                                  data->return_false_label);
          else if (idx == 0)
            gimple_cond_set_true_label (as_a <gcond *> (stmt),
                                        data->return_false_label);
          else
            gimple_cond_set_false_label (as_a <gcond *> (stmt),
                                         data->return_false_label);
        }
      if (gimple_code (stmt) == GIMPLE_COND && idx == 0)
        {
          idx = 1;
          goto repeat;
        }
    }
  return NULL_TREE;
}
/* Adjust trees in the assumption body.  Called through walk_tree.  */

static tree
adjust_assumption_stmt_op (tree *tp, int *, void *datap)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) datap;
  lower_assumption_data *data = (lower_assumption_data *) wi->info;
  tree t = *tp;
  tree *newt;
  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      newt = data->id.decl_map->get (t);
      /* There shouldn't be SSA_NAMEs other than ones defined in the
         assumption's body.  */
      gcc_assert (newt);
      *tp = *newt;
      break;
    case LABEL_DECL:
      newt = data->id.decl_map->get (t);
      if (newt)
        *tp = *newt;
      break;
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *tp = remap_decl (t, &data->id);
      break;
    default:
      break;
    }
  return NULL_TREE;
}
/* Lower assumption.
   The gimplifier transformed:
   .ASSUME (cond);
   into:
   [[assume (guard)]]
   {
     guard = cond;
   }
   which we should transform into:
   .ASSUME (&artificial_fn, args...);
   where artificial_fn will look like:
   bool artificial_fn (args...)
   {
     guard = cond;
     return guard;
   }
   with any debug stmts in the block removed and jumps out of
   the block or return stmts replaced with return false;  */
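
/* As a concrete sketch (the function and SSA names below are invented
   for illustration, not actual compiler output): in a function foo, for

       [[assume (x > 0)]];

   with x an int parameter, this builds an artificial function roughly
   like

       bool foo._assume.0 (int x)
       {
         guard = x > 0;
         return guard;
       }

   and replaces the GIMPLE_ASSUME with

       .ASSUME (&foo._assume.0, x_1);  */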
static void
lower_assumption (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree guard = gimple_assume_guard (stmt);
  gimple *bind = gimple_assume_body (stmt);
  location_t loc = gimple_location (stmt);
  gcc_assert (gimple_code (bind) == GIMPLE_BIND);

  lower_assumption_data lad;
  hash_map<tree, tree> decl_map;
  memset (&lad.id, 0, sizeof (lad.id));
  lad.return_false_label = NULL_TREE;
  lad.id.src_fn = current_function_decl;
  lad.id.dst_fn = create_assumption_fn (loc);
  lad.id.src_cfun = DECL_STRUCT_FUNCTION (lad.id.src_fn);
  lad.id.decl_map = &decl_map;
  lad.id.copy_decl = assumption_copy_decl;
  lad.id.transform_call_graph_edges = CB_CGE_DUPLICATE;
  lad.id.transform_parameter = true;
  lad.id.do_not_unshare = true;
  lad.id.do_not_fold = true;
  cfun->curr_properties = lad.id.src_cfun->curr_properties;
  lad.guard_copy = create_tmp_var (boolean_type_node);
  decl_map.put (lad.guard_copy, lad.guard_copy);
  decl_map.put (guard, lad.guard_copy);
  cfun->assume_function = 1;
  /* Find variables, labels and SSA_NAMEs local to the assume GIMPLE_BIND.  */
  gimple_stmt_iterator gsi2 = gsi_start (*gimple_assume_body_ptr (stmt));
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &lad;
  walk_gimple_stmt (&gsi2, find_assumption_locals_r, NULL, &wi);
  unsigned int sz = lad.decls.length ();
  for (unsigned i = 0; i < sz; ++i)
    {
      tree v = lad.decls[i];
      tree newv;
      /* SSA_NAMEs defined in the assume condition should be replaced
         by new SSA_NAMEs in the artificial function.  */
      if (TREE_CODE (v) == SSA_NAME)
        {
          newv = make_ssa_name (remap_type (TREE_TYPE (v), &lad.id));
          decl_map.put (v, newv);
        }
      /* Local vars should have context and type adjusted to the
         new artificial function.  */
      else
        {
          if (is_global_var (v) && !DECL_ASSEMBLER_NAME_SET_P (v))
            DECL_ASSEMBLER_NAME (v);
          TREE_TYPE (v) = remap_type (TREE_TYPE (v), &lad.id);
          DECL_CONTEXT (v) = current_function_decl;
        }
    }
  /* References to other automatic vars should be replaced by
     PARM_DECLs to the artificial function.  */
  memset (&wi, 0, sizeof (wi));
  wi.info = (void *) &lad;
  walk_gimple_stmt (&gsi2, adjust_assumption_stmt_r,
                    adjust_assumption_stmt_op, &wi);
  /* At the start prepend guard = false;  */
  gimple_seq body = NULL;
  gimple *g = gimple_build_assign (lad.guard_copy, boolean_false_node);
  gimple_seq_add_stmt (&body, g);
  gimple_seq_add_stmt (&body, bind);
  /* At the end add return guard;  */
  greturn *gr = gimple_build_return (lad.guard_copy);
  gimple_seq_add_stmt (&body, gr);
  /* If there were any jumps to labels outside of the condition,
     replace them with a jump to
     return_false_label:
     guard = false;
     return guard;  */
  if (lad.return_false_label)
    {
      g = gimple_build_label (lad.return_false_label);
      gimple_seq_add_stmt (&body, g);
      g = gimple_build_assign (lad.guard_copy, boolean_false_node);
      gimple_seq_add_stmt (&body, g);
      gr = gimple_build_return (lad.guard_copy);
      gimple_seq_add_stmt (&body, gr);
    }
  bind = gimple_build_bind (NULL_TREE, body, NULL_TREE);
  body = NULL;
  gimple_seq_add_stmt (&body, bind);
  gimple_set_body (current_function_decl, body);
  pop_cfun ();
  tree parms = NULL_TREE;
  tree parmt = void_list_node;
  auto_vec<tree, 8> vargs;
  vargs.safe_grow (1 + (lad.decls.length () - sz), true);
  /* First argument to IFN_ASSUME will be address of the
     artificial function.  */
  vargs[0] = build_fold_addr_expr (lad.id.dst_fn);
  for (unsigned i = lad.decls.length (); i > sz; --i)
    {
      tree *v = decl_map.get (lad.decls[i - 1]);
      gcc_assert (v && TREE_CODE (*v) == PARM_DECL);
      DECL_CHAIN (*v) = parms;
      parms = *v;
      parmt = tree_cons (NULL_TREE, TREE_TYPE (*v), parmt);
      /* Remaining arguments will be the variables/parameters
         mentioned in the condition.  */
      vargs[i - sz] = lad.decls[i - 1];
      /* If they have gimple types, we might need to regimplify
         them to make the IFN_ASSUME call valid.  */
      if (is_gimple_reg_type (TREE_TYPE (vargs[i - sz]))
          && !is_gimple_val (vargs[i - sz]))
        {
          tree t = make_ssa_name (TREE_TYPE (vargs[i - sz]));
          g = gimple_build_assign (t, vargs[i - sz]);
          gsi_insert_before (gsi, g, GSI_SAME_STMT);
          vargs[i - sz] = t;
        }
    }
  DECL_ARGUMENTS (lad.id.dst_fn) = parms;
  TREE_TYPE (lad.id.dst_fn) = build_function_type (boolean_type_node, parmt);

  cgraph_node::add_new_function (lad.id.dst_fn, false);

  for (unsigned i = 0; i < sz; ++i)
    {
      tree v = lad.decls[i];
      if (TREE_CODE (v) == SSA_NAME)
        release_ssa_name (v);
    }

  data->cannot_fallthru = false;
  /* Replace GIMPLE_ASSUME statement with IFN_ASSUME call.  */
  gcall *call = gimple_build_call_internal_vec (IFN_ASSUME, vargs);
  gimple_set_location (call, loc);
  gsi_replace (gsi, call, true);
}
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */
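
/* For instance (an invented sketch), in

       goto <L0>;
       return;

   the goto sets cannot_fallthru, so the unreachable GIMPLE_RETURN
   after it is deleted outright instead of being lowered into yet
   another goto to the return site.  */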
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        lower_try_catch (gsi, data);
      else
        {
          /* It must be a GIMPLE_TRY_FINALLY.  */
          bool cannot_fallthru;
          lower_sequence (gimple_try_eval_ptr (stmt), data);
          cannot_fallthru = data->cannot_fallthru;

          /* The finally clause is always executed after the try clause,
             so if it does not fall through, then the try-finally will not
             fall through.  Otherwise, if the try clause does not fall
             through, then when the finally clause falls through it will
             resume execution wherever the try clause was going.  So the
             whole try-finally will only fall through if both the try
             clause and the finally clause fall through.  */
          data->cannot_fallthru = false;
          lower_sequence (gimple_try_cleanup_ptr (stmt), data);
          data->cannot_fallthru |= cannot_fallthru;
          gsi_next (gsi);
        }
      return;

    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
        lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering, we
         first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
         disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
        gsi_remove (gsi, true);
      else
        gsi_next (gsi);
      return;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);
        unsigned i;

        for (i = 0; i < gimple_call_num_args (stmt); i++)
          {
            tree arg = gimple_call_arg (stmt, i);
            if (EXPR_P (arg))
              TREE_SET_BLOCK (arg, data->block);
          }

        if (decl
            && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
          {
            if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
              {
                lower_builtin_setjmp (gsi);
                data->cannot_fallthru = false;
                return;
              }
            else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
                     && flag_tree_bit_ccp
                     && gimple_builtin_call_types_compatible_p (stmt, decl))
              {
                lower_builtin_posix_memalign (gsi);
                return;
              }
            else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_ASSUME_ALIGNED
                     && !optimize)
              {
                lower_builtin_assume_aligned (gsi);
                data->cannot_fallthru = false;
                gsi_next (gsi);
                return;
              }
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }

        /* We delay folding of builtin calls from gimplification to
           here so the IL is in consistent state for the diagnostic
           machinery's job.  */
        if (gimple_call_builtin_p (stmt))
          fold_stmt (gsi);
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_ASSUME:
      lower_assumption (gsi, data);
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
                        as_a <gtransaction *> (stmt)),
                      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* Block tree may get clobbered by inlining.  Normally this would
             be fixed in rest_of_decl_compilation using block notes, but
             since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          data->cannot_fallthru = false;
          lower_sequence (gimple_catch_handler_ptr (
                            as_a <gcatch *> (gsi_stmt (i))),
                          data);
          if (!data->cannot_fallthru)
            cannot_fallthru = false;
        }
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
        cannot_fallthru = false;
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (gimple_debug_begin_stmt_p (stmt));
      break;

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (
                                         as_a <gcatch *> (gsi_stmt (i)))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
               gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
                || gimple_seq_may_fallthru (gimple_eh_else_e_body (
                                              eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      return true;
    }
}
/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
}
/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        {
          /* Remove the line number from the representative return statement.
             It now fills in for many such returns.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

          goto found;
        }
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

found:
  /* Generate a goto statement and remove the return statement.  */
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  /* location includes block.  */
  gimple_set_location (t, gimple_location (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */
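
/* At the source level the shape above corresponds to code like the
   following (an illustrative sketch, not a testcase from the tree):

       if (__builtin_setjmp (buf) == 0)
         ... the direct path, setjmp returned 0 ...
       else
         ... resumed here via __builtin_longjmp (buf, 1) ...

   where NEXT_LABEL below is the receiver that the longjmp lands on
   and CONT_LABEL rejoins the two paths.  */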
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  /* location includes block.  */
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */
static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
                                fold_build2 (MEM_REF, ptr_type_node, pptr,
                                             build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
                                    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
                            2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
                                           build_int_cst (ptr_type_node, 0)),
                              ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}
/* Lower calls to __builtin_assume_aligned when not optimizing.  */
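
/* For example (an invented sketch): given

       p_1 = __builtin_assume_aligned (q_2, 16);

   this records in p_1's pointer info an alignment of 16 with
   misalignment 0; the call itself is left in place and elided
   later at RTL expansion time.  */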
static void
lower_builtin_assume_aligned (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));

  tree lhs = gimple_call_lhs (call);
  if (!lhs || !POINTER_TYPE_P (TREE_TYPE (lhs)) || TREE_CODE (lhs) != SSA_NAME)
    return;

  tree align = gimple_call_arg (call, 1);
  tree misalign = (gimple_call_num_args (call) > 2
                   ? gimple_call_arg (call, 2) : NULL_TREE);
  if (!tree_fits_uhwi_p (align)
      || (misalign && !tree_fits_uhwi_p (misalign)))
    return;

  unsigned aligni = TREE_INT_CST_LOW (align);
  unsigned misaligni = misalign ? TREE_INT_CST_LOW (misalign) : 0;
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0
      || (misaligni & ~(aligni - 1)) != 0)
    return;

  /* For lowering we simply transfer alignment information to the
     result and leave the call otherwise unchanged, it will be elided
     at RTL expansion time.  */
  ptr_info_def *pi = get_ptr_info (lhs);
  set_ptr_info_alignment (pi, aligni, misaligni);
}
/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         we don't need to care about.  */
      if (!VAR_P (var))
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}

/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}