/* Control flow redundancy hardening.
   Copyright (C) 2022-2023 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <oliva@adacore.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#define INCLUDE_ALGORITHM /* find */
#include "system.h"
#include "coretypes.h"
#include "fold-const.h"
#include "tree-pass.h"
#include "gimple-iterator.h"
#include "gimple-pretty-print.h"
#include "tree-cfgcleanup.h"
#include "basic-block.h"
#include "langhooks.h"
#include "diagnostic.h"
/* This pass introduces verification, at function exits, that booleans
   set in each basic block during function execution reflect the
   control flow graph: for each visited block, check that at least one
   predecessor and at least one successor were also visited.  This
   sort of hardening may detect various kinds of attacks.  */
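/* For illustration only, a rough sketch of the transformation in
   C-like pseudo code rather than the exact GIMPLE this pass emits
   (the names below are made up for the example):

     void f ()
     {
       vword visited[words_for (nblocks)] = { 0 };
     bb2: visited[word (bb2)] |= mask (bb2);
       ...
     bb3: visited[word (bb3)] |= mask (bb3);
       ...
     bb4: visited[word (bb4)] |= mask (bb4);
       // At the exit: either inline tests, or an out-of-line call
       // such as __hardcfr_check (nblocks, visited, cfg_encoding);
       return;
     }

   The verification asserts, for every block whose bit is set, that at
   least one predecessor bit and at least one successor bit are also
   set.  */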
/* Define a pass to harden code through control flow redundancy.  */

const pass_data pass_data_harden_control_flow_redundancy = {
  GIMPLE_PASS,
  "hardcfr",
  OPTGROUP_NONE,
  TV_NONE,
  PROP_cfg | PROP_ssa, // properties_required
  0,	    // properties_provided
  0,	    // properties_destroyed
  TODO_cleanup_cfg, // properties_start
  0,	    // properties_finish
};
class pass_harden_control_flow_redundancy : public gimple_opt_pass
{
public:
  pass_harden_control_flow_redundancy (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_harden_control_flow_redundancy, ctxt)
  {}
  opt_pass *clone () { return new pass_harden_control_flow_redundancy (m_ctxt); }
  virtual bool gate (function *fun) {
    /* Return quickly if the pass is disabled, without checking any of
       the conditions that might give rise to warnings that would only
       be appropriate if hardening was requested.  */
    if (!flag_harden_control_flow_redundancy)
      return false;

    /* Functions that return more than once, like setjmp and vfork
       (that also gets this flag set), will start recording a path
       after the first return, and then may take another path when
       they return again.  The unterminated path may then be flagged
       as an error.  ??? We could save the visited array before the
       call and restore it if it returns again.  */
    if (fun->calls_setjmp)
      {
	warning_at (DECL_SOURCE_LOCATION (fun->decl), 0,
		    "%qD calls %<setjmp%> or similar,"
		    " %<-fharden-control-flow-redundancy%> is not supported",
		    fun->decl);
	return false;
      }

    /* Some targets bypass the abnormal dispatcher block in nonlocal
       gotos, and then we'd miss its visited bit.  It might be doable
       to make it work uniformly, but this feature is not used often
       enough to make it worthwhile.  */
    if (fun->has_nonlocal_label)
      {
	warning_at (DECL_SOURCE_LOCATION (fun->decl), 0,
		    "%qD receives nonlocal gotos,"
		    " %<-fharden-control-flow-redundancy%> is not supported",
		    fun->decl);
	return false;
      }

    if (fun->cfg && param_hardcfr_max_blocks > 0
	&& (n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS
	    > param_hardcfr_max_blocks))
      {
	warning_at (DECL_SOURCE_LOCATION (fun->decl), 0,
		    "%qD has more than %u blocks, the requested"
		    " maximum for %<-fharden-control-flow-redundancy%>",
		    fun->decl, param_hardcfr_max_blocks);
	return false;
      }

    return true;
  }
  virtual unsigned int execute (function *);
};
/* Return TRUE iff CFR checks should be inserted before returning
   calls.  */

static bool
check_returning_calls_p ()
{
  return
    flag_harden_control_flow_redundancy_check_returning_calls > 0
    || (flag_harden_control_flow_redundancy_check_returning_calls < 0
	/* Gates pass_tail_calls.  */
	&& flag_optimize_sibling_calls
	/* Gates pass_all_optimizations.  */
	&& optimize >= 1 && !optimize_debug);
}
/* Scan BB from the end, updating *RETPTR if given as return stmts and
   copies are found.  Return a call or a stmt that cannot appear after
   a tail call, or NULL if the top of the block is reached without
   finding any.  */

static gimple *
hardcfr_scan_block (basic_block bb, tree **retptr)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, nops, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
	  || gimple_code (stmt) == GIMPLE_NOP
	  || gimple_code (stmt) == GIMPLE_PREDICT
	  || gimple_clobber_p (stmt)
	  || is_gimple_debug (stmt))
	continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
	{
	  greturn *gret = as_a <greturn *> (stmt);
	  if (retptr)
	    {
	      gcc_checking_assert (!*retptr);
	      *retptr = gimple_return_retval_ptr (gret);
	    }
	  continue;
	}

      /* Check for a call.  */
      if (is_gimple_call (stmt))
	return stmt;

      /* Allow simple copies to the return value, updating the return
	 value to be found in earlier assignments.  */
      if (retptr && *retptr && gimple_assign_single_p (stmt)
	  && **retptr == gimple_assign_lhs (stmt))
	{
	  *retptr = gimple_assign_rhs1_ptr (stmt);
	  continue;
	}

      /* Any other kind of stmt will prevent a tail call.  */
      return stmt;
    }

  return NULL;
}
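/* An illustrative (not literal) example of the "returning call"
   pattern that the predicate below looks for: a call whose result
   reaches a return through simple copies only, e.g.

     _1 = foo (x_2);   // candidate returning call
     r_3 = _1;         // simple copy, allowed
     return r_3;

   Such calls may become tail calls later on, so when returning-call
   checking is enabled the verification is inserted before the call
   rather than on the (possibly elided) edge to the exit block.  */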
/* Return TRUE iff CALL is to be preceded by a CFR checkpoint, i.e.,
   if it's a returning call (one whose result is ultimately returned
   without intervening non-copy statements) and we're checking
   returning calls, a __builtin_return call (noreturn with a path to
   the exit block), a must-tail call, or a tail call.  */

static bool
returning_call_p (gcall *call)
{
  if (!(gimple_call_noreturn_p (call)
	|| gimple_call_must_tail_p (call)
	|| gimple_call_tail_p (call)
	|| check_returning_calls_p ()))
    return false;

  /* Quickly check that there's a path to exit compatible with a
     returning call.  Detect infinite loops by limiting the path
     length to the basic block count, and by looking for duplicate
     blocks before allocating more memory for the path, for amortized
     constant time per block.  */
  auto_vec <basic_block, 10> path;
  for (basic_block bb = gimple_bb (call);
       bb != EXIT_BLOCK_PTR_FOR_FN (cfun);
       bb = single_succ (bb))
    if (!single_succ_p (bb)
	|| (single_succ_edge (bb)->flags & EDGE_EH) != 0
	|| n_basic_blocks_for_fn (cfun) - path.length () <= NUM_FIXED_BLOCKS
	|| (path.length () == path.allocated ()
	    && std::find (path.begin (), path.end (), bb) != path.end ()))
      return false;
    else
      path.safe_push (bb);

  /* Check the stmts in the blocks and trace the return value.  */
  tree *retptr = NULL;
  while (true)
    {
      gcc_checking_assert (!path.is_empty ());
      basic_block bb = path.pop ();
      gimple *stop = hardcfr_scan_block (bb, &retptr);
      if (stop)
	{
	  if (stop != call)
	    return false;
	  gcc_checking_assert (path.is_empty ());
	  break;
	}

      gphi *retphi = NULL;
      if (retptr && *retptr && TREE_CODE (*retptr) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (*retptr)
	  && SSA_NAME_DEF_STMT (*retptr)
	  && is_a <gphi *> (SSA_NAME_DEF_STMT (*retptr))
	  && gimple_bb (SSA_NAME_DEF_STMT (*retptr)) == bb)
	{
	  retphi = as_a <gphi *> (SSA_NAME_DEF_STMT (*retptr));
	  gcc_checking_assert (gimple_phi_result (retphi) == *retptr);
	}
      else
	continue;

      gcc_checking_assert (!path.is_empty ());
      edge e = single_succ_edge (path.last ());
      int i = EDGE_COUNT (bb->preds);
      while (i--)
	if (EDGE_PRED (bb, i) == e)
	  break;
      gcc_checking_assert (i >= 0);
      retptr = gimple_phi_arg_def_ptr (retphi, i);
    }

  return (gimple_call_noreturn_p (call)
	  || gimple_call_must_tail_p (call)
	  || gimple_call_tail_p (call)
	  || (gimple_call_lhs (call) == (retptr ? *retptr : NULL)
	      && check_returning_calls_p ()));
}
typedef auto_vec<edge, 10> chk_edges_t;

/* Declare for mutual recursion.  */
static bool hardcfr_sibcall_search_preds (basic_block bb,
					  chk_edges_t &chk_edges,
					  int &count_chkcall,
					  auto_sbitmap &chkcall_blocks,
					  int &count_postchk,
					  auto_sbitmap &postchk_blocks,
					  tree *retptr);
/* Search backwards from the end of BB for a mandatory or potential
   sibcall.  Schedule the block to be handled sort-of like noreturn if
   so.  Recurse to preds, with updated RETPTR, if the block only
   contains stmts that may follow such a call, scheduling checking at
   edges and marking blocks as post-check as needed.  Return true iff,
   at the end of the block, a check will have already been
   performed.  */

static bool
hardcfr_sibcall_search_block (basic_block bb,
			      chk_edges_t &chk_edges,
			      int &count_chkcall,
			      auto_sbitmap &chkcall_blocks,
			      int &count_postchk,
			      auto_sbitmap &postchk_blocks,
			      tree *retptr)
{
  /* Conditionals and internal exceptions rule out tail calls.  */
  if (!single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_EH) != 0)
    return false;

  gimple *stmt = hardcfr_scan_block (bb, &retptr);
  if (!stmt)
    return hardcfr_sibcall_search_preds (bb, chk_edges,
					 count_chkcall, chkcall_blocks,
					 count_postchk, postchk_blocks,
					 retptr);

  if (!is_a <gcall *> (stmt))
    return false;

  /* Avoid disrupting mandatory or early-marked tail calls,
     inserting the check before them.  This works for
     must-tail calls, but tail calling as an optimization is
     detected too late for us.

     Also check for noreturn calls here.  Noreturn calls won't
     normally have edges to exit, so they won't be found here,
     but __builtin_return does, and we must check before
     it, so handle it like a tail call.  */
  gcall *call = as_a <gcall *> (stmt);
  if (!(gimple_call_noreturn_p (call)
	|| gimple_call_must_tail_p (call)
	|| gimple_call_tail_p (call)
	|| (gimple_call_lhs (call) == (retptr ? *retptr : NULL)
	    && check_returning_calls_p ())))
    return false;

  gcc_checking_assert (returning_call_p (call));

  /* We found a call that is to be preceded by checking.  */
  if (bitmap_set_bit (chkcall_blocks, bb->index))
    count_chkcall++;
  else
    gcc_unreachable ();

  return true;
}
/* Search preds of BB for a mandatory or potential sibcall or
   returning call, and arrange for the blocks containing them to have
   a check inserted before the call, like noreturn calls.  If any
   preds are found to perform checking, schedule checks at the edges
   of those that don't, and mark BB as postcheck.  */

static bool
hardcfr_sibcall_search_preds (basic_block bb,
			      chk_edges_t &chk_edges,
			      int &count_chkcall,
			      auto_sbitmap &chkcall_blocks,
			      int &count_postchk,
			      auto_sbitmap &postchk_blocks,
			      tree *retptr)
{
  /* For the exit block, we wish to force a check at every
     predecessor, so pretend we've already found a pred that had
     checking, so that we schedule checking at every one of its pred
     edges.  */
  bool first = bb->index >= NUM_FIXED_BLOCKS;
  bool postchecked = true;

  gphi *retphi = NULL;
  if (retptr && *retptr && TREE_CODE (*retptr) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (*retptr)
      && SSA_NAME_DEF_STMT (*retptr)
      && is_a <gphi *> (SSA_NAME_DEF_STMT (*retptr))
      && gimple_bb (SSA_NAME_DEF_STMT (*retptr)) == bb)
    {
      retphi = as_a <gphi *> (SSA_NAME_DEF_STMT (*retptr));
      gcc_checking_assert (gimple_phi_result (retphi) == *retptr);
    }

  for (int i = EDGE_COUNT (bb->preds); i--; first = false)
    {
      edge e = EDGE_PRED (bb, i);

      bool checked
	= hardcfr_sibcall_search_block (e->src, chk_edges,
					count_chkcall, chkcall_blocks,
					count_postchk, postchk_blocks,
					!retphi ? retptr
					: gimple_phi_arg_def_ptr (retphi, i));

      if (first)
	{
	  postchecked = checked;
	  continue;
	}

      /* When we first find a checked block, force a check at every
	 other incoming edge we've already visited, and those we
	 visit afterwards that don't have their own check, so that
	 when we reach BB, the check has already been performed.  */
      if (!postchecked && checked)
	{
	  for (int j = EDGE_COUNT (bb->preds); --j > i; )
	    chk_edges.safe_push (EDGE_PRED (bb, j));
	  postchecked = true;
	}
      if (postchecked && !checked)
	chk_edges.safe_push (EDGE_PRED (bb, i));
    }

  if (postchecked && bb->index >= NUM_FIXED_BLOCKS)
    {
      if (bitmap_set_bit (postchk_blocks, bb->index))
	count_postchk++;
      else
	gcc_unreachable ();
    }

  return postchecked;
}
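/* An illustration of the scheduling performed by the search above
   (a sketch, not additional logic): given

       B1 --e1--> EXIT
       B2 (ends in a must-tail or returning call)
       B3 --e3--> EXIT

   the call in B2 gets a check inserted right before it and B2 is
   recorded in CHKCALL_BLOCKS, while e1 and e3 get checks scheduled on
   the edges themselves (CHK_EDGES); blocks found to post-dominate a
   check are recorded in POSTCHK_BLOCKS, so they get neither bit
   setting nor further checking.  */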
/* Keep the state needed to instrument a function for control flow
   redundancy: the visited-bits array, the checking sequence, and the
   optional out-of-line CFG representation.  */

class rt_bb_visited
{
  /* Use a sufficiently wide unsigned type to hold basic block numbers.  */
  typedef size_t blknum;

  /* Record the original block count of the function.  */
  blknum nblocks;

  /* Record the number of bits per VWORD (short for VISITED WORD), an
     efficient mode to set and test bits for blocks we visited, and to
     encode the CFG in case out-of-line verification is used.  */
  unsigned vword_bits;

  /* Hold the unsigned integral VWORD type.  */
  tree vword_type;

  /* Hold a pointer-to-VWORD type.  */
  tree vword_ptr;

  /* Hold a growing sequence used to check, inline or out-of-line,
     that VISITED encodes an expected execution path.  */
  gimple_seq ckseq;

  /* If nonNULL, hold a growing representation of the CFG for
     out-of-line testing.  */
  tree rtcfg;

  /* Hold the declaration of an array of VWORDs, used as an array of
     NBLOCKS-many bits.  */
  tree visited;

  /* If performing inline checking, hold declarations of boolean
     variables used for inline checking.  CKBLK holds the result of
     testing whether the VISITED bit corresponding to a predecessor or
     successor is set, CKINV inverts that bit, CKPART gets cleared if
     a block was not visited or if CKINV for any of its predecessors
     or successors is set, and CKFAIL gets set if CKPART remains set
     at the end of a block's predecessors or successors list.  */
  tree ckfail, ckpart, ckinv, ckblk;

  /* If we need to deal with abnormal edges, we insert SSA_NAMEs for
     boolean true and false.  */
  tree vfalse, vtrue;
  /* Convert a block index N to a block vindex, the index used to
     identify it in the VISITED array.  Check that it's in range:
     neither ENTRY nor EXIT, but maybe one-past-the-end, to compute
     the visited array length.  */
  blknum num2idx (blknum n) {
    gcc_checking_assert (n >= NUM_FIXED_BLOCKS && n <= nblocks);
    return (n - NUM_FIXED_BLOCKS);
  }

  /* Return the block vindex for BB, that must not be ENTRY or
     EXIT.  */
  blknum bb2idx (basic_block bb) {
    gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			 && bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
    gcc_checking_assert (blknum (bb->index) < nblocks);
    return num2idx (bb->index);
  }
  /* Compute the type to be used for the VISITED array.  */
  tree vtype ()
  {
    blknum n = num2idx (nblocks);
    return build_array_type_nelts (vword_type,
				   (n + vword_bits - 1) / vword_bits);
  }
  /* Compute and return the index into VISITED for block BB.  If BITP
     is non-NULL, also compute and store the bit mask corresponding to
     block BB in *BITP, so that (visited[index] & mask) tells whether
     BB was visited.  */
  tree vwordidx (basic_block bb, tree *bitp = NULL)
  {
    blknum idx = bb2idx (bb);
    if (bitp)
      {
	unsigned bit = idx % vword_bits;
	/* We don't need to adjust shifts to follow native bit
	   endianness here, all of our uses of the CFG and visited
	   bitmaps, whether at compile or runtime, are shifted bits on
	   full words.  This adjustment here would require a
	   corresponding adjustment at runtime, which would be nothing
	   but undesirable overhead for us.  */
	if (0 /* && BITS_BIG_ENDIAN */)
	  bit = vword_bits - bit - 1;
	wide_int wbit = wi::set_bit_in_zero (bit, vword_bits);
	*bitp = wide_int_to_tree (vword_type, wbit);
      }
    return build_int_cst (vword_ptr, idx / vword_bits);
  }
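  /* Worked example (for illustration): with vword_bits == 32, a block
     whose vindex (bb2idx) is 70 lands in visited word 70 / 32 == 2,
     with mask 1 << (70 % 32) == 1 << 6, so (visited[2] & (1 << 6)) != 0
     tests whether that block was visited.  */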
  /* Return an expr that accesses the visited element that holds
     information about BB.  If BITP is non-NULL, set it to the mask to
     tell which bit in that expr refers to BB.  */
  tree vword (basic_block bb, tree *bitp = NULL)
  {
    return build2 (MEM_REF, vword_type,
		   build1 (ADDR_EXPR, vword_ptr, visited),
		   int_const_binop (MULT_EXPR, vwordidx (bb, bitp),
				    fold_convert (vword_ptr,
						  TYPE_SIZE_UNIT
						  (vword_type))));
  }
  /* Return an expr that evaluates to true iff BB was marked as
     VISITED.  Add any gimple stmts to SEQP.  */
  tree vindex (basic_block bb, gimple_seq *seqp)
  {
    if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
	|| bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
      return boolean_true_node;

    tree bit, setme = vword (bb, &bit);
    tree temp = create_tmp_var (vword_type, ".cfrtemp");

    gassign *vload = gimple_build_assign (temp, setme);
    gimple_seq_add_stmt (seqp, vload);

    gassign *vmask = gimple_build_assign (temp, BIT_AND_EXPR, temp, bit);
    gimple_seq_add_stmt (seqp, vmask);

    return build2 (NE_EXPR, boolean_type_node,
		   temp, build_int_cst (vword_type, 0));
  }
  /* Set the bit corresponding to BB in VISITED.  Add to SEQ any
     required gimple stmts, and return SEQ, possibly modified.  */
  gimple_seq vset (basic_block bb, gimple_seq seq = NULL)
  {
    tree bit, setme = vword (bb, &bit);
    tree temp = create_tmp_var (vword_type, ".cfrtemp");

    gassign *vload = gimple_build_assign (temp, setme);
    gimple_seq_add_stmt (&seq, vload);

    gassign *vbitset = gimple_build_assign (temp, BIT_IOR_EXPR, temp, bit);
    gimple_seq_add_stmt (&seq, vbitset);

    gassign *vstore = gimple_build_assign (unshare_expr (setme), temp);
    gimple_seq_add_stmt (&seq, vstore);

    /* Prevent stores into visited from being deferred, forcing
       subsequent bitsets to reload the word rather than reusing
       values already in register.  The purpose is threefold: make the
       bitset get to memory in this block, so that control flow
       attacks in functions called in this block don't easily bypass
       the bitset; prevent the bitset word from being retained in a
       register across blocks, which could, in an attack scenario,
       make a later block set more than one bit; and prevent hoisting
       or sinking loads or stores of bitset words out of loops or even
       throughout functions, which could significantly weaken the
       verification.  This is equivalent to making the bitsetting
       volatile within the function body, but without changing its
       type; making the bitset volatile would make inline checking far
       less optimizable for no reason.  */
    vec<tree, va_gc> *inputs = NULL;
    vec<tree, va_gc> *outputs = NULL;
    vec_safe_push (outputs,
		   build_tree_list
		   (build_tree_list
		    (NULL_TREE, build_string (2, "=m")),
		    visited));
    vec_safe_push (inputs,
		   build_tree_list
		   (build_tree_list
		    (NULL_TREE, build_string (1, "m")),
		    visited));
    gasm *stabilize = gimple_build_asm_vec ("", inputs, outputs,
					    NULL, NULL);
    gimple_seq_add_stmt (&seq, stabilize);

    return seq;
  }
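  /* The sequence built by vset amounts, roughly, to the following
     GIMPLE (an illustration; temporaries, word offset W and MASK
     depend on the block):

       .cfrtemp = MEM[(vword *)&.cfrvisited + W];
       .cfrtemp = .cfrtemp | MASK;
       MEM[(vword *)&.cfrvisited + W] = .cfrtemp;
       __asm__ ("" : "=m" .cfrvisited : "m" .cfrvisited);

     The empty asm is the optimization barrier described above.  */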
public:
  /* Prepare to add control flow redundancy testing to CFUN.  */
  rt_bb_visited (int checkpoints)
    : nblocks (n_basic_blocks_for_fn (cfun)),
      vword_type (NULL), ckseq (NULL), rtcfg (NULL),
      vfalse (NULL), vtrue (NULL)
  {
    /* If we've already added a declaration for the builtin checker,
       extract vword_type and vword_bits from its declaration.  */
    if (tree checkfn = builtin_decl_explicit (BUILT_IN___HARDCFR_CHECK))
      {
	tree check_arg_list = TYPE_ARG_TYPES (TREE_TYPE (checkfn));
	tree vword_const_ptr_type = TREE_VALUE (TREE_CHAIN (check_arg_list));
	vword_type = TYPE_MAIN_VARIANT (TREE_TYPE (vword_const_ptr_type));
	vword_bits = tree_to_shwi (TYPE_SIZE (vword_type));
      }
    /* Otherwise, select vword_bits, vword_type et al, and use it to
       declare the builtin checker.  */
    else
      {
	/* This setting needs to be kept in sync with libgcc/hardcfr.c.
	   We aim for at least 28 bits, which enables us to refer to as
	   many as 28 << 28 blocks in a function's CFG.  That's way over
	   4G blocks.  */
	machine_mode VWORDmode;
	if (BITS_PER_UNIT >= 28)
	  {
	    VWORDmode = QImode;
	    vword_bits = BITS_PER_UNIT;
	  }
	else if (BITS_PER_UNIT >= 14)
	  {
	    VWORDmode = HImode;
	    vword_bits = 2 * BITS_PER_UNIT;
	  }
	else
	  {
	    VWORDmode = SImode;
	    vword_bits = 4 * BITS_PER_UNIT;
	  }

	vword_type = lang_hooks.types.type_for_mode (VWORDmode, 1);
	gcc_checking_assert (vword_bits == tree_to_shwi (TYPE_SIZE
							 (vword_type)));

	vword_type = build_variant_type_copy (vword_type);
	TYPE_ALIAS_SET (vword_type) = new_alias_set ();

	tree vword_const = build_qualified_type (vword_type, TYPE_QUAL_CONST);
	tree vword_const_ptr = build_pointer_type (vword_const);
	tree type = build_function_type_list (void_type_node, sizetype,
					      vword_const_ptr, vword_const_ptr,
					      NULL_TREE);
	tree decl = add_builtin_function_ext_scope
	  ("__builtin___hardcfr_check",
	   type, BUILT_IN___HARDCFR_CHECK, BUILT_IN_NORMAL,
	   "__hardcfr_check", NULL_TREE);
	TREE_NOTHROW (decl) = true;
	set_builtin_decl (BUILT_IN___HARDCFR_CHECK, decl, true);
      }

    /* The checker uses a qualified pointer, so we can't reuse it,
       so build a new one.  */
    vword_ptr = build_pointer_type (vword_type);

    tree visited_type = vtype ();
    visited = create_tmp_var (visited_type, ".cfrvisited");

    if (nblocks - NUM_FIXED_BLOCKS > blknum (param_hardcfr_max_inline_blocks)
	|| checkpoints > 1)
      {
	/* Make sure vword_bits is wide enough for the representation
	   of nblocks in rtcfg.  Compare with vword_bits << vword_bits,
	   but avoiding overflows, shifting nblocks right instead.  If
	   vword_bits is wider than HOST_WIDE_INT, assume it fits, so
	   as to avoid undefined shifts.  */
	gcc_assert (HOST_BITS_PER_WIDE_INT <= vword_bits
		    || (((unsigned HOST_WIDE_INT)(num2idx (nblocks))
			 >> vword_bits) < vword_bits));

	/* Build a terminator for the constructor list.  */
	rtcfg = build_tree_list (NULL_TREE, NULL_TREE);
	return;
      }

    ckfail = create_tmp_var (boolean_type_node, ".cfrfail");
    ckpart = create_tmp_var (boolean_type_node, ".cfrpart");
    ckinv = create_tmp_var (boolean_type_node, ".cfrinv");
    ckblk = create_tmp_var (boolean_type_node, ".cfrblk");

    gassign *ckfail_init = gimple_build_assign (ckfail, boolean_false_node);
    gimple_seq_add_stmt (&ckseq, ckfail_init);
  }
  /* Insert SEQ before a resx or a call in INSBB.  */
  void insert_exit_check_in_block (gimple_seq seq, basic_block insbb)
  {
    gimple_stmt_iterator gsi = gsi_last_bb (insbb);

    while (!gsi_end_p (gsi))
      if (is_a <gresx *> (gsi_stmt (gsi))
	  || is_a <gcall *> (gsi_stmt (gsi)))
	break;
      else
	gsi_prev (&gsi);

    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
  }
  /* Insert SEQ on E.  */
  void insert_exit_check_on_edge (gimple_seq seq, edge e)
  {
    if (!(e->flags & EDGE_ABNORMAL))
      {
	gsi_insert_seq_on_edge_immediate (e, seq);
	return;
      }

    /* Initialize SSA boolean constants for use in abnormal PHIs.  */
    if (!vfalse)
      {
	vfalse = make_ssa_name (boolean_type_node);
	vtrue = make_ssa_name (boolean_type_node);

	gimple_seq vft_seq = NULL;
	gassign *vfalse_init = gimple_build_assign (vfalse, boolean_false_node);
	gimple_seq_add_stmt (&vft_seq, vfalse_init);
	gassign *vtrue_init = gimple_build_assign (vtrue, boolean_true_node);
	gimple_seq_add_stmt (&vft_seq, vtrue_init);

	gsi_insert_seq_on_edge_immediate (single_succ_edge
					  (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
					  vft_seq);
      }

    /* We can't insert on abnormal edges, but we can arrange for SEQ
       to execute conditionally at dest.  Add a PHI boolean with TRUE
       from E and FALSE from other preds, split the whole block, add a
       test for the PHI to run a new block with SEQ or skip straight
       to the original block.  If there are multiple incoming abnormal
       edges, we'll do this multiple times.  ??? Unless there are
       multiple abnormal edges with different postcheck status, we
       could split the block and redirect other edges, rearranging the
       PHI nodes.  Optimizers already know how to do this, so we can
       keep things simple here.  */
    basic_block bb = e->dest;
    basic_block bb_postcheck = split_block_after_labels (bb)->dest;

    basic_block bb_check = create_empty_bb (e->dest);
    bb_check->count = e->count ();
    if (dom_info_available_p (CDI_DOMINATORS))
      set_immediate_dominator (CDI_DOMINATORS, bb_check, bb);
    if (current_loops)
      add_bb_to_loop (bb_check, current_loops->tree_root);

    gimple_stmt_iterator chkpt = gsi_after_labels (bb_check);
    gsi_insert_seq_before_without_update (&chkpt, seq, GSI_SAME_STMT);
    edge edge_postcheck = make_edge (bb_check, bb_postcheck, EDGE_FALLTHRU);
    edge_postcheck->probability = profile_probability::always ();

    tree cond_var = make_ssa_name (boolean_type_node);
    gcond *cond = gimple_build_cond (NE_EXPR, cond_var, boolean_false_node,
				     NULL, NULL);
    gimple_stmt_iterator condpt = gsi_after_labels (bb);
    gsi_insert_before (&condpt, cond, GSI_SAME_STMT);
    edge edge_nocheck = single_succ_edge (bb);
    edge_nocheck->flags &= ~EDGE_FALLTHRU;
    edge_nocheck->flags |= EDGE_FALSE_VALUE;
    edge edge_check = make_edge (bb, bb_check, EDGE_TRUE_VALUE);
    edge_check->probability = e->count ().probability_in (bb->count);
    edge_nocheck->probability = edge_check->probability.invert ();

    gphi *cond_phi = create_phi_node (cond_var, bb);
    for (int i = 0, ei = EDGE_COUNT (bb->preds); i < ei; i++)
      {
	edge pred = EDGE_PRED (bb, i);
	bool check_edge = pred == e;
	tree val = check_edge ? vtrue : vfalse;
	add_phi_arg (cond_phi, val, pred, UNKNOWN_LOCATION);
      }
  }
  /* Add checking code to CHK_EDGES and CHKCALL_BLOCKS, and
     initialization code on the entry edge.  Before this point, the
     CFG has been undisturbed, and all the needed data has been
     collected and safely stowed.  */
  void check (chk_edges_t &chk_edges,
	      int count_chkcall, auto_sbitmap const &chkcall_blocks)
  {
    /* If we're using out-of-line checking, create and statically
       initialize the CFG checking representation, generate the
       checker call for the checking sequence, and insert it in all
       exit edges, if there's more than one.  If there's only one, we
       use the same logic as the inline case to insert the check
       sequence.  */
    if (rtcfg)
      {
	/* Unreverse the list, and drop the tail node turned into head.  */
	rtcfg = TREE_CHAIN (nreverse (rtcfg));

	/* Turn the indices stored in TREE_PURPOSE into separate
	   nodes.  It was useful to keep them together to enable
	   combination of masks and for clear separation of
	   terminators while constructing it, but now we have to turn
	   it into a sequence of words.  */
	for (tree node = rtcfg; node; node = TREE_CHAIN (node))
	  {
	    tree wordidx = TREE_PURPOSE (node);
	    if (!wordidx)
	      continue;

	    TREE_PURPOSE (node) = NULL_TREE;
	    TREE_CHAIN (node) = tree_cons (NULL_TREE,
					   fold_convert (vword_type, wordidx),
					   TREE_CHAIN (node));
	  }

	/* Build the static initializer for the array with the CFG
	   representation for out-of-line checking.  */
	tree init = build_constructor_from_list (NULL_TREE, rtcfg);
	TREE_TYPE (init) = build_array_type_nelts (vword_type,
						   CONSTRUCTOR_NELTS (init));
	char buf[32];
	ASM_GENERATE_INTERNAL_LABEL (buf, "Lhardcfg",
				     current_function_funcdef_no);
	rtcfg = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			    get_identifier (buf),
			    TREE_TYPE (init));
	TREE_READONLY (rtcfg) = 1;
	TREE_STATIC (rtcfg) = 1;
	TREE_ADDRESSABLE (rtcfg) = 1;
	TREE_USED (rtcfg) = 1;
	DECL_ARTIFICIAL (rtcfg) = 1;
	DECL_IGNORED_P (rtcfg) = 1;
	DECL_INITIAL (rtcfg) = init;
	make_decl_rtl (rtcfg);
	varpool_node::finalize_decl (rtcfg);
838 gcall
*call_chk
= gimple_build_call (builtin_decl_explicit
839 (BUILT_IN___HARDCFR_CHECK
), 3,
840 build_int_cst (sizetype
,
842 build1 (ADDR_EXPR
, vword_ptr
,
844 build1 (ADDR_EXPR
, vword_ptr
,
846 gimple_seq_add_stmt (&ckseq
, call_chk
);
848 gimple
*clobber
= gimple_build_assign (visited
,
850 (TREE_TYPE (visited
)));
851 gimple_seq_add_stmt (&ckseq
, clobber
);
853 /* If we have multiple exit edges, insert (copies of)
854 ckseq in all of them. */
855 for (int i
= chk_edges
.length (); i
--; )
857 gimple_seq seq
= ckseq
;
858 /* Copy the sequence, unless we're dealing with the
859 last edge (we're counting down to zero). */
860 if (i
|| count_chkcall
)
861 seq
= gimple_seq_copy (seq
);
863 edge e
= chk_edges
[i
];
867 if (e
->dest
== EXIT_BLOCK_PTR_FOR_FN (cfun
))
869 "Inserting out-of-line check in"
870 " block %i's edge to exit.\n",
874 "Inserting out-of-line check in"
875 " block %i's edge to postcheck block %i.\n",
876 e
->src
->index
, e
->dest
->index
);
879 insert_exit_check_on_edge (seq
, e
);
881 gcc_checking_assert (!bitmap_bit_p (chkcall_blocks
, e
->src
->index
));
886 EXECUTE_IF_SET_IN_BITMAP (chkcall_blocks
, 0, i
, it
)
888 basic_block bb
= BASIC_BLOCK_FOR_FN (cfun
, i
);
890 gimple_seq seq
= ckseq
;
891 gcc_checking_assert (count_chkcall
> 0);
893 seq
= gimple_seq_copy (seq
);
897 "Inserting out-of-line check before stmt in block %i.\n",
900 insert_exit_check_in_block (seq
, bb
);
903 gcc_checking_assert (count_chkcall
== 0);
      }
    else
      {
	/* Inline checking requires a single exit edge.  */
	gimple *last = gimple_build_assign (visited,
					    build_clobber
					    (TREE_TYPE (visited)));
	gimple_seq_add_stmt (&ckseq, last);

	if (!count_chkcall)
	  {
	    edge e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));

	    if (dump_file)
	      {
		if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		  fprintf (dump_file,
			   "Inserting inline check in"
			   " block %i's edge to exit.\n",
			   e->src->index);
		else
		  fprintf (dump_file,
			   "Inserting inline check in"
			   " block %i's edge to postcheck block %i.\n",
			   e->src->index, e->dest->index);
	      }

	    insert_exit_check_on_edge (ckseq, e);
	  }
	else
	  {
	    gcc_checking_assert (count_chkcall == 1);

	    sbitmap_iterator it;
	    unsigned i;
	    EXECUTE_IF_SET_IN_BITMAP (chkcall_blocks, 0, i, it)
	      {
		basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);

		gimple_seq seq = ckseq;
		gcc_checking_assert (count_chkcall > 0);
		if (--count_chkcall)
		  seq = gimple_seq_copy (seq);

		if (dump_file)
		  fprintf (dump_file,
			   "Inserting inline check before stmt in block %i.\n",
			   bb->index);

		insert_exit_check_in_block (seq, bb);
	      }

	    gcc_checking_assert (count_chkcall == 0);
	  }

	/* The inserted ckseq computes CKFAIL at LAST.  Now we have to
	   conditionally trap on it.  */
	basic_block insbb = gimple_bb (last);

	/* Create a block with the unconditional trap.  */
	basic_block trp = create_empty_bb (insbb);
	gimple_stmt_iterator gsit = gsi_after_labels (trp);

	gcall *trap = gimple_build_call (builtin_decl_explicit
					 (BUILT_IN_TRAP), 0);
	gsi_insert_before (&gsit, trap, GSI_SAME_STMT);

	if (BB_PARTITION (insbb))
	  BB_SET_PARTITION (trp, BB_COLD_PARTITION);

	if (current_loops)
	  add_bb_to_loop (trp, current_loops->tree_root);

	/* Insert a conditional branch to the trap block.  If the
	   conditional wouldn't be the last stmt, split the block.  */
	gimple_stmt_iterator gsi = gsi_for_stmt (last);
	if (!gsi_one_before_end_p (gsi))
	  split_block (gsi_bb (gsi), gsi_stmt (gsi));

	gcond *cond = gimple_build_cond (NE_EXPR, ckfail,
					 fold_convert (TREE_TYPE (ckfail),
						       boolean_false_node),
					 NULL, NULL);
	gsi_insert_after (&gsi, cond, GSI_SAME_STMT);

	/* Adjust the edges.  */
	single_succ_edge (gsi_bb (gsi))->flags &= ~EDGE_FALLTHRU;
	single_succ_edge (gsi_bb (gsi))->flags |= EDGE_FALSE_VALUE;
	single_succ_edge (gsi_bb (gsi))->probability
	  = profile_probability::always ();
	edge e = make_edge (gsi_bb (gsi), trp, EDGE_TRUE_VALUE);
	e->probability = profile_probability::never ();
	gcc_checking_assert (e->dest == trp);
	gcc_checking_assert (!e->dest->count.initialized_p ());
	e->dest->count = e->count ();

	/* Set the trap's dominator after splitting.  */
	if (dom_info_available_p (CDI_DOMINATORS))
	  set_immediate_dominator (CDI_DOMINATORS, trp, gimple_bb (last));
      }
    /* Insert initializers for visited at the entry.  Do this after
       other insertions, to avoid messing with block numbers.  */
    gimple_seq iseq = NULL;

    gcall *vinit = gimple_build_call (builtin_decl_explicit
				      (BUILT_IN_MEMSET), 3,
				      build1 (ADDR_EXPR,
					      build_pointer_type
					      (TREE_TYPE (visited)),
					      visited),
				      integer_zero_node,
				      TYPE_SIZE_UNIT (TREE_TYPE (visited)));
    gimple_seq_add_stmt (&iseq, vinit);

    gsi_insert_seq_on_edge_immediate (single_succ_edge
				      (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
				      iseq);
  }
  /* Push onto RTCFG a (mask, index) pair to test for IBB when BB is
     visited.  XSELF is to be the ENTRY or EXIT block (depending on
     whether we're looking at preds or succs), to be remapped to BB
     because we can't represent them, and there's no point in testing
     them anyway.  Return true if no further blocks need to be visited
     in the list, because we've already encountered a
     self-test.  */
  bool push_rtcfg_pair (basic_block ibb, basic_block bb,
			basic_block xself)
  {
    /* We don't have a bit to test for the entry and exit
       blocks, but it is always visited, so we test for the
       block itself, which gets us the right result and
       enables the self-test optimization below.  */
    if (ibb == xself)
      ibb = bb;

    tree mask, idx = vwordidx (ibb, &mask);
    /* Combine masks with the same idx, but not if we're going
       to optimize for self-test.  */
    if (ibb != bb && TREE_PURPOSE (rtcfg)
	&& tree_int_cst_equal (idx, TREE_PURPOSE (rtcfg)))
      TREE_VALUE (rtcfg) = int_const_binop (BIT_IOR_EXPR, mask,
					    TREE_VALUE (rtcfg));
    else
      rtcfg = tree_cons (idx, mask, rtcfg);

    /* For self-tests (i.e., tests that the block itself was
       also visited), testing anything else is pointless,
       because it's a tautology, so just drop other edges.  */
    if (ibb == bb)
      {
	while (TREE_PURPOSE (TREE_CHAIN (rtcfg)))
	  TREE_CHAIN (rtcfg) = TREE_CHAIN (TREE_CHAIN (rtcfg));
	return true;
      }

    return false;
  }
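  /* Illustrative layout of the RTCFG data eventually consumed by the
     out-of-line checker (the encoding must stay in sync with the
     libgcc hardcfr implementation mentioned in the constructor): for
     each block, in index order, a zero-terminated run of (mask, word
     index) pairs naming its predecessors, then another zero-terminated
     run naming its successors, roughly

       ... maskP1, idxP1, maskP2, idxP2, 0,   <- preds of block B
           maskS1, idxS1, 0,                  <- succs of block B
       ...

     push_rtcfg_pair builds this list backwards, merging masks that
     share a word index and collapsing runs to a self-test when the
     entry or exit block is remapped to the block itself.  */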
  /* Add to CKSEQ stmts to clear CKPART if OBB is visited.  */
  void
  build_block_check (basic_block obb)
  {
    tree vobb = fold_convert (TREE_TYPE (ckblk),
			      vindex (obb, &ckseq));
    gassign *blkrunp = gimple_build_assign (ckblk, vobb);
    gimple_seq_add_stmt (&ckseq, blkrunp);

    gassign *blknotrunp = gimple_build_assign (ckinv,
						EQ_EXPR,
						ckblk,
						fold_convert
						(TREE_TYPE (ckblk),
						 boolean_false_node));
    gimple_seq_add_stmt (&ckseq, blknotrunp);

    gassign *andblk = gimple_build_assign (ckpart,
					   BIT_AND_EXPR,
					   ckpart, ckinv);
    gimple_seq_add_stmt (&ckseq, andblk);
  }
  /* Add to BB code to set its bit in VISITED, and add to RTCFG or
     CKSEQ the data or code needed to check BB's predecessors and
     successors.  If CHECKPOINT, assume the block is a checkpoint,
     whether or not it has an edge to EXIT.  If POSTCHECK, assume the
     block post-dominates checkpoints and therefore no bitmap setting
     or checks are to be performed in or for it.  Do NOT change the
     CFG.  */
  void visit (basic_block bb, bool checkpoint, bool postcheck)
  {
    /* Set the bit in VISITED when entering the block.  */
    gimple_stmt_iterator gsi = gsi_after_labels (bb);
    if (!postcheck)
      gsi_insert_seq_before (&gsi, vset (bb), GSI_SAME_STMT);

    if (rtcfg)
      {
	if (!postcheck)
	  {
	    /* Build a list of (index, mask) terminated by (NULL, 0).
	       Consolidate masks with the same index when they're
	       adjacent.  First, predecessors.  Count backwards, because
	       we're going to reverse the list.  The order shouldn't
	       matter, but let's not make it surprising.  */
	    for (int i = EDGE_COUNT (bb->preds); i--; )
	      if (push_rtcfg_pair (EDGE_PRED (bb, i)->src, bb,
				   ENTRY_BLOCK_PTR_FOR_FN (cfun)))
		break;
	  }
	rtcfg = tree_cons (NULL_TREE, build_int_cst (vword_type, 0), rtcfg);

	if (!postcheck)
	  {
	    /* Then, successors.  */
	    if (!checkpoint
		|| !push_rtcfg_pair (EXIT_BLOCK_PTR_FOR_FN (cfun),
				     bb, EXIT_BLOCK_PTR_FOR_FN (cfun)))
	      for (int i = EDGE_COUNT (bb->succs); i--; )
		if (push_rtcfg_pair (EDGE_SUCC (bb, i)->dest, bb,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)))
		  break;
	  }
	rtcfg = tree_cons (NULL_TREE, build_int_cst (vword_type, 0), rtcfg);
      }
    else if (!postcheck)
      {
	/* Schedule test to fail if the block was reached but somehow none
	   of its predecessors were.  */
	tree bit = fold_convert (TREE_TYPE (ckpart), vindex (bb, &ckseq));
	gassign *blkrunp = gimple_build_assign (ckpart, bit);
	gimple_seq_add_stmt (&ckseq, blkrunp);

	for (int i = 0, e = EDGE_COUNT (bb->preds); i < e; i++)
	  build_block_check (EDGE_PRED (bb, i)->src);
	gimple *orfailp = gimple_build_assign (ckfail, BIT_IOR_EXPR,
					       ckfail, ckpart);
	gimple_seq_add_stmt (&ckseq, orfailp);

	/* Likewise for successors.  */
	gassign *blkruns = gimple_build_assign (ckpart, unshare_expr (bit));
	gimple_seq_add_stmt (&ckseq, blkruns);

	if (checkpoint)
	  build_block_check (EXIT_BLOCK_PTR_FOR_FN (cfun));
	for (int i = 0, e = EDGE_COUNT (bb->succs); i < e; i++)
	  build_block_check (EDGE_SUCC (bb, i)->dest);

	gimple *orfails = gimple_build_assign (ckfail, BIT_IOR_EXPR,
					       ckfail, ckpart);
	gimple_seq_add_stmt (&ckseq, orfails);
      }
  }
};
/* Avoid checking before noreturn calls that are known (expected,
   really) to finish by throwing an exception, rather than by ending
   the program or looping forever.  Such functions have to be
   annotated, with an attribute (expected_throw) or flag (ECF_XTHROW),
   so that exception-raising functions, such as C++'s __cxa_throw,
   __cxa_rethrow, and Ada's gnat_rcheck_*, gnat_reraise*,
   ada.exception.raise_exception*, and the language-independent
   unwinders could be detected here and handled differently from other
   noreturn functions.  */
static bool
always_throwing_noreturn_call_p (gimple *stmt)
{
  if (!is_a <gcall *> (stmt))
    return is_a <gresx *> (stmt);

  gcall *call = as_a <gcall *> (stmt);
  return (gimple_call_noreturn_p (call)
	  && gimple_call_expected_throw_p (call));
}
/* Control flow redundancy hardening: record the execution path, and
   verify at exit that an expected path was taken.  */

unsigned int
pass_harden_control_flow_redundancy::execute (function *fun)
{
  bool const check_at_escaping_exceptions
    = (flag_exceptions
       && flag_harden_control_flow_redundancy_check_exceptions);
  bool const check_before_noreturn_calls
    = flag_harden_control_flow_redundancy_check_noreturn > HCFRNR_NEVER;
  bool const check_before_nothrow_noreturn_calls
    = (check_before_noreturn_calls
       && flag_harden_control_flow_redundancy_check_noreturn
       >= HCFRNR_NOTHROW);
  bool const check_before_throwing_noreturn_calls
    = (flag_exceptions
       && check_before_noreturn_calls
       && flag_harden_control_flow_redundancy_check_noreturn
       > HCFRNR_NOTHROW);
  bool const check_before_always_throwing_noreturn_calls
    = (flag_exceptions
       && check_before_noreturn_calls
       && flag_harden_control_flow_redundancy_check_noreturn
       >= HCFRNR_ALWAYS);
  basic_block bb;
  basic_block bb_eh_cleanup = NULL;
  if (flag_harden_control_flow_redundancy_skip_leaf)
    {
      bool found_calls_p = false;

      FOR_EACH_BB_FN (bb, fun)
	{
	  for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
	       !gsi_end_p (gsi); gsi_prev (&gsi))
	    if (is_a <gcall *> (gsi_stmt (gsi)))
	      {
		found_calls_p = true;
		break;
	      }
	  if (found_calls_p)
	    break;
	}

      if (!found_calls_p)
	{
	  if (dump_file)
	    fprintf (dump_file,
		     "Disabling CFR for leaf function, as requested\n");

	  return 0;
	}
    }
  if (check_at_escaping_exceptions)
    {
      int lp_eh_cleanup = -1;

      /* Record the preexisting blocks, to avoid visiting newly-created
	 blocks.  */
      auto_sbitmap to_visit (last_basic_block_for_fn (fun));
      bitmap_clear (to_visit);

      FOR_EACH_BB_FN (bb, fun)
	bitmap_set_bit (to_visit, bb->index);

      /* Scan the blocks for stmts with escaping exceptions, that
	 wouldn't be denoted in the CFG, and associate them with an
	 empty cleanup handler around the whole function.  Walk
	 backwards, so that even when we split the block, the stmts
	 we have yet to visit stay in the block we keep iterating on.  */
      sbitmap_iterator it;
      unsigned i;
      EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it)
	{
	  bb = BASIC_BLOCK_FOR_FN (fun, i);
	  for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
	       !gsi_end_p (gsi); gsi_prev (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);
	      if (!stmt_could_throw_p (fun, stmt))
		continue;

	      /* If it must not throw, or if it already has a handler,
		 we need not worry about it.  */
	      if (lookup_stmt_eh_lp (stmt) != 0)
		continue;

	      /* Don't split blocks at, nor add EH edges to, tail
		 calls, we will add verification before the call
		 anyway.  */
	      if (is_a <gcall *> (stmt)
		  && (gimple_call_must_tail_p (as_a <gcall *> (stmt))
		      || gimple_call_tail_p (as_a <gcall *> (stmt))
		      || returning_call_p (as_a <gcall *> (stmt))))
		continue;

	      if (!gsi_one_before_end_p (gsi))
		split_block (bb, stmt);
	      /* A resx or noreturn call need not be associated with
		 the cleanup handler if we're going to add checking
		 before it.  We only test cases that didn't require
		 block splitting because noreturn calls would always
		 be at the end of blocks, and we test for zero
		 successors because if there is an edge, it's not
		 noreturn, as any EH edges would have already been
		 caught by the lookup_stmt_eh_lp test above.  */
	      else if (check_before_noreturn_calls
		       && EDGE_COUNT (bb->succs) == 0
		       && (is_a <gresx *> (stmt)
			   ? check_before_always_throwing_noreturn_calls
			   : (!is_a <gcall *> (stmt)
			      || !gimple_call_noreturn_p (stmt))
			   ? (gcc_unreachable (), false)
			   : (!flag_exceptions
			      || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
			   ? check_before_nothrow_noreturn_calls
			   : always_throwing_noreturn_call_p (stmt)
			   ? check_before_always_throwing_noreturn_calls
			   : check_before_throwing_noreturn_calls))
		{
		  if (dump_file)
		    {
		      fprintf (dump_file,
			       "Bypassing cleanup for noreturn stmt"
			       " in block %i:\n", bb->index);
		      print_gimple_stmt (dump_file, stmt, 0);
		    }
		  continue;
		}
	      if (!bb_eh_cleanup)
		{
		  bb_eh_cleanup = create_empty_bb (bb);
		  if (dom_info_available_p (CDI_DOMINATORS))
		    set_immediate_dominator (CDI_DOMINATORS, bb_eh_cleanup, bb);
		  if (current_loops)
		    add_bb_to_loop (bb_eh_cleanup, current_loops->tree_root);

		  /* Make the new block an EH cleanup for the call.  */
		  eh_region new_r = gen_eh_region_cleanup (NULL);
		  eh_landing_pad lp = gen_eh_landing_pad (new_r);
		  tree label = gimple_block_label (bb_eh_cleanup);
		  lp->post_landing_pad = label;
		  EH_LANDING_PAD_NR (label) = lp_eh_cleanup = lp->index;

		  /* Just propagate the exception.
		     We will later insert the verifier call.  */
		  gimple_stmt_iterator ehgsi;
		  ehgsi = gsi_after_labels (bb_eh_cleanup);
		  gresx *resx = gimple_build_resx (new_r->index);
		  gsi_insert_before (&ehgsi, resx, GSI_SAME_STMT);

		  if (dump_file)
		    fprintf (dump_file,
			     "Created cleanup block %i:\n",
			     bb_eh_cleanup->index);
		}
	      else if (dom_info_available_p (CDI_DOMINATORS))
		{
		  basic_block immdom;
		  immdom = get_immediate_dominator (CDI_DOMINATORS,
						    bb_eh_cleanup);
		  if (!dominated_by_p (CDI_DOMINATORS, bb, immdom))
		    {
		      immdom = nearest_common_dominator (CDI_DOMINATORS,
							 immdom, bb);
		      set_immediate_dominator (CDI_DOMINATORS,
					       bb_eh_cleanup, immdom);
		    }
		}

	      if (dump_file)
		{
		  fprintf (dump_file,
			   "Associated cleanup block with stmt in block %i:\n",
			   bb->index);
		  print_gimple_stmt (dump_file, stmt, 0);
		}

	      add_stmt_to_eh_lp (stmt, lp_eh_cleanup);
	      /* Finally, wire the EH cleanup block into the CFG.  */
	      edge neeh = make_eh_edge (stmt);
	      neeh->probability = profile_probability::never ();
	      gcc_checking_assert (neeh->dest == bb_eh_cleanup);
	      if (neeh->dest->count.initialized_p ())
		neeh->dest->count += neeh->count ();
	      else
		neeh->dest->count = neeh->count ();
	    }
	}

      if (bb_eh_cleanup)
	{
	  /* A cfg_cleanup after bb_eh_cleanup makes for a more compact
	     rtcfg, and it avoids bb numbering differences when we split
	     blocks because of trailing debug insns only.  */
	  cleanup_tree_cfg ();
	  gcc_checking_assert (EDGE_COUNT (bb_eh_cleanup->succs) == 0);
	}
    }
  /* These record blocks with calls that are to be preceded by
     checkpoints, such as noreturn calls (if so chosen), must-tail
     calls, potential early-marked tail calls, and returning calls (if
     so chosen).  */
  int count_chkcall = 0;
  auto_sbitmap chkcall_blocks (last_basic_block_for_fn (fun));
  bitmap_clear (chkcall_blocks);

  /* We wish to add verification at blocks without successors, such as
     noreturn calls (raising or not) and the reraise at the cleanup
     block, but not other reraises: they will go through the cleanup
     block.  */
  if (check_before_noreturn_calls)
    FOR_EACH_BB_FN (bb, fun)
      {
	gimple_stmt_iterator gsi = gsi_last_bb (bb);
	if (gsi_end_p (gsi))
	  continue;
	gimple *stmt = gsi_stmt (gsi);

	if (EDGE_COUNT (bb->succs) == 0)
	  {
	    /* A stmt at the end of a block without any successors is
	       either a resx or a noreturn call without a local
	       handler.  Check that it's one of the desired
	       checkpoints.  */
	    if (flag_exceptions && is_a <gresx *> (stmt)
		? (check_before_always_throwing_noreturn_calls
		   || bb == bb_eh_cleanup)
		: (!is_a <gcall *> (stmt)
		   || !gimple_call_noreturn_p (stmt))
		? (stmt_can_make_abnormal_goto (stmt)
		   /* ??? Check before indirect nonlocal goto, or
		      calls thereof?  */
		   ? false
		   /* Catch cases in which successors would be
		      expected.  */
		   : (gcc_unreachable (), false))
		: (!flag_exceptions
		   || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
		? check_before_nothrow_noreturn_calls
		: always_throwing_noreturn_call_p (stmt)
		? check_before_always_throwing_noreturn_calls
		: check_before_throwing_noreturn_calls)
	      {
		if (dump_file)
		  {
		    fprintf (dump_file,
			     "Scheduling check before stmt"
			     " in succ-less block %i:\n",
			     bb->index);
		    print_gimple_stmt (dump_file, stmt, 0);
		  }

		if (bitmap_set_bit (chkcall_blocks, bb->index))
		  count_chkcall++;
		else
		  gcc_unreachable ();
	      }
	    continue;
	  }
	/* If there are no exceptions, it would seem like any noreturn
	   call must have zero successor edges, but __builtin_return
	   gets successor edges.  We don't want to handle it here, it
	   will be dealt with in sibcall_search_preds.  Otherwise,
	   check for blocks without non-EH successors, but skip those
	   with resx stmts and edges (i.e., those other than that in
	   bb_eh_cleanup), since those will go through bb_eh_cleanup,
	   that will have been counted as noreturn above because it
	   has no successors.  */
	gcc_checking_assert (bb != bb_eh_cleanup
			     || !check_at_escaping_exceptions);
	if (flag_exceptions && is_a <gresx *> (stmt)
	    ? check_before_always_throwing_noreturn_calls
	    : (!is_a <gcall *> (stmt)
	       || !gimple_call_noreturn_p (stmt))
	    ? false
	    : (!flag_exceptions
	       || gimple_call_nothrow_p (as_a <gcall *> (stmt)))
	    ? false /* rather than check_before_nothrow_noreturn_calls */
	    : always_throwing_noreturn_call_p (stmt)
	    ? check_before_always_throwing_noreturn_calls
	    : check_before_throwing_noreturn_calls)
	  {
	    gcc_checking_assert (single_succ_p (bb)
				 && (single_succ_edge (bb)->flags & EDGE_EH));

	    if (dump_file)
	      {
		fprintf (dump_file,
			 "Scheduling check before stmt"
			 " in EH-succ block %i:\n",
			 bb->index);
		print_gimple_stmt (dump_file, stmt, 0);
	      }

	    if (bitmap_set_bit (chkcall_blocks, bb->index))
	      count_chkcall++;
	    else
	      gcc_unreachable ();
	  }
      }
  else if (bb_eh_cleanup)
    {
      if (bitmap_set_bit (chkcall_blocks, bb_eh_cleanup->index))
	count_chkcall++;
      else
	gcc_unreachable ();
    }

  gcc_checking_assert (!bb_eh_cleanup
		       || bitmap_bit_p (chkcall_blocks, bb_eh_cleanup->index));
  /* If we don't have edges to exit nor noreturn calls (including the
     cleanup reraise), then we may skip instrumentation: that would
     amount to a function that ends with an infinite loop.  */
  if (!count_chkcall
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      if (dump_file)
	fprintf (dump_file,
		 "Disabling CFR, no exit paths to check\n");

      return 0;
    }
  /* Search for must-tail calls, early-marked potential tail calls,
     and, if requested, returning calls.  As we introduce early
     checks, record the edges and blocks where they are needed.  */
  int count_postchk = 0;
  auto_sbitmap postchk_blocks (last_basic_block_for_fn (fun));
  bitmap_clear (postchk_blocks);
  chk_edges_t chk_edges;
  hardcfr_sibcall_search_preds (EXIT_BLOCK_PTR_FOR_FN (fun), chk_edges,
				count_chkcall, chkcall_blocks,
				count_postchk, postchk_blocks,
				NULL);

  rt_bb_visited vstd (chk_edges.length () + count_chkcall);
  auto_sbitmap combined_blocks (last_basic_block_for_fn (fun));
  bitmap_copy (combined_blocks, chkcall_blocks);
  int i;
  edge *e;
  FOR_EACH_VEC_ELT (chk_edges, i, e)
    if (!bitmap_set_bit (combined_blocks, (*e)->src->index))
      /* There may be multiple chk_edges with the same src block;
	 guard against overlaps with chkcall_blocks only.  */
      gcc_assert (!bitmap_bit_p (chkcall_blocks, (*e)->src->index));
  /* Visit blocks in index order, because building rtcfg depends on
     that.  Blocks must be compact, which the cleanup_cfg requirement
     ensures.  This would also enable FOR_EACH_BB_FN to be used to
     iterate in index order, but bb_eh_cleanup block splits and
     insertions change that.  */
  gcc_checking_assert (n_basic_blocks_for_fn (fun)
		       == last_basic_block_for_fn (fun));
  for (int i = NUM_FIXED_BLOCKS; i < n_basic_blocks_for_fn (fun); i++)
    {
      bb = BASIC_BLOCK_FOR_FN (fun, i);
      gcc_checking_assert (bb->index == i);
      vstd.visit (bb, bitmap_bit_p (combined_blocks, i),
		  bitmap_bit_p (postchk_blocks, i));
    }

  vstd.check (chk_edges, count_chkcall, chkcall_blocks);

  return
    TODO_update_ssa
    | TODO_cleanup_cfg
    | TODO_verify_il;
}
/* Instantiate a hardcfr pass.  */

gimple_opt_pass *
make_pass_harden_control_flow_redundancy (gcc::context *ctxt)
{
  return new pass_harden_control_flow_redundancy (ctxt);
}