/* Harden conditionals.
   Copyright (C) 2021-2024 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <oliva@adacore.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "fold-const.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "basic-block.h"
#include "cfghooks.h"
#include "cfgloop.h"
#include "tree-eh.h"
#include "sbitmap.h"
#include "diagnostic.h"
#include "intl.h"
#include "regs.h"

namespace {
/* These passes introduce redundant, but reversed conditionals at
   compares, such as those used in conditional branches, and those
   that compute boolean results.  This doesn't make much sense for
   abstract CPUs, but this kind of hardening may avoid undesirable
   execution paths on actual CPUs under such attacks as power
   deprivation.  */
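
/* For illustration: with -fharden-conditional-branches, a branch on
   "x op y" is re-tested with the reversed compare "x cop y" on each
   outgoing edge, and with -fharden-compares, a boolean "z = x op y"
   is recomputed as "z' = x cop y"; in both cases __builtin_trap () is
   called if the redundant reversed compare fails to yield the
   complementary result.  The exact GIMPLE forms are sketched in the
   comments within the execute functions below.  */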
/* Define a pass to harden conditionals other than branches.  */

const pass_data pass_data_harden_compares = {
  GIMPLE_PASS,
  "hardcmp",
  OPTGROUP_NONE,
  TV_NONE,
  PROP_cfg | PROP_ssa, // properties_required
  0,         // properties_provided
  0,         // properties_destroyed
  0,         // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg
  | TODO_verify_il, // properties_finish
};
class pass_harden_compares : public gimple_opt_pass
{
public:
  pass_harden_compares (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_harden_compares, ctxt)
  {}
  opt_pass *clone () final override
  {
    return new pass_harden_compares (m_ctxt);
  }
  bool gate (function *) final override
  {
    return flag_harden_compares;
  }
  unsigned int execute (function *) final override;
};
/* Define a pass to harden conditionals in branches.  This pass must
   run after the above, otherwise it will re-harden the checks
   introduced by the above.  */

const pass_data pass_data_harden_conditional_branches = {
  GIMPLE_PASS,
  "hardcbr",
  OPTGROUP_NONE,
  TV_NONE,
  PROP_cfg | PROP_ssa, // properties_required
  0,         // properties_provided
  0,         // properties_destroyed
  0,         // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg
  | TODO_verify_il, // properties_finish
};
class pass_harden_conditional_branches : public gimple_opt_pass
{
public:
  pass_harden_conditional_branches (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_harden_conditional_branches, ctxt)
  {}
  opt_pass *clone () final override
  {
    return new pass_harden_conditional_branches (m_ctxt);
  }
  bool gate (function *) final override
  {
    return flag_harden_conditional_branches;
  }
  unsigned int execute (function *) final override;
};

} // anonymous namespace
/* If VAL is an SSA name, return an SSA name holding the same value,
   but without the compiler's knowing that it holds the same value, so
   that uses thereof can't be optimized the way VAL might.  Insert
   stmts that initialize it before *GSIP, with LOC.

   Otherwise, VAL must be an invariant, returned unchanged.  */
static inline tree
detach_value (location_t loc, gimple_stmt_iterator *gsip, tree val)
{
  if (TREE_CONSTANT (val) || TREE_CODE (val) != SSA_NAME)
    {
      gcc_checking_assert (is_gimple_min_invariant (val));
      return val;
    }
  /* Create a SSA "copy" of VAL.  It would be nice to have it named
     after the corresponding variable, but sharing the same decl is
     problematic when VAL is a DECL_BY_REFERENCE RESULT_DECL, and
     copying just the identifier hits -fcompare-debug failures.  */
  tree ret = make_ssa_name (TREE_TYPE (val));
  /* Some modes won't fit in general regs, so we fall back to memory
     for them.  ??? It would be ideal to try to identify an alternate,
     wider or more suitable register class, and use the corresponding
     constraint, but there's no logic to go from register class to
     constraint, even if there is a corresponding constraint, and even
     if we could enumerate constraints, we can't get to their string
     either.  So this will do for now.  */
  bool need_memory = true;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (val));
  for (int i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], i)
        && targetm.hard_regno_mode_ok (i, mode))
      {
        need_memory = false;
        break;
      }
  tree asminput = val;
  tree asmoutput = ret;
  const char *constraint_out = need_memory ? "=m" : "=g";
  const char *constraint_in = need_memory ? "m" : "0";
  if (need_memory)
    {
      tree temp = create_tmp_var (TREE_TYPE (val), "dtch");
      mark_addressable (temp);

      gassign *copyin = gimple_build_assign (temp, asminput);
      gimple_set_location (copyin, loc);
      gsi_insert_before (gsip, copyin, GSI_SAME_STMT);

      asminput = asmoutput = temp;
    }
  /* Output an asm statement with matching input and output.  It does
     nothing, but after it the compiler no longer knows the output
     still holds the same value as the input.  */
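  /* Roughly, for a register-sized VAL the statements built below
     amount to the GIMPLE equivalent of

       __asm__ ("" : "=g" (ret) : "0" (val));

     and, when memory is needed, of

       dtch = val; __asm__ ("" : "=m" (dtch) : "m" (dtch)); ret = dtch;

     with dtch clobbered afterwards.  (Illustrative sketch only.)  */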
  vec<tree, va_gc> *inputs = NULL;
  vec<tree, va_gc> *outputs = NULL;
  vec_safe_push (outputs,
                 build_tree_list
                 (build_tree_list
                  (NULL_TREE, build_string (strlen (constraint_out),
                                            constraint_out)),
                  asmoutput));
  vec_safe_push (inputs,
                 build_tree_list
                 (build_tree_list
                  (NULL_TREE, build_string (strlen (constraint_in),
                                            constraint_in)),
                  asminput));
  gasm *detach = gimple_build_asm_vec ("", inputs, outputs,
                                       NULL, NULL);
  gimple_set_location (detach, loc);
  gsi_insert_before (gsip, detach, GSI_SAME_STMT);
  if (need_memory)
    {
      gassign *copyout = gimple_build_assign (ret, asmoutput);
      gimple_set_location (copyout, loc);
      gsi_insert_before (gsip, copyout, GSI_SAME_STMT);
      SSA_NAME_DEF_STMT (ret) = copyout;

      gassign *clobber = gimple_build_assign (asmoutput,
                                              build_clobber
                                              (TREE_TYPE (asmoutput)));
      gimple_set_location (clobber, loc);
      gsi_insert_before (gsip, clobber, GSI_SAME_STMT);
    }
  else
    SSA_NAME_DEF_STMT (ret) = detach;

  return ret;
}
/* Build a cond stmt out of COP, LHS, RHS, insert it before *GSIP with
   location LOC.  *GSIP must be at the end of a basic block.  The succ
   edge out of the block becomes the true or false edge opposite to
   that in FLAGS.  Create a new block with a single trap stmt, in the
   cold partition if the function is partitioned, and a new edge to
   it as the other edge for the cond.  */
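
/* Roughly, if *GSIP ends block CHK whose single successor is SUCC,
   the result is

     CHK: ...; if (LHS COP RHS) goto TRP; else goto SUCC;
     TRP: __builtin_trap ();

   or with the branch senses swapped, depending on FLAGS.  */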
static inline void
insert_check_and_trap (location_t loc, gimple_stmt_iterator *gsip,
                       int flags, enum tree_code cop, tree lhs, tree rhs)
{
  basic_block chk = gsi_bb (*gsip);

  gcond *cond = gimple_build_cond (cop, lhs, rhs, NULL, NULL);
  gimple_set_location (cond, loc);
  gsi_insert_before (gsip, cond, GSI_SAME_STMT);

  basic_block trp = create_empty_bb (chk);
  trp->count = profile_count::zero ();

  gimple_stmt_iterator gsit = gsi_after_labels (trp);
  gcall *trap = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  gimple_call_set_ctrl_altering (trap, true);
  gimple_set_location (trap, loc);
  gsi_insert_before (&gsit, trap, GSI_SAME_STMT);
248 "Adding reversed compare to block %i, and trap to block %i\n",
249 chk
->index
, trp
->index
);
  if (BB_PARTITION (chk))
    BB_SET_PARTITION (trp, BB_COLD_PARTITION);

  int true_false_flag = flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
  gcc_assert (true_false_flag);
  int neg_true_false_flag = (~flags) & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
  /* Remove the fallthru bit, and set the truth value for the
     preexisting edge and for the newly-created one.  In hardcbr,
     FLAGS is taken from the edge of the original cond expr that we're
     dealing with, so the reversed compare is expected to yield the
     negated result, and the same result calls for a trap.  In
     hardcmp, we're comparing the boolean results of the original and
     of the reversed compare, so we're passed FLAGS to trap on
     equality.  */
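  /* E.g., hardcmp below passes EDGE_TRUE_VALUE along with an EQ_EXPR
     compare of the two boolean results, so the edge to the trap block
     becomes the true edge of that compare.  */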
  single_succ_edge (chk)->flags &= ~EDGE_FALLTHRU;
  single_succ_edge (chk)->flags |= neg_true_false_flag;
  single_succ_edge (chk)->probability = profile_probability::always ();
  edge e = make_edge (chk, trp, true_false_flag);
  e->probability = profile_probability::never ();

  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, trp, chk);
  if (current_loops)
    add_bb_to_loop (trp, current_loops->tree_root);
}
/* Split edge E, and insert_check_and_trap (see above) in the
   newly-created block, using already-detached copies of LHS's and
   RHS's values (see detach_value above) for the COP compare.  */

static inline void
insert_edge_check_and_trap (location_t loc, edge e,
                            enum tree_code cop, tree lhs, tree rhs)
{
  int flags = e->flags;
  basic_block src = e->src;
  basic_block dest = e->dest;
  location_t eloc = e->goto_locus;

  basic_block chk = split_edge (e);

  single_pred_edge (chk)->goto_locus = loc;
  single_succ_edge (chk)->goto_locus = eloc;

  if (dump_file)
    fprintf (dump_file,
             "Splitting edge %i->%i into block %i\n",
             src->index, dest->index, chk->index);

  gimple_stmt_iterator gsik = gsi_after_labels (chk);

  insert_check_and_trap (loc, &gsik, flags, cop, lhs, rhs);
}
/* Harden cond stmts at the end of FUN's blocks.  */

unsigned int
pass_harden_conditional_branches::execute (function *fun)
{
  /* Record the preexisting blocks, to avoid visiting newly-created
     blocks.  */
  auto_sbitmap to_visit (last_basic_block_for_fn (fun));
  bitmap_clear (to_visit);

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    bitmap_set_bit (to_visit, bb->index);

  unsigned i;
  sbitmap_iterator it;
  EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it)
    {
      bb = BASIC_BLOCK_FOR_FN (fun, i);

      gimple_stmt_iterator gsi = gsi_last_bb (bb);

      if (gsi_end_p (gsi))
        continue;

      gcond *cond = dyn_cast <gcond *> (gsi_stmt (gsi));
      if (!cond)
        continue;
      /* Turn:

         if (x op y) goto l1; else goto l2;

         into:

         if (x op y) goto l1'; else goto l2';
         l1': if (x' cop y') goto l1'trap; else goto l1;
         l1'trap: __builtin_trap ();
         l2': if (x' cop y') goto l2; else goto l2'trap;
         l2'trap: __builtin_trap ();

         where cop is a complementary boolean operation to op; l1', l1'trap,
         l2' and l2'trap are newly-created labels; and x' and y' hold the
         same value as x and y, but in a way that does not enable the
         compiler to optimize the redundant compare away.  */
      enum tree_code op = gimple_cond_code (cond);
      tree lhs = gimple_cond_lhs (cond);
      tree rhs = gimple_cond_rhs (cond);
      location_t loc = gimple_location (cond);

      enum tree_code cop = invert_tree_comparison (op, HONOR_NANS (lhs));

      if (cop == ERROR_MARK)
        /* ??? Can we do better?  */
        continue;

      /* Detach the values before the compares.  If we do so later,
         the compiler may use values inferred from the compares.  */
      bool same_p = (lhs == rhs);
      lhs = detach_value (loc, &gsi, lhs);
      rhs = same_p ? lhs : detach_value (loc, &gsi, rhs);

      insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 0), cop, lhs, rhs);
      insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 1), cop, lhs, rhs);
    }

  return 0;
}
/* Instantiate a hardcbr pass.  */

gimple_opt_pass *
make_pass_harden_conditional_branches (gcc::context *ctxt)
{
  return new pass_harden_conditional_branches (ctxt);
}
/* Return the fallthru edge of a block whose other edge is an EH
   edge.  If EHP is not NULL, store the EH edge in it.  */

static inline edge
non_eh_succ_edge (basic_block bb, edge *ehp = NULL)
{
  gcc_checking_assert (EDGE_COUNT (bb->succs) == 2);

  edge ret = find_fallthru_edge (bb->succs);

  int eh_idx = EDGE_SUCC (bb, 0) == ret;
  edge eh = EDGE_SUCC (bb, eh_idx);

  gcc_checking_assert (!(ret->flags & EDGE_EH)
                       && (eh->flags & EDGE_EH));

  if (ehp)
    *ehp = eh;

  return ret;
}
/* Harden boolean-yielding compares in FUN.  */

unsigned int
pass_harden_compares::execute (function *fun)
{
  /* Record the preexisting blocks, to avoid visiting newly-created
     blocks.  */
  auto_sbitmap to_visit (last_basic_block_for_fn (fun));
  bitmap_clear (to_visit);

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    bitmap_set_bit (to_visit, bb->index);
  unsigned i;
  sbitmap_iterator it;
  EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it)
    {
      bb = BASIC_BLOCK_FOR_FN (fun, i);

      for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
           !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          gassign *asgn = dyn_cast <gassign *> (gsi_stmt (gsi));
          if (!asgn)
            continue;
          /* Turn:

             z = x op y;

             into:

             z = x op y;
             z' = x' cop y';
             if (z == z') __builtin_trap ();

             where cop is a complementary boolean operation to op; and x'
             and y' hold the same value as x and y, but in a way that does
             not enable the compiler to optimize the redundant compare
             away.  */
          enum tree_code op = gimple_assign_rhs_code (asgn);

          enum tree_code cop;

          switch (op)
            {
            case EQ_EXPR:
            case NE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LT_EXPR:
            case LE_EXPR:
            case LTGT_EXPR:
            case UNEQ_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case ORDERED_EXPR:
            case UNORDERED_EXPR:
              cop = invert_tree_comparison (op,
                                            HONOR_NANS
                                            (gimple_assign_rhs1 (asgn)));

              if (cop == ERROR_MARK)
                /* ??? Can we do better?  */
                continue;

              break;

              /* ??? Maybe handle these too?  */
            case TRUTH_NOT_EXPR:
              /* ??? The code below assumes binary ops, it would have to
                 be adjusted for TRUTH_NOT_EXPR, since it's unary.  */
            case TRUTH_ANDIF_EXPR:
            case TRUTH_ORIF_EXPR:
            case TRUTH_AND_EXPR:
            case TRUTH_OR_EXPR:
            case TRUTH_XOR_EXPR:
            default:
              continue;
            }
          /* These are the operands for the verification.  */
          tree lhs = gimple_assign_lhs (asgn);
          tree op1 = gimple_assign_rhs1 (asgn);
          tree op2 = gimple_assign_rhs2 (asgn);
          location_t loc = gimple_location (asgn);
          /* Vector booleans can't be used in conditional branches.  ???
             Can we do better?  How to reduce compare and
             reversed-compare result vectors to a single boolean?  */
          if (VECTOR_TYPE_P (TREE_TYPE (op1)))
            continue;

          /* useless_type_conversion_p enables conversions from 1-bit
             integer types to boolean to be discarded.  */
          gcc_checking_assert (TREE_CODE (TREE_TYPE (lhs)) == BOOLEAN_TYPE
                               || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                                   && TYPE_PRECISION (TREE_TYPE (lhs)) == 1));
          tree rhs = copy_ssa_name (lhs);

          /* Detach the values before the compares, so that the
             compiler infers nothing from them, not even from a
             throwing compare that didn't throw.  */
          bool same_p = (op1 == op2);
          op1 = detach_value (loc, &gsi, op1);
          op2 = same_p ? op1 : detach_value (loc, &gsi, op2);
          gimple_stmt_iterator gsi_split = gsi;
          /* Don't separate the original assignment from debug stmts
             that might be associated with it, and arrange to split the
             block after debug stmts, so as to make sure the split block
             won't be debug stmts only.  */
          gsi_next_nondebug (&gsi_split);
          bool throwing_compare_p = stmt_ends_bb_p (asgn);
          if (throwing_compare_p)
            {
              basic_block nbb = split_edge (non_eh_succ_edge
                                            (gimple_bb (asgn)));
              gsi_split = gsi_start_bb (nbb);

              if (dump_file)
                fprintf (dump_file,
                         "Splitting non-EH edge from block %i into %i"
                         " after a throwing compare\n",
                         gimple_bb (asgn)->index, nbb->index);
            }
          gassign *asgnck = gimple_build_assign (rhs, cop, op1, op2);
          gimple_set_location (asgnck, loc);
          gsi_insert_before (&gsi_split, asgnck, GSI_SAME_STMT);
          /* We wish to insert a cond_expr after the compare, so arrange
             for it to be at the end of a block if it isn't, and for it
             to have a single successor in case there's more than
             one, as in PR104975.  */
          if (!gsi_end_p (gsi_split)
              || !single_succ_p (gsi_bb (gsi_split)))
            {
              if (!gsi_end_p (gsi_split))
                gsi_prev (&gsi_split);
              else
                gsi_split = gsi_last_bb (gsi_bb (gsi_split));
              basic_block obb = gsi_bb (gsi_split);
              basic_block nbb = split_block (obb, gsi_stmt (gsi_split))->dest;
              gsi_next (&gsi_split);
              gcc_checking_assert (gsi_end_p (gsi_split));

              single_succ_edge (bb)->goto_locus = loc;

              if (dump_file)
                fprintf (dump_file,
                         "Splitting block %i into %i"
                         " before the conditional trap branch\n",
                         obb->index, nbb->index);
            }
          /* If the check assignment must end a basic block, we can't
             insert the conditional branch in the same block, so split
             the block again, and prepare to insert the conditional
             branch in the new block.

             Also assign an EH region to the compare.  Even though it's
             unlikely that the hardening compare will throw after the
             original compare didn't, the compiler won't even know that
             it's the same compare operands, so add the EH edge anyway.  */
          if (throwing_compare_p)
            {
              add_stmt_to_eh_lp (asgnck, lookup_stmt_eh_lp (asgn));
              edge eh = make_eh_edge (asgnck);
              /* This compare looks like it could raise an exception,
                 but it's dominated by the original compare, that
                 would raise an exception first, so the EH edge from
                 this one is never really taken.  */
              eh->probability = profile_probability::never ();
              if (eh->dest->count.initialized_p ())
                eh->dest->count += eh->count ();
              else
                eh->dest->count = eh->count ();
              edge ckeh;
              basic_block nbb = split_edge (non_eh_succ_edge
                                            (gimple_bb (asgnck), &ckeh));
              gcc_checking_assert (eh == ckeh);
              gsi_split = gsi_start_bb (nbb);

              if (dump_file)
                fprintf (dump_file,
                         "Splitting non-EH edge from block %i into %i after"
                         " the newly-inserted reversed throwing compare\n",
                         gimple_bb (asgnck)->index, nbb->index);
              if (!gimple_seq_empty_p (phi_nodes (ckeh->dest)))
                {
                  edge aseh;
                  non_eh_succ_edge (gimple_bb (asgn), &aseh);

                  gcc_checking_assert (aseh->dest == ckeh->dest);

                  for (gphi_iterator psi = gsi_start_phis (ckeh->dest);
                       !gsi_end_p (psi); gsi_next (&psi))
                    {
                      gphi *phi = psi.phi ();
                      add_phi_arg (phi, PHI_ARG_DEF_FROM_EDGE (phi, aseh),
                                   ckeh,
                                   gimple_phi_arg_location_from_edge (phi,
                                                                      aseh));
                    }

                  if (dump_file)
                    fprintf (dump_file,
                             "Copying PHI args in EH block %i from %i to %i\n",
                             aseh->dest->index, aseh->src->index,
                             ckeh->src->index);
                }
            }

          gcc_checking_assert (single_succ_p (gsi_bb (gsi_split)));
          insert_check_and_trap (loc, &gsi_split, EDGE_TRUE_VALUE,
                                 EQ_EXPR, lhs, rhs);
        }
    }

  return 0;
}

/* Instantiate a hardcmp pass.  */

gimple_opt_pass *
make_pass_harden_compares (gcc::context *ctxt)
{
  return new pass_harden_compares (ctxt);
}