gcc/gimple-harden-conditionals.cc
/* Harden conditionals.
   Copyright (C) 2021-2023 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <oliva@adacore.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "gimple.h"
30 #include "gimplify.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "basic-block.h"
36 #include "cfghooks.h"
37 #include "cfgloop.h"
38 #include "tree-eh.h"
39 #include "sbitmap.h"
40 #include "diagnostic.h"
41 #include "intl.h"
43 namespace {
/* These passes introduce redundant, but reversed conditionals at
   compares, such as those used in conditional branches, and those
   that compute boolean results.  This doesn't make much sense for
   abstract CPUs, but this kind of hardening may avoid undesirable
   execution paths on actual CPUs under such attacks as power
   deprivation.  */
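
/* The passes below are gated by flag_harden_compares and
   flag_harden_conditional_branches, i.e. (assuming the usual mapping
   from flag variables to options) the -fharden-compares and
   -fharden-conditional-branches command-line options.  */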

/* Define a pass to harden conditionals other than branches.  */

const pass_data pass_data_harden_compares = {
  GIMPLE_PASS,
  "hardcmp",
  OPTGROUP_NONE,
  TV_NONE,
  PROP_cfg | PROP_ssa, // properties_required
  0,          // properties_provided
  0,          // properties_destroyed
  0,          // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg
  | TODO_verify_il, // properties_finish
};

class pass_harden_compares : public gimple_opt_pass
{
public:
  pass_harden_compares (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_harden_compares, ctxt)
  {}
  opt_pass *clone () final override
  {
    return new pass_harden_compares (m_ctxt);
  }
  bool gate (function *) final override
  {
    return flag_harden_compares;
  }
  unsigned int execute (function *) final override;
};

/* Define a pass to harden conditionals in branches.  This pass must
   run after the above, otherwise it will re-harden the checks
   introduced by the above.  */

const pass_data pass_data_harden_conditional_branches = {
  GIMPLE_PASS,
  "hardcbr",
  OPTGROUP_NONE,
  TV_NONE,
  PROP_cfg | PROP_ssa, // properties_required
  0,          // properties_provided
  0,          // properties_destroyed
  0,          // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg
  | TODO_verify_il, // properties_finish
};

class pass_harden_conditional_branches : public gimple_opt_pass
{
public:
  pass_harden_conditional_branches (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_harden_conditional_branches, ctxt)
  {}
  opt_pass *clone () final override
  {
    return new pass_harden_conditional_branches (m_ctxt);
  }
  bool gate (function *) final override
  {
    return flag_harden_conditional_branches;
  }
  unsigned int execute (function *) final override;
};

}

/* If VAL is an SSA name, return an SSA name holding the same value,
   but without the compiler's knowing that it holds the same value, so
   that uses thereof can't be optimized the way VAL might.  Insert
   stmts that initialize it before *GSIP, with LOC.

   Otherwise, VAL must be an invariant, returned unchanged.  */

static inline tree
detach_value (location_t loc, gimple_stmt_iterator *gsip, tree val)
{
  if (TREE_CONSTANT (val) || TREE_CODE (val) != SSA_NAME)
    {
      gcc_checking_assert (is_gimple_min_invariant (val));
      return val;
    }

  /* Create an SSA "copy" of VAL.  It would be nice to have it named
     after the corresponding variable, but sharing the same decl is
     problematic when VAL is a DECL_BY_REFERENCE RESULT_DECL, and
     copying just the identifier hits -fcompare-debug failures.  */
  tree ret = make_ssa_name (TREE_TYPE (val));

  /* Some modes won't fit in general regs, so we fall back to memory
     for them.  ??? It would be ideal to try to identify an alternate,
     wider or more suitable register class, and use the corresponding
     constraint, but there's no logic to go from register class to
     constraint, even if there is a corresponding constraint, and even
     if we could enumerate constraints, we can't get to their string
     either.  So this will do for now.  */
  bool need_memory = true;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (val));
  if (mode != BLKmode)
    for (int i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], i)
          && targetm.hard_regno_mode_ok (i, mode))
        {
          need_memory = false;
          break;
        }

  tree asminput = val;
  tree asmoutput = ret;
  const char *constraint_out = need_memory ? "=m" : "=g";
  const char *constraint_in = need_memory ? "m" : "0";

  if (need_memory)
    {
      tree temp = create_tmp_var (TREE_TYPE (val), "dtch");
      mark_addressable (temp);

      gassign *copyin = gimple_build_assign (temp, asminput);
      gimple_set_location (copyin, loc);
      gsi_insert_before (gsip, copyin, GSI_SAME_STMT);

      asminput = asmoutput = temp;
    }

  /* Output an asm statement with matching input and output.  It does
     nothing, but after it the compiler no longer knows the output
     still holds the same value as the input.  */
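
  /* For illustration only: in the register case, the statement built
     below amounts to an empty asm along the lines of
	 __asm__ ("" : "=g" ret : "0" val);
     whose matching constraints make RET a copy of VAL without letting
     the optimizers know the two are equal.  In the memory case, the
     "=m"/"m" constraints are applied to the addressable temporary
     created above instead.  */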
  vec<tree, va_gc> *inputs = NULL;
  vec<tree, va_gc> *outputs = NULL;
  vec_safe_push (outputs,
                 build_tree_list
                 (build_tree_list
                  (NULL_TREE, build_string (strlen (constraint_out),
                                            constraint_out)),
                  asmoutput));
  vec_safe_push (inputs,
                 build_tree_list
                 (build_tree_list
                  (NULL_TREE, build_string (strlen (constraint_in),
                                            constraint_in)),
                  asminput));
  gasm *detach = gimple_build_asm_vec ("", inputs, outputs,
                                       NULL, NULL);
  gimple_set_location (detach, loc);
  gsi_insert_before (gsip, detach, GSI_SAME_STMT);

  if (need_memory)
    {
      gassign *copyout = gimple_build_assign (ret, asmoutput);
      gimple_set_location (copyout, loc);
      gsi_insert_before (gsip, copyout, GSI_SAME_STMT);
      SSA_NAME_DEF_STMT (ret) = copyout;

      gassign *clobber = gimple_build_assign (asmoutput,
                                              build_clobber
                                              (TREE_TYPE (asmoutput)));
      gimple_set_location (clobber, loc);
      gsi_insert_before (gsip, clobber, GSI_SAME_STMT);
    }
  else
    SSA_NAME_DEF_STMT (ret) = detach;

  return ret;
}

/* Build a cond stmt out of COP, LHS, RHS, insert it before *GSIP with
   location LOC.  *GSIP must be at the end of a basic block.  The succ
   edge out of the block becomes the true or false edge opposite to
   that in FLAGS.  Create a new block with a single trap stmt, in the
   cold partition if the function is partitioned, and a new edge to
   it as the other edge for the cond.  */

static inline void
insert_check_and_trap (location_t loc, gimple_stmt_iterator *gsip,
                       int flags, enum tree_code cop, tree lhs, tree rhs)
{
  basic_block chk = gsi_bb (*gsip);

  gcond *cond = gimple_build_cond (cop, lhs, rhs, NULL, NULL);
  gimple_set_location (cond, loc);
  gsi_insert_before (gsip, cond, GSI_SAME_STMT);

  basic_block trp = create_empty_bb (chk);

  gimple_stmt_iterator gsit = gsi_after_labels (trp);
  gcall *trap = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  gimple_call_set_ctrl_altering (trap, true);
  gimple_set_location (trap, loc);
  gsi_insert_before (&gsit, trap, GSI_SAME_STMT);

  if (dump_file)
    fprintf (dump_file,
             "Adding reversed compare to block %i, and trap to block %i\n",
             chk->index, trp->index);

  if (BB_PARTITION (chk))
    BB_SET_PARTITION (trp, BB_COLD_PARTITION);

  int true_false_flag = flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
  gcc_assert (true_false_flag);
  int neg_true_false_flag = (~flags) & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

  /* Remove the fallthru bit, and set the truth value for the
     preexisting edge and for the newly-created one.  In hardcbr,
     FLAGS is taken from the edge of the original cond expr that we're
     dealing with, so the reversed compare is expected to yield the
     negated result, and the same result calls for a trap.  In
     hardcmp, we're comparing the boolean results of the original and
     of the reversed compare, so we're passed FLAGS to trap on
     equality.  */
  single_succ_edge (chk)->flags &= ~EDGE_FALLTHRU;
  single_succ_edge (chk)->flags |= neg_true_false_flag;
  single_succ_edge (chk)->probability = profile_probability::always ();
  edge e = make_edge (chk, trp, true_false_flag);
  e->goto_locus = loc;
  e->probability = profile_probability::never ();

  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, trp, chk);
  if (current_loops)
    add_bb_to_loop (trp, current_loops->tree_root);
}
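
/* For illustration only: when called with FLAGS containing
   EDGE_TRUE_VALUE, the check block ends up ending in
     if (LHS COP RHS) goto <trap block>; else goto <preexisting successor>;
   with the trap block holding nothing but the __builtin_trap () call.  */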

/* Split edge E, and insert_check_and_trap (see above) in the
   newly-created block, using already-detached copies of LHS's and
   RHS's values (see detach_value above) for the COP compare.  */

static inline void
insert_edge_check_and_trap (location_t loc, edge e,
                            enum tree_code cop, tree lhs, tree rhs)
{
  int flags = e->flags;
  basic_block src = e->src;
  basic_block dest = e->dest;
  location_t eloc = e->goto_locus;

  basic_block chk = split_edge (e);
  e = NULL;

  single_pred_edge (chk)->goto_locus = loc;
  single_succ_edge (chk)->goto_locus = eloc;

  if (dump_file)
    fprintf (dump_file,
             "Splitting edge %i->%i into block %i\n",
             src->index, dest->index, chk->index);

  gimple_stmt_iterator gsik = gsi_after_labels (chk);

  insert_check_and_trap (loc, &gsik, flags, cop, lhs, rhs);
}

/* Harden cond stmts at the end of FUN's blocks.  */

unsigned int
pass_harden_conditional_branches::execute (function *fun)
{
  /* Record the preexisting blocks, to avoid visiting newly-created
     blocks.  */
  auto_sbitmap to_visit (last_basic_block_for_fn (fun));
  bitmap_clear (to_visit);

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    bitmap_set_bit (to_visit, bb->index);

  sbitmap_iterator it;
  unsigned i;
  EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it)
    {
      bb = BASIC_BLOCK_FOR_FN (fun, i);

      gimple_stmt_iterator gsi = gsi_last_bb (bb);

      if (gsi_end_p (gsi))
        continue;

      gcond *cond = dyn_cast <gcond *> (gsi_stmt (gsi));
      if (!cond)
        continue;

      /* Turn:

         if (x op y) goto l1; else goto l2;

         into:

         if (x op y) goto l1'; else goto l2';
         l1': if (x' cop y') goto l1'trap; else goto l1;
         l1'trap: __builtin_trap ();
         l2': if (x' cop y') goto l2; else goto l2'trap;
         l2'trap: __builtin_trap ();

         where cop is a complementary boolean operation to op; l1', l1'trap,
         l2' and l2'trap are newly-created labels; and x' and y' hold the same
         value as x and y, but in a way that does not enable the compiler to
         optimize the redundant compare away.  */

      enum tree_code op = gimple_cond_code (cond);
      tree lhs = gimple_cond_lhs (cond);
      tree rhs = gimple_cond_rhs (cond);
      location_t loc = gimple_location (cond);

      enum tree_code cop = invert_tree_comparison (op, HONOR_NANS (lhs));

      if (cop == ERROR_MARK)
        /* ??? Can we do better?  */
        continue;

      /* Detach the values before the compares.  If we do so later,
         the compiler may use values inferred from the compares.  */
      bool same_p = (lhs == rhs);
      lhs = detach_value (loc, &gsi, lhs);
      rhs = same_p ? lhs : detach_value (loc, &gsi, rhs);

      insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 0), cop, lhs, rhs);
      insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 1), cop, lhs, rhs);
    }

  return 0;
}

/* Instantiate a hardcbr pass.  */

gimple_opt_pass *
make_pass_harden_conditional_branches (gcc::context *ctxt)
{
  return new pass_harden_conditional_branches (ctxt);
}

/* Return the fallthru edge of a block whose other edge is an EH
   edge.  If EHP is not NULL, store the EH edge in it.  */
static inline edge
non_eh_succ_edge (basic_block bb, edge *ehp = NULL)
{
  gcc_checking_assert (EDGE_COUNT (bb->succs) == 2);

  edge ret = find_fallthru_edge (bb->succs);

  int eh_idx = EDGE_SUCC (bb, 0) == ret;
  edge eh = EDGE_SUCC (bb, eh_idx);

  gcc_checking_assert (!(ret->flags & EDGE_EH)
                       && (eh->flags & EDGE_EH));

  if (ehp)
    *ehp = eh;

  return ret;
}

/* Harden boolean-yielding compares in FUN.  */

unsigned int
pass_harden_compares::execute (function *fun)
{
  /* Record the preexisting blocks, to avoid visiting newly-created
     blocks.  */
  auto_sbitmap to_visit (last_basic_block_for_fn (fun));
  bitmap_clear (to_visit);

  basic_block bb;
  FOR_EACH_BB_FN (bb, fun)
    bitmap_set_bit (to_visit, bb->index);

  sbitmap_iterator it;
  unsigned i;
  EXECUTE_IF_SET_IN_BITMAP (to_visit, 0, i, it)
    {
      bb = BASIC_BLOCK_FOR_FN (fun, i);

      for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
           !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          gassign *asgn = dyn_cast <gassign *> (gsi_stmt (gsi));
          if (!asgn)
            continue;

          /* Turn:

             z = x op y;

             into:

             z = x op y;
             z' = x' cop y';
             if (z == z') __builtin_trap ();

             where cop is a complementary boolean operation to op; and x'
             and y' hold the same value as x and y, but in a way that does
             not enable the compiler to optimize the redundant compare
             away.  */

          enum tree_code op = gimple_assign_rhs_code (asgn);

          enum tree_code cop;

          switch (op)
            {
            case EQ_EXPR:
            case NE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LT_EXPR:
            case LE_EXPR:
            case LTGT_EXPR:
            case UNEQ_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case ORDERED_EXPR:
            case UNORDERED_EXPR:
              cop = invert_tree_comparison (op,
                                            HONOR_NANS
                                            (gimple_assign_rhs1 (asgn)));

              if (cop == ERROR_MARK)
                /* ??? Can we do better?  */
                continue;

              break;

              /* ??? Maybe handle these too?  */
            case TRUTH_NOT_EXPR:
              /* ??? The code below assumes binary ops, it would have to
                 be adjusted for TRUTH_NOT_EXPR, since it's unary.  */
            case TRUTH_ANDIF_EXPR:
            case TRUTH_ORIF_EXPR:
            case TRUTH_AND_EXPR:
            case TRUTH_OR_EXPR:
            case TRUTH_XOR_EXPR:
            default:
              continue;
            }

          /* These are the operands for the verification.  */
          tree lhs = gimple_assign_lhs (asgn);
          tree op1 = gimple_assign_rhs1 (asgn);
          tree op2 = gimple_assign_rhs2 (asgn);
          location_t loc = gimple_location (asgn);

          /* Vector booleans can't be used in conditional branches.  ???
             Can we do better?  How to reduce compare and
             reversed-compare result vectors to a single boolean?  */
          if (VECTOR_TYPE_P (TREE_TYPE (op1)))
            continue;

          /* useless_type_conversion_p enables conversions from 1-bit
             integer types to boolean to be discarded.  */
          gcc_checking_assert (TREE_CODE (TREE_TYPE (lhs)) == BOOLEAN_TYPE
                               || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                                   && TYPE_PRECISION (TREE_TYPE (lhs)) == 1));

          tree rhs = copy_ssa_name (lhs);

          /* Detach the values before the compares, so that the
             compiler infers nothing from them, not even from a
             throwing compare that didn't throw.  */
          bool same_p = (op1 == op2);
          op1 = detach_value (loc, &gsi, op1);
          op2 = same_p ? op1 : detach_value (loc, &gsi, op2);

          gimple_stmt_iterator gsi_split = gsi;
          /* Don't separate the original assignment from debug stmts
             that might be associated with it, and arrange to split the
             block after debug stmts, so as to make sure the split block
             won't be debug stmts only.  */
          gsi_next_nondebug (&gsi_split);

          bool throwing_compare_p = stmt_ends_bb_p (asgn);
          if (throwing_compare_p)
            {
              basic_block nbb = split_edge (non_eh_succ_edge
                                            (gimple_bb (asgn)));
              gsi_split = gsi_start_bb (nbb);

              if (dump_file)
                fprintf (dump_file,
                         "Splitting non-EH edge from block %i into %i"
                         " after a throwing compare\n",
                         gimple_bb (asgn)->index, nbb->index);
            }

          gassign *asgnck = gimple_build_assign (rhs, cop, op1, op2);
          gimple_set_location (asgnck, loc);
          gsi_insert_before (&gsi_split, asgnck, GSI_SAME_STMT);

          /* We wish to insert a cond_expr after the compare, so arrange
             for it to be at the end of a block if it isn't, and for it
             to have a single successor in case there's more than
             one, as in PR104975.  */
          if (!gsi_end_p (gsi_split)
              || !single_succ_p (gsi_bb (gsi_split)))
            {
              if (!gsi_end_p (gsi_split))
                gsi_prev (&gsi_split);
              else
                gsi_split = gsi_last_bb (gsi_bb (gsi_split));
              basic_block obb = gsi_bb (gsi_split);
              basic_block nbb = split_block (obb, gsi_stmt (gsi_split))->dest;
              gsi_next (&gsi_split);
              gcc_checking_assert (gsi_end_p (gsi_split));

              single_succ_edge (bb)->goto_locus = loc;

              if (dump_file)
                fprintf (dump_file,
                         "Splitting block %i into %i"
                         " before the conditional trap branch\n",
                         obb->index, nbb->index);
            }

          /* If the check assignment must end a basic block, we can't
             insert the conditional branch in the same block, so split
             the block again, and prepare to insert the conditional
             branch in the new block.

             Also assign an EH region to the compare.  Even though it's
             unlikely that the hardening compare will throw after the
             original compare didn't, the compiler won't even know that
             it's the same compare operands, so add the EH edge anyway.  */
          if (throwing_compare_p)
            {
              add_stmt_to_eh_lp (asgnck, lookup_stmt_eh_lp (asgn));
              make_eh_edges (asgnck);

              edge ckeh;
              basic_block nbb = split_edge (non_eh_succ_edge
                                            (gimple_bb (asgnck), &ckeh));
              gsi_split = gsi_start_bb (nbb);

              if (dump_file)
                fprintf (dump_file,
                         "Splitting non-EH edge from block %i into %i after"
                         " the newly-inserted reversed throwing compare\n",
                         gimple_bb (asgnck)->index, nbb->index);

              if (!gimple_seq_empty_p (phi_nodes (ckeh->dest)))
                {
                  edge aseh;
                  non_eh_succ_edge (gimple_bb (asgn), &aseh);

                  gcc_checking_assert (aseh->dest == ckeh->dest);

                  for (gphi_iterator psi = gsi_start_phis (ckeh->dest);
                       !gsi_end_p (psi); gsi_next (&psi))
                    {
                      gphi *phi = psi.phi ();
                      add_phi_arg (phi, PHI_ARG_DEF_FROM_EDGE (phi, aseh), ckeh,
                                   gimple_phi_arg_location_from_edge (phi, aseh));
                    }

                  if (dump_file)
                    fprintf (dump_file,
                             "Copying PHI args in EH block %i from %i to %i\n",
                             aseh->dest->index, aseh->src->index,
                             ckeh->src->index);
                }
            }

          gcc_checking_assert (single_succ_p (gsi_bb (gsi_split)));

          insert_check_and_trap (loc, &gsi_split, EDGE_TRUE_VALUE,
                                 EQ_EXPR, lhs, rhs);
        }
    }

  return 0;
}

/* Instantiate a hardcmp pass.  */

gimple_opt_pass *
make_pass_harden_compares (gcc::context *ctxt)
{
  return new pass_harden_compares (ctxt);
}