rtl: ICE with thread_local and inline asm [PR104777]
[official-gcc.git] / gcc / gimple-harden-conditionals.cc
blob6a5fc3fb9e1a27c4feaf0df70374210b5b3b5260
1 /* Harden conditionals.
2 Copyright (C) 2021-2022 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <oliva@adacore.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "gimple.h"
30 #include "gimplify.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35 #include "basic-block.h"
36 #include "cfghooks.h"
37 #include "cfgloop.h"
38 #include "tree-eh.h"
39 #include "diagnostic.h"
40 #include "intl.h"
42 namespace {
44 /* These passes introduce redundant, but reversed conditionals at
45 compares, such as those used in conditional branches, and those
46 that compute boolean results. This doesn't make much sense for
47 abstract CPUs, but this kind of hardening may avoid undesirable
48 execution paths on actual CPUs under such attacks as of power
49 deprivation. */
51 /* Define a pass to harden conditionals other than branches. */
53 const pass_data pass_data_harden_compares = {
54 GIMPLE_PASS,
55 "hardcmp",
56 OPTGROUP_NONE,
57 TV_NONE,
58 PROP_cfg | PROP_ssa, // properties_required
59 0, // properties_provided
60 0, // properties_destroyed
61 0, // properties_start
62 TODO_update_ssa
63 | TODO_cleanup_cfg
64 | TODO_verify_il, // properties_finish
67 class pass_harden_compares : public gimple_opt_pass
69 public:
70 pass_harden_compares (gcc::context *ctxt)
71 : gimple_opt_pass (pass_data_harden_compares, ctxt)
73 opt_pass *clone () { return new pass_harden_compares (m_ctxt); }
74 virtual bool gate (function *) {
75 return flag_harden_compares;
77 virtual unsigned int execute (function *);
80 /* Define a pass to harden conditionals in branches. This pass must
81 run after the above, otherwise it will re-harden the checks
82 introduced by the above. */
84 const pass_data pass_data_harden_conditional_branches = {
85 GIMPLE_PASS,
86 "hardcbr",
87 OPTGROUP_NONE,
88 TV_NONE,
89 PROP_cfg | PROP_ssa, // properties_required
90 0, // properties_provided
91 0, // properties_destroyed
92 0, // properties_start
93 TODO_update_ssa
94 | TODO_cleanup_cfg
95 | TODO_verify_il, // properties_finish
98 class pass_harden_conditional_branches : public gimple_opt_pass
100 public:
101 pass_harden_conditional_branches (gcc::context *ctxt)
102 : gimple_opt_pass (pass_data_harden_conditional_branches, ctxt)
104 opt_pass *clone () { return new pass_harden_conditional_branches (m_ctxt); }
105 virtual bool gate (function *) {
106 return flag_harden_conditional_branches;
108 virtual unsigned int execute (function *);
113 /* If VAL is an SSA name, return an SSA name holding the same value,
114 but without the compiler's knowing that it holds the same value, so
115 that uses thereof can't be optimized the way VAL might. Insert
116 stmts that initialize it before *GSIP, with LOC.
118 Otherwise, VAL must be an invariant, returned unchanged. */
120 static inline tree
121 detach_value (location_t loc, gimple_stmt_iterator *gsip, tree val)
123 if (TREE_CONSTANT (val) || TREE_CODE (val) != SSA_NAME)
125 gcc_checking_assert (is_gimple_min_invariant (val));
126 return val;
129 /* Create a SSA "copy" of VAL. This could be an anonymous
130 temporary, but it's nice to have it named after the corresponding
131 variable. Alas, when VAL is a DECL_BY_REFERENCE RESULT_DECL,
132 setting (a copy of) it would be flagged by checking, so we don't
133 use copy_ssa_name: we create an anonymous SSA name, and then give
134 it the same identifier (rather than decl) as VAL. */
135 tree ret = make_ssa_name (TREE_TYPE (val));
136 SET_SSA_NAME_VAR_OR_IDENTIFIER (ret, SSA_NAME_IDENTIFIER (val));
138 /* Some modes won't fit in general regs, so we fall back to memory
139 for them. ??? It would be ideal to try to identify an alternate,
140 wider or more suitable register class, and use the corresponding
141 constraint, but there's no logic to go from register class to
142 constraint, even if there is a corresponding constraint, and even
143 if we could enumerate constraints, we can't get to their string
144 either. So this will do for now. */
145 bool need_memory = true;
146 enum machine_mode mode = TYPE_MODE (TREE_TYPE (val));
147 if (mode != BLKmode)
148 for (int i = 0; i < FIRST_PSEUDO_REGISTER; i++)
149 if (TEST_HARD_REG_BIT (reg_class_contents[GENERAL_REGS], i)
150 && targetm.hard_regno_mode_ok (i, mode))
152 need_memory = false;
153 break;
156 tree asminput = val;
157 tree asmoutput = ret;
158 const char *constraint_out = need_memory ? "=m" : "=g";
159 const char *constraint_in = need_memory ? "m" : "0";
161 if (need_memory)
163 tree temp = create_tmp_var (TREE_TYPE (val), "dtch");
164 mark_addressable (temp);
166 gassign *copyin = gimple_build_assign (temp, asminput);
167 gimple_set_location (copyin, loc);
168 gsi_insert_before (gsip, copyin, GSI_SAME_STMT);
170 asminput = asmoutput = temp;
173 /* Output an asm statement with matching input and output. It does
174 nothing, but after it the compiler no longer knows the output
175 still holds the same value as the input. */
176 vec<tree, va_gc> *inputs = NULL;
177 vec<tree, va_gc> *outputs = NULL;
178 vec_safe_push (outputs,
179 build_tree_list
180 (build_tree_list
181 (NULL_TREE, build_string (strlen (constraint_out),
182 constraint_out)),
183 asmoutput));
184 vec_safe_push (inputs,
185 build_tree_list
186 (build_tree_list
187 (NULL_TREE, build_string (strlen (constraint_in),
188 constraint_in)),
189 asminput));
190 gasm *detach = gimple_build_asm_vec ("", inputs, outputs,
191 NULL, NULL);
192 gimple_set_location (detach, loc);
193 gsi_insert_before (gsip, detach, GSI_SAME_STMT);
195 if (need_memory)
197 gassign *copyout = gimple_build_assign (ret, asmoutput);
198 gimple_set_location (copyout, loc);
199 gsi_insert_before (gsip, copyout, GSI_SAME_STMT);
200 SSA_NAME_DEF_STMT (ret) = copyout;
202 gassign *clobber = gimple_build_assign (asmoutput,
203 build_clobber
204 (TREE_TYPE (asmoutput)));
205 gimple_set_location (clobber, loc);
206 gsi_insert_before (gsip, clobber, GSI_SAME_STMT);
208 else
209 SSA_NAME_DEF_STMT (ret) = detach;
211 return ret;
214 /* Build a cond stmt out of COP, LHS, RHS, insert it before *GSIP with
215 location LOC. *GSIP must be at the end of a basic block. The succ
216 edge out of the block becomes the true or false edge opposite to
217 that in FLAGS. Create a new block with a single trap stmt, in the
218 cold partition if the function is partitioned,, and a new edge to
219 it as the other edge for the cond. */
221 static inline void
222 insert_check_and_trap (location_t loc, gimple_stmt_iterator *gsip,
223 int flags, enum tree_code cop, tree lhs, tree rhs)
225 basic_block chk = gsi_bb (*gsip);
227 gcond *cond = gimple_build_cond (cop, lhs, rhs, NULL, NULL);
228 gimple_set_location (cond, loc);
229 gsi_insert_before (gsip, cond, GSI_SAME_STMT);
231 basic_block trp = create_empty_bb (chk);
233 gimple_stmt_iterator gsit = gsi_after_labels (trp);
234 gcall *trap = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
235 gimple_set_location (trap, loc);
236 gsi_insert_before (&gsit, trap, GSI_SAME_STMT);
238 if (dump_file)
239 fprintf (dump_file,
240 "Adding reversed compare to block %i, and trap to block %i\n",
241 chk->index, trp->index);
243 if (BB_PARTITION (chk))
244 BB_SET_PARTITION (trp, BB_COLD_PARTITION);
246 int true_false_flag = flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
247 gcc_assert (true_false_flag);
248 int neg_true_false_flag = (~flags) & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
250 /* Remove the fallthru bit, and set the truth value for the
251 preexisting edge and for the newly-created one. In hardcbr,
252 FLAGS is taken from the edge of the original cond expr that we're
253 dealing with, so the reversed compare is expected to yield the
254 negated result, and the same result calls for a trap. In
255 hardcmp, we're comparing the boolean results of the original and
256 of the reversed compare, so we're passed FLAGS to trap on
257 equality. */
258 single_succ_edge (chk)->flags &= ~EDGE_FALLTHRU;
259 single_succ_edge (chk)->flags |= neg_true_false_flag;
260 edge e = make_edge (chk, trp, true_false_flag);
261 e->goto_locus = loc;
263 if (dom_info_available_p (CDI_DOMINATORS))
264 set_immediate_dominator (CDI_DOMINATORS, trp, chk);
265 if (current_loops)
266 add_bb_to_loop (trp, current_loops->tree_root);
269 /* Split edge E, and insert_check_and_trap (see above) in the
270 newly-created block, using detached copies of LHS's and RHS's
271 values (see detach_value above) for the COP compare. */
273 static inline void
274 insert_edge_check_and_trap (location_t loc, edge e,
275 enum tree_code cop, tree lhs, tree rhs)
277 int flags = e->flags;
278 basic_block src = e->src;
279 basic_block dest = e->dest;
280 location_t eloc = e->goto_locus;
282 basic_block chk = split_edge (e);
283 e = NULL;
285 single_pred_edge (chk)->goto_locus = loc;
286 single_succ_edge (chk)->goto_locus = eloc;
288 if (dump_file)
289 fprintf (dump_file,
290 "Splitting edge %i->%i into block %i\n",
291 src->index, dest->index, chk->index);
293 gimple_stmt_iterator gsik = gsi_after_labels (chk);
295 bool same_p = (lhs == rhs);
296 lhs = detach_value (loc, &gsik, lhs);
297 rhs = same_p ? lhs : detach_value (loc, &gsik, rhs);
299 insert_check_and_trap (loc, &gsik, flags, cop, lhs, rhs);
302 /* Harden cond stmts at the end of FUN's blocks. */
304 unsigned int
305 pass_harden_conditional_branches::execute (function *fun)
307 basic_block bb;
308 FOR_EACH_BB_REVERSE_FN (bb, fun)
310 gimple_stmt_iterator gsi = gsi_last_bb (bb);
312 if (gsi_end_p (gsi))
313 continue;
315 gcond *cond = dyn_cast <gcond *> (gsi_stmt (gsi));
316 if (!cond)
317 continue;
319 /* Turn:
321 if (x op y) goto l1; else goto l2;
323 into:
325 if (x op y) goto l1'; else goto l2';
326 l1': if (x' cop y') goto l1'trap; else goto l1;
327 l1'trap: __builtin_trap ();
328 l2': if (x' cop y') goto l2; else goto l2'trap;
329 l2'trap: __builtin_trap ();
331 where cop is a complementary boolean operation to op; l1', l1'trap,
332 l2' and l2'trap are newly-created labels; and x' and y' hold the same
333 value as x and y, but in a way that does not enable the compiler to
334 optimize the redundant compare away.
337 enum tree_code op = gimple_cond_code (cond);
338 tree lhs = gimple_cond_lhs (cond);
339 tree rhs = gimple_cond_rhs (cond);
340 location_t loc = gimple_location (cond);
342 enum tree_code cop = invert_tree_comparison (op, HONOR_NANS (lhs));
344 if (cop == ERROR_MARK)
345 /* ??? Can we do better? */
346 continue;
348 insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 0), cop, lhs, rhs);
349 insert_edge_check_and_trap (loc, EDGE_SUCC (bb, 1), cop, lhs, rhs);
352 return 0;
355 /* Instantiate a hardcbr pass. */
357 gimple_opt_pass *
358 make_pass_harden_conditional_branches (gcc::context *ctxt)
360 return new pass_harden_conditional_branches (ctxt);
363 /* Return the fallthru edge of a block whose other edge is an EH
364 edge. If EHP is not NULL, store the EH edge in it. */
365 static inline edge
366 non_eh_succ_edge (basic_block bb, edge *ehp = NULL)
368 gcc_checking_assert (EDGE_COUNT (bb->succs) == 2);
370 edge ret = find_fallthru_edge (bb->succs);
372 int eh_idx = EDGE_SUCC (bb, 0) == ret;
373 edge eh = EDGE_SUCC (bb, eh_idx);
375 gcc_checking_assert (!(ret->flags & EDGE_EH)
376 && (eh->flags & EDGE_EH));
378 if (ehp)
379 *ehp = eh;
381 return ret;
384 /* Harden boolean-yielding compares in FUN. */
386 unsigned int
387 pass_harden_compares::execute (function *fun)
389 basic_block bb;
390 /* Go backwards over BBs and stmts, so that, even if we split the
391 block multiple times to insert a cond_expr after each compare we
392 find, we remain in the same block, visiting every preexisting
393 stmt exactly once, and not visiting newly-added blocks or
394 stmts. */
395 FOR_EACH_BB_REVERSE_FN (bb, fun)
396 for (gimple_stmt_iterator gsi = gsi_last_bb (bb);
397 !gsi_end_p (gsi); gsi_prev (&gsi))
399 gassign *asgn = dyn_cast <gassign *> (gsi_stmt (gsi));
400 if (!asgn)
401 continue;
403 /* Turn:
405 z = x op y;
407 into:
409 z = x op y;
410 z' = x' cop y';
411 if (z == z') __builtin_trap ();
413 where cop is a complementary boolean operation to op; and x'
414 and y' hold the same value as x and y, but in a way that does
415 not enable the compiler to optimize the redundant compare
416 away.
419 enum tree_code op = gimple_assign_rhs_code (asgn);
421 enum tree_code cop;
423 switch (op)
425 case EQ_EXPR:
426 case NE_EXPR:
427 case GT_EXPR:
428 case GE_EXPR:
429 case LT_EXPR:
430 case LE_EXPR:
431 case LTGT_EXPR:
432 case UNEQ_EXPR:
433 case UNGT_EXPR:
434 case UNGE_EXPR:
435 case UNLT_EXPR:
436 case UNLE_EXPR:
437 case ORDERED_EXPR:
438 case UNORDERED_EXPR:
439 cop = invert_tree_comparison (op,
440 HONOR_NANS
441 (gimple_assign_rhs1 (asgn)));
443 if (cop == ERROR_MARK)
444 /* ??? Can we do better? */
445 continue;
447 break;
449 /* ??? Maybe handle these too? */
450 case TRUTH_NOT_EXPR:
451 /* ??? The code below assumes binary ops, it would have to
452 be adjusted for TRUTH_NOT_EXPR, since it's unary. */
453 case TRUTH_ANDIF_EXPR:
454 case TRUTH_ORIF_EXPR:
455 case TRUTH_AND_EXPR:
456 case TRUTH_OR_EXPR:
457 case TRUTH_XOR_EXPR:
458 default:
459 continue;
462 /* These are the operands for the verification. */
463 tree lhs = gimple_assign_lhs (asgn);
464 tree op1 = gimple_assign_rhs1 (asgn);
465 tree op2 = gimple_assign_rhs2 (asgn);
466 location_t loc = gimple_location (asgn);
468 /* Vector booleans can't be used in conditional branches. ???
469 Can we do better? How to reduce compare and
470 reversed-compare result vectors to a single boolean? */
471 if (VECTOR_TYPE_P (TREE_TYPE (op1)))
472 continue;
474 /* useless_type_conversion_p enables conversions from 1-bit
475 integer types to boolean to be discarded. */
476 gcc_checking_assert (TREE_CODE (TREE_TYPE (lhs)) == BOOLEAN_TYPE
477 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
478 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1));
480 tree rhs = copy_ssa_name (lhs);
482 gimple_stmt_iterator gsi_split = gsi;
483 /* Don't separate the original assignment from debug stmts
484 that might be associated with it, and arrange to split the
485 block after debug stmts, so as to make sure the split block
486 won't be debug stmts only. */
487 gsi_next_nondebug (&gsi_split);
489 bool throwing_compare_p = stmt_ends_bb_p (asgn);
490 if (throwing_compare_p)
492 basic_block nbb = split_edge (non_eh_succ_edge
493 (gimple_bb (asgn)));
494 gsi_split = gsi_start_bb (nbb);
496 if (dump_file)
497 fprintf (dump_file,
498 "Splitting non-EH edge from block %i into %i"
499 " after a throwing compare\n",
500 gimple_bb (asgn)->index, nbb->index);
503 bool same_p = (op1 == op2);
504 op1 = detach_value (loc, &gsi_split, op1);
505 op2 = same_p ? op1 : detach_value (loc, &gsi_split, op2);
507 gassign *asgnck = gimple_build_assign (rhs, cop, op1, op2);
508 gimple_set_location (asgnck, loc);
509 gsi_insert_before (&gsi_split, asgnck, GSI_SAME_STMT);
511 /* We wish to insert a cond_expr after the compare, so arrange
512 for it to be at the end of a block if it isn't. */
513 if (!gsi_end_p (gsi_split))
515 gsi_prev (&gsi_split);
516 basic_block obb = gsi_bb (gsi_split);
517 basic_block nbb = split_block (obb, gsi_stmt (gsi_split))->dest;
518 gsi_next (&gsi_split);
519 gcc_checking_assert (gsi_end_p (gsi_split));
521 single_succ_edge (bb)->goto_locus = loc;
523 if (dump_file)
524 fprintf (dump_file,
525 "Splitting block %i into %i"
526 " before the conditional trap branch\n",
527 obb->index, nbb->index);
530 /* If the check assignment must end a basic block, we can't
531 insert the conditional branch in the same block, so split
532 the block again, and prepare to insert the conditional
533 branch in the new block.
535 Also assign an EH region to the compare. Even though it's
536 unlikely that the hardening compare will throw after the
537 original compare didn't, the compiler won't even know that
538 it's the same compare operands, so add the EH edge anyway. */
539 if (throwing_compare_p)
541 add_stmt_to_eh_lp (asgnck, lookup_stmt_eh_lp (asgn));
542 make_eh_edges (asgnck);
544 edge ckeh;
545 basic_block nbb = split_edge (non_eh_succ_edge
546 (gimple_bb (asgnck), &ckeh));
547 gsi_split = gsi_start_bb (nbb);
549 if (dump_file)
550 fprintf (dump_file,
551 "Splitting non-EH edge from block %i into %i after"
552 " the newly-inserted reversed throwing compare\n",
553 gimple_bb (asgnck)->index, nbb->index);
555 if (!gimple_seq_empty_p (phi_nodes (ckeh->dest)))
557 edge aseh;
558 non_eh_succ_edge (gimple_bb (asgn), &aseh);
560 gcc_checking_assert (aseh->dest == ckeh->dest);
562 for (gphi_iterator psi = gsi_start_phis (ckeh->dest);
563 !gsi_end_p (psi); gsi_next (&psi))
565 gphi *phi = psi.phi ();
566 add_phi_arg (phi, PHI_ARG_DEF_FROM_EDGE (phi, aseh), ckeh,
567 gimple_phi_arg_location_from_edge (phi, aseh));
570 if (dump_file)
571 fprintf (dump_file,
572 "Copying PHI args in EH block %i from %i to %i\n",
573 aseh->dest->index, aseh->src->index, ckeh->src->index);
577 gcc_checking_assert (single_succ_p (gsi_bb (gsi_split)));
579 insert_check_and_trap (loc, &gsi_split, EDGE_TRUE_VALUE,
580 EQ_EXPR, lhs, rhs);
583 return 0;
586 /* Instantiate a hardcmp pass. */
588 gimple_opt_pass *
589 make_pass_harden_compares (gcc::context *ctxt)
591 return new pass_harden_compares (ctxt);