1 /* UndefinedBehaviorSanitizer, undefined behavior detector.
2 Copyright (C) 2013-2023 Free Software Foundation, Inc.
3 Contributed by Marek Polacek <polacek@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "c-family/c-common.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "cgraph.h"
34 #include "tree-pretty-print.h"
35 #include "stor-layout.h"
36 #include "cfganal.h"
37 #include "gimple-iterator.h"
38 #include "output.h"
39 #include "cfgloop.h"
40 #include "ubsan.h"
41 #include "expr.h"
42 #include "stringpool.h"
43 #include "attribs.h"
44 #include "asan.h"
45 #include "gimplify-me.h"
46 #include "dfp.h"
47 #include "builtins.h"
48 #include "tree-object-size.h"
49 #include "tree-cfg.h"
50 #include "gimple-fold.h"
51 #include "varasm.h"
52 #include "realmpfr.h"
53 #include "target.h"
54 #include "langhooks.h"
56 /* Map from a tree to a VAR_DECL tree. */
58 struct GTY((for_user)) tree_type_map {
59 struct tree_map_base type;
60 tree decl;
63 struct tree_type_map_cache_hasher : ggc_cache_ptr_hash<tree_type_map>
65 static inline hashval_t
66 hash (tree_type_map *t)
68 return TYPE_UID (t->type.from);
71 static inline bool
72 equal (tree_type_map *a, tree_type_map *b)
74 return a->type.from == b->type.from;
77 static int
78 keep_cache_entry (tree_type_map *&m)
80 return ggc_marked_p (m->type.from);
84 static GTY ((cache))
85 hash_table<tree_type_map_cache_hasher> *decl_tree_for_type;
 87 /* Look up a VAR_DECL for TYPE and return it if we find one. */
89 static tree
90 decl_for_type_lookup (tree type)
92 /* If the hash table is not initialized yet, create it now. */
93 if (decl_tree_for_type == NULL)
95 decl_tree_for_type
96 = hash_table<tree_type_map_cache_hasher>::create_ggc (10);
97 /* That also means we don't have to bother with the lookup. */
98 return NULL_TREE;
101 struct tree_type_map *h, in;
102 in.type.from = type;
104 h = decl_tree_for_type->find_with_hash (&in, TYPE_UID (type));
105 return h ? h->decl : NULL_TREE;
108 /* Insert a mapping TYPE->DECL in the VAR_DECL for type hashtable. */
110 static void
111 decl_for_type_insert (tree type, tree decl)
113 struct tree_type_map *h;
115 h = ggc_alloc<tree_type_map> ();
116 h->type.from = type;
117 h->decl = decl;
118 *decl_tree_for_type->find_slot_with_hash (h, TYPE_UID (type), INSERT) = h;
121 /* Helper routine, which encodes a value in the pointer_sized_int_node.
122 Arguments with precision <= POINTER_SIZE are passed directly,
123 the rest is passed by reference. T is a value we are to encode.
124 PHASE determines when this function is called. */
126 tree
127 ubsan_encode_value (tree t, enum ubsan_encode_value_phase phase)
129 tree type = TREE_TYPE (t);
130 if (TREE_CODE (type) == BITINT_TYPE)
132 if (TYPE_PRECISION (type) <= POINTER_SIZE)
134 type = pointer_sized_int_node;
135 t = fold_build1 (NOP_EXPR, type, t);
137 else
139 if (TYPE_PRECISION (type) > MAX_FIXED_MODE_SIZE)
140 return build_zero_cst (pointer_sized_int_node);
141 type = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
142 TYPE_UNSIGNED (type));
143 t = fold_build1 (NOP_EXPR, type, t);
146 scalar_mode mode = SCALAR_TYPE_MODE (type);
147 const unsigned int bitsize = GET_MODE_BITSIZE (mode);
148 if (bitsize <= POINTER_SIZE)
149 switch (TREE_CODE (type))
151 case BOOLEAN_TYPE:
152 case ENUMERAL_TYPE:
153 case INTEGER_TYPE:
154 return fold_build1 (NOP_EXPR, pointer_sized_int_node, t);
155 case REAL_TYPE:
157 tree itype = build_nonstandard_integer_type (bitsize, true);
158 t = fold_build1 (VIEW_CONVERT_EXPR, itype, t);
159 return fold_convert (pointer_sized_int_node, t);
161 default:
162 gcc_unreachable ();
164 else
166 if (!DECL_P (t) || !TREE_ADDRESSABLE (t))
168 /* The reason for this is that we don't want to pessimize
169 code by making vars unnecessarily addressable. */
170 tree var;
171 if (phase != UBSAN_ENCODE_VALUE_GENERIC)
173 var = create_tmp_var (type);
174 mark_addressable (var);
176 else
178 var = create_tmp_var_raw (type);
179 TREE_ADDRESSABLE (var) = 1;
180 DECL_CONTEXT (var) = current_function_decl;
182 if (phase == UBSAN_ENCODE_VALUE_RTL)
184 rtx mem = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
185 type);
186 SET_DECL_RTL (var, mem);
187 expand_assignment (var, t, false);
188 return build_fold_addr_expr (var);
190 if (phase != UBSAN_ENCODE_VALUE_GENERIC)
192 tree tem = build2 (MODIFY_EXPR, void_type_node, var, t);
193 t = build_fold_addr_expr (var);
194 return build2 (COMPOUND_EXPR, TREE_TYPE (t), tem, t);
196 else
198 var = build4 (TARGET_EXPR, type, var, t, NULL_TREE, NULL_TREE);
199 return build_fold_addr_expr (var);
202 else
203 return build_fold_addr_expr (t);
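/* Illustrative note (added; not upstream text): for a 32-bit int i the
   encoded value is simply the integer widened to pointer size, while a
   value wider than a pointer, e.g. a long double d, is spilled to an
   addressable temporary and passed by address, roughly:

     handler (&data, (__UINTPTR_TYPE__) i);      // precision <= POINTER_SIZE
     long double tmp = d;
     handler (&data, (__UINTPTR_TYPE__) &tmp);   // passed by reference

   where `handler' stands for whichever __ubsan_handle_* routine the
   caller builds.  */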
207 /* Cached ubsan_get_type_descriptor_type () return value. */
208 static GTY(()) tree ubsan_type_descriptor_type;
210 /* Build
211 struct __ubsan_type_descriptor
213 unsigned short __typekind;
214 unsigned short __typeinfo;
215 char __typename[];
217 type. */
219 static tree
220 ubsan_get_type_descriptor_type (void)
222 static const char *field_names[3]
223 = { "__typekind", "__typeinfo", "__typename" };
224 tree fields[3], ret;
226 if (ubsan_type_descriptor_type)
227 return ubsan_type_descriptor_type;
229 tree itype = build_range_type (sizetype, size_zero_node, NULL_TREE);
230 tree flex_arr_type = build_array_type (char_type_node, itype);
232 ret = make_node (RECORD_TYPE);
233 for (int i = 0; i < 3; i++)
235 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
236 get_identifier (field_names[i]),
237 (i == 2) ? flex_arr_type
238 : short_unsigned_type_node);
239 DECL_CONTEXT (fields[i]) = ret;
240 if (i)
241 DECL_CHAIN (fields[i - 1]) = fields[i];
243 tree type_decl = build_decl (input_location, TYPE_DECL,
244 get_identifier ("__ubsan_type_descriptor"),
245 ret);
246 DECL_IGNORED_P (type_decl) = 1;
247 DECL_ARTIFICIAL (type_decl) = 1;
248 TYPE_FIELDS (ret) = fields[0];
249 TYPE_NAME (ret) = type_decl;
250 TYPE_STUB_DECL (ret) = type_decl;
251 TYPE_ARTIFICIAL (ret) = 1;
252 layout_type (ret);
253 ubsan_type_descriptor_type = ret;
254 return ret;
257 /* Cached ubsan_get_source_location_type () return value. */
258 static GTY(()) tree ubsan_source_location_type;
260 /* Build
261 struct __ubsan_source_location
263 const char *__filename;
264 unsigned int __line;
265 unsigned int __column;
267 type. */
269 tree
270 ubsan_get_source_location_type (void)
272 static const char *field_names[3]
273 = { "__filename", "__line", "__column" };
274 tree fields[3], ret;
275 if (ubsan_source_location_type)
276 return ubsan_source_location_type;
278 tree const_char_type = build_qualified_type (char_type_node,
279 TYPE_QUAL_CONST);
281 ret = make_node (RECORD_TYPE);
282 for (int i = 0; i < 3; i++)
284 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
285 get_identifier (field_names[i]),
286 (i == 0) ? build_pointer_type (const_char_type)
287 : unsigned_type_node);
288 DECL_CONTEXT (fields[i]) = ret;
289 if (i)
290 DECL_CHAIN (fields[i - 1]) = fields[i];
292 tree type_decl = build_decl (input_location, TYPE_DECL,
293 get_identifier ("__ubsan_source_location"),
294 ret);
295 DECL_IGNORED_P (type_decl) = 1;
296 DECL_ARTIFICIAL (type_decl) = 1;
297 TYPE_FIELDS (ret) = fields[0];
298 TYPE_NAME (ret) = type_decl;
299 TYPE_STUB_DECL (ret) = type_decl;
300 TYPE_ARTIFICIAL (ret) = 1;
301 layout_type (ret);
302 ubsan_source_location_type = ret;
303 return ret;
306 /* Helper routine that returns a CONSTRUCTOR of __ubsan_source_location
307 type with its fields filled from a location_t LOC. */
309 static tree
310 ubsan_source_location (location_t loc)
312 expanded_location xloc;
313 tree type = ubsan_get_source_location_type ();
315 xloc = expand_location (loc);
316 tree str;
317 if (xloc.file == NULL)
319 str = build_int_cst (ptr_type_node, 0);
320 xloc.line = 0;
321 xloc.column = 0;
323 else
325 /* Fill in the values from LOC. */
326 size_t len = strlen (xloc.file) + 1;
327 str = build_string (len, xloc.file);
328 TREE_TYPE (str) = build_array_type_nelts (char_type_node, len);
329 TREE_READONLY (str) = 1;
330 TREE_STATIC (str) = 1;
331 str = build_fold_addr_expr (str);
333 tree ctor = build_constructor_va (type, 3, NULL_TREE, str, NULL_TREE,
334 build_int_cst (unsigned_type_node,
335 xloc.line), NULL_TREE,
336 build_int_cst (unsigned_type_node,
337 xloc.column));
338 TREE_CONSTANT (ctor) = 1;
339 TREE_STATIC (ctor) = 1;
341 return ctor;
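/* Worked example (added for illustration; the variable name is made up):
   a check at foo.c:12:3 produces a constructor equivalent to

     static const struct __ubsan_source_location loc = { "foo.c", 12, 3 };

   while an unknown location produces { NULL, 0, 0 }.  */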
344 /* This routine returns a magic number for TYPE. */
346 static unsigned short
347 get_ubsan_type_info_for_type (tree type)
349 if (SCALAR_FLOAT_TYPE_P (type))
350 return tree_to_uhwi (TYPE_SIZE (type));
351 else if (INTEGRAL_TYPE_P (type))
353 int prec = exact_log2 (tree_to_uhwi (TYPE_SIZE (type)));
354 gcc_assert (prec != -1);
355 return (prec << 1) | !TYPE_UNSIGNED (type);
357 else
358 return 0;
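/* Worked examples (added): for a 32-bit signed 'int' the routine above
   returns (log2 (32) << 1) | 1 == 11, for a 64-bit 'unsigned long long'
   it returns (log2 (64) << 1) | 0 == 12, and for 'float' it returns the
   bit size, 32.  */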
361 /* Counters for internal labels. ubsan_ids[0] for Lubsan_type,
362 ubsan_ids[1] for Lubsan_data labels. */
363 static GTY(()) unsigned int ubsan_ids[2];
365 /* Helper routine that returns ADDR_EXPR of a VAR_DECL of a type
366 descriptor. It first looks into the hash table; if not found,
367 create the VAR_DECL, put it into the hash table and return the
368 ADDR_EXPR of it. TYPE describes a particular type. PSTYLE is
369 an enum controlling how we want to print the type. */
371 tree
372 ubsan_type_descriptor (tree type, enum ubsan_print_style pstyle)
374 /* See through any typedefs. */
375 type = TYPE_MAIN_VARIANT (type);
376 tree type3 = type;
377 if (pstyle == UBSAN_PRINT_FORCE_INT)
379 /* Temporary hack for -fsanitize=shift with _BitInt(129) and more.
380 libubsan crashes if it is not TK_Integer type. */
381 if (TREE_CODE (type) == BITINT_TYPE
382 && TYPE_PRECISION (type) > MAX_FIXED_MODE_SIZE)
383 type3 = build_qualified_type (type, TYPE_QUAL_CONST);
384 if (type3 == type)
385 pstyle = UBSAN_PRINT_NORMAL;
388 tree decl = decl_for_type_lookup (type3);
389 /* It is possible that some of the earlier created DECLs were found
390 unused, in that case they weren't emitted and varpool_node::get
391 returns NULL node on them. But now we really need them. Thus,
392 renew them here. */
393 if (decl != NULL_TREE && varpool_node::get (decl))
395 return build_fold_addr_expr (decl);
398 tree dtype = ubsan_get_type_descriptor_type ();
399 tree type2 = type;
400 const char *tname = NULL;
401 pretty_printer pretty_name;
402 unsigned char deref_depth = 0;
403 unsigned short tkind, tinfo;
404 char tname_bitint[sizeof ("unsigned _BitInt(2147483647)")];
406 /* Get the name of the type, or the name of the pointer type. */
407 if (pstyle == UBSAN_PRINT_POINTER)
409 gcc_assert (POINTER_TYPE_P (type));
410 type2 = TREE_TYPE (type);
412 /* Remove any '*' operators from TYPE. */
413 while (POINTER_TYPE_P (type2))
414 deref_depth++, type2 = TREE_TYPE (type2);
416 if (TREE_CODE (type2) == METHOD_TYPE)
417 type2 = TYPE_METHOD_BASETYPE (type2);
420 /* If an array, get its type. */
421 type2 = strip_array_types (type2);
423 if (pstyle == UBSAN_PRINT_ARRAY)
425 while (POINTER_TYPE_P (type2))
426 deref_depth++, type2 = TREE_TYPE (type2);
429 if (TYPE_NAME (type2) != NULL)
431 if (TREE_CODE (TYPE_NAME (type2)) == IDENTIFIER_NODE)
432 tname = IDENTIFIER_POINTER (TYPE_NAME (type2));
433 else if (DECL_NAME (TYPE_NAME (type2)) != NULL)
434 tname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type2)));
437 if (tname == NULL)
439 if (TREE_CODE (type2) == BITINT_TYPE)
441 snprintf (tname_bitint, sizeof (tname_bitint),
442 "%s_BitInt(%d)", TYPE_UNSIGNED (type2) ? "unsigned " : "",
443 TYPE_PRECISION (type2));
444 tname = tname_bitint;
446 else
447 /* We weren't able to determine the type name. */
448 tname = "<unknown>";
451 pp_quote (&pretty_name);
453 tree eltype = type;
454 if (pstyle == UBSAN_PRINT_POINTER)
456 pp_printf (&pretty_name, "%s%s%s%s%s%s%s",
457 TYPE_VOLATILE (type2) ? "volatile " : "",
458 TYPE_READONLY (type2) ? "const " : "",
459 TYPE_RESTRICT (type2) ? "restrict " : "",
460 TYPE_ATOMIC (type2) ? "_Atomic " : "",
461 TREE_CODE (type2) == RECORD_TYPE
462 ? "struct "
463 : TREE_CODE (type2) == UNION_TYPE
464 ? "union " : "", tname,
465 deref_depth == 0 ? "" : " ");
466 while (deref_depth-- > 0)
467 pp_star (&pretty_name);
469 else if (pstyle == UBSAN_PRINT_ARRAY)
471 /* Pretty print the array dimensions. */
472 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
473 tree t = type;
474 pp_string (&pretty_name, tname);
475 pp_space (&pretty_name);
476 while (deref_depth-- > 0)
477 pp_star (&pretty_name);
478 while (TREE_CODE (t) == ARRAY_TYPE)
480 pp_left_bracket (&pretty_name);
481 tree dom = TYPE_DOMAIN (t);
482 if (dom != NULL_TREE
483 && TYPE_MAX_VALUE (dom) != NULL_TREE
484 && TREE_CODE (TYPE_MAX_VALUE (dom)) == INTEGER_CST)
486 unsigned HOST_WIDE_INT m;
487 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (dom))
488 && (m = tree_to_uhwi (TYPE_MAX_VALUE (dom))) + 1 != 0)
489 pp_unsigned_wide_integer (&pretty_name, m + 1);
490 else
491 pp_wide_int (&pretty_name,
492 wi::add (wi::to_widest (TYPE_MAX_VALUE (dom)), 1),
493 TYPE_SIGN (TREE_TYPE (dom)));
495 else
496 /* ??? We can't determine the variable name; print VLA unspec. */
497 pp_star (&pretty_name);
498 pp_right_bracket (&pretty_name);
499 t = TREE_TYPE (t);
502 /* Save the tree with stripped types. */
503 eltype = t;
505 else
506 pp_string (&pretty_name, tname);
508 pp_quote (&pretty_name);
510 switch (TREE_CODE (eltype))
512 case BOOLEAN_TYPE:
513 case ENUMERAL_TYPE:
514 case INTEGER_TYPE:
515 tkind = 0x0000;
516 break;
517 case BITINT_TYPE:
518 if (TYPE_PRECISION (eltype) <= MAX_FIXED_MODE_SIZE)
519 tkind = 0x0000;
520 else
521 tkind = 0xffff;
522 break;
523 case REAL_TYPE:
524 /* FIXME: libubsan right now only supports float, double and
525 long double type formats. */
526 if (TYPE_MODE (eltype) == TYPE_MODE (float_type_node)
527 || TYPE_MODE (eltype) == TYPE_MODE (double_type_node)
528 || TYPE_MODE (eltype) == TYPE_MODE (long_double_type_node))
529 tkind = 0x0001;
530 else
531 tkind = 0xffff;
532 break;
533 default:
534 tkind = 0xffff;
535 break;
537 tinfo = tkind == 0xffff ? 0 : get_ubsan_type_info_for_type (eltype);
539 if (pstyle == UBSAN_PRINT_FORCE_INT)
541 tkind = 0x0000;
542 tree t = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
543 TYPE_UNSIGNED (eltype));
544 tinfo = get_ubsan_type_info_for_type (t);
547 /* Create a new VAR_DECL of type descriptor. */
548 const char *tmp = pp_formatted_text (&pretty_name);
549 size_t len = strlen (tmp) + 1;
550 tree str = build_string (len, tmp);
551 TREE_TYPE (str) = build_array_type_nelts (char_type_node, len);
552 TREE_READONLY (str) = 1;
553 TREE_STATIC (str) = 1;
555 char tmp_name[32];
556 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_type", ubsan_ids[0]++);
557 decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
558 dtype);
559 TREE_STATIC (decl) = 1;
560 TREE_PUBLIC (decl) = 0;
561 DECL_ARTIFICIAL (decl) = 1;
562 DECL_IGNORED_P (decl) = 1;
563 DECL_EXTERNAL (decl) = 0;
564 DECL_SIZE (decl)
565 = size_binop (PLUS_EXPR, DECL_SIZE (decl), TYPE_SIZE (TREE_TYPE (str)));
566 DECL_SIZE_UNIT (decl)
567 = size_binop (PLUS_EXPR, DECL_SIZE_UNIT (decl),
568 TYPE_SIZE_UNIT (TREE_TYPE (str)));
570 tree ctor = build_constructor_va (dtype, 3, NULL_TREE,
571 build_int_cst (short_unsigned_type_node,
572 tkind), NULL_TREE,
573 build_int_cst (short_unsigned_type_node,
574 tinfo), NULL_TREE, str);
575 TREE_CONSTANT (ctor) = 1;
576 TREE_STATIC (ctor) = 1;
577 DECL_INITIAL (decl) = ctor;
578 varpool_node::finalize_decl (decl);
580 /* Save the VAR_DECL into the hash table. */
581 decl_for_type_insert (type3, decl);
583 return build_fold_addr_expr (decl);
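/* Illustrative example (added; the label name is symbolic): on a target
   with 32-bit int, ubsan_type_descriptor (integer_type_node) emits
   roughly

     static const struct __ubsan_type_descriptor Lubsan_typeN
       = { 0x0000 /* TK_Integer */, 11, "'int'" };

   and float_type_node gets { 0x0001, 32, "'float'" }; the quotes around
   the name come from the pp_quote calls above.  */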
586 /* Create a structure for the ubsan library. NAME is a name of the new
587 structure. LOCCNT is number of locations, PLOC points to array of
588 locations. The arguments in ... are of __ubsan_type_descriptor type
589 and there are at most two of them, followed by NULL_TREE, followed
590 by optional extra arguments and another NULL_TREE. */
592 tree
593 ubsan_create_data (const char *name, int loccnt, const location_t *ploc, ...)
595 va_list args;
596 tree ret, t;
597 tree fields[6];
598 vec<tree, va_gc> *saved_args = NULL;
599 size_t i = 0;
600 int j;
602 /* It is possible that PCH zapped table with definitions of sanitizer
603 builtins. Reinitialize them if needed. */
604 initialize_sanitizer_builtins ();
606 /* Firstly, create a pointer to type descriptor type. */
607 tree td_type = ubsan_get_type_descriptor_type ();
608 td_type = build_pointer_type (td_type);
610 /* Create the structure type. */
611 ret = make_node (RECORD_TYPE);
612 for (j = 0; j < loccnt; j++)
614 gcc_checking_assert (i < 2);
615 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
616 ubsan_get_source_location_type ());
617 DECL_CONTEXT (fields[i]) = ret;
618 if (i)
619 DECL_CHAIN (fields[i - 1]) = fields[i];
620 i++;
623 va_start (args, ploc);
624 for (t = va_arg (args, tree); t != NULL_TREE;
625 i++, t = va_arg (args, tree))
627 gcc_checking_assert (i < 4);
628 /* Save the tree arguments for later use. */
629 vec_safe_push (saved_args, t);
630 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
631 td_type);
632 DECL_CONTEXT (fields[i]) = ret;
633 if (i)
634 DECL_CHAIN (fields[i - 1]) = fields[i];
637 for (t = va_arg (args, tree); t != NULL_TREE;
638 i++, t = va_arg (args, tree))
640 gcc_checking_assert (i < 6);
641 /* Save the tree arguments for later use. */
642 vec_safe_push (saved_args, t);
643 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
644 TREE_TYPE (t));
645 DECL_CONTEXT (fields[i]) = ret;
646 if (i)
647 DECL_CHAIN (fields[i - 1]) = fields[i];
649 va_end (args);
651 tree type_decl = build_decl (input_location, TYPE_DECL,
652 get_identifier (name), ret);
653 DECL_IGNORED_P (type_decl) = 1;
654 DECL_ARTIFICIAL (type_decl) = 1;
655 TYPE_FIELDS (ret) = fields[0];
656 TYPE_NAME (ret) = type_decl;
657 TYPE_STUB_DECL (ret) = type_decl;
658 TYPE_ARTIFICIAL (ret) = 1;
659 layout_type (ret);
661 /* Now, fill in the type. */
662 char tmp_name[32];
663 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_data", ubsan_ids[1]++);
664 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
665 ret);
666 TREE_STATIC (var) = 1;
667 TREE_PUBLIC (var) = 0;
668 DECL_ARTIFICIAL (var) = 1;
669 DECL_IGNORED_P (var) = 1;
670 DECL_EXTERNAL (var) = 0;
672 vec<constructor_elt, va_gc> *v;
673 vec_alloc (v, i);
674 tree ctor = build_constructor (ret, v);
676 /* If desirable, set the __ubsan_source_location element. */
677 for (j = 0; j < loccnt; j++)
679 location_t loc = LOCATION_LOCUS (ploc[j]);
680 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, ubsan_source_location (loc));
683 size_t nelts = vec_safe_length (saved_args);
684 for (i = 0; i < nelts; i++)
686 t = (*saved_args)[i];
687 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
690 TREE_CONSTANT (ctor) = 1;
691 TREE_STATIC (ctor) = 1;
692 DECL_INITIAL (var) = ctor;
693 varpool_node::finalize_decl (var);
695 return var;
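/* A minimal usage sketch (added for illustration, not part of the original
   file; the handler choice and the data record name are arbitrary): how the
   helpers above are typically combined to emit one handler call at GIMPLE
   time, mirroring the expanders below.  */

static void
ubsan_emit_handler_call_sketch (gimple_stmt_iterator *gsi, location_t loc,
				tree type, tree val)
{
  /* Build the static data record: one source location plus one type
     descriptor, each variadic group terminated by NULL_TREE.  */
  tree data = ubsan_create_data ("__ubsan_sketch_data", 1, &loc,
				 ubsan_type_descriptor (type), NULL_TREE,
				 NULL_TREE);
  data = build_fold_addr_expr_loc (loc, data);
  /* Encode VAL (directly, or by address if wider than a pointer) and
     gimplify the result so it can be used as a call argument.  */
  tree val2 = ubsan_encode_value (val, UBSAN_ENCODE_VALUE_GIMPLE);
  val2 = force_gimple_operand_gsi (gsi, val2, true, NULL_TREE, true,
				   GSI_SAME_STMT);
  tree fn = builtin_decl_explicit (BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE);
  gimple *g = gimple_build_call (fn, 2, data, val2);
  gimple_set_location (g, loc);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
}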
698 /* Shared between *build_builtin_unreachable. */
700 tree
701 sanitize_unreachable_fn (tree *data, location_t loc)
703 tree fn = NULL_TREE;
704 bool san = sanitize_flags_p (SANITIZE_UNREACHABLE);
705 if (san
706 ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
707 : flag_unreachable_traps)
709 fn = builtin_decl_explicit (BUILT_IN_UNREACHABLE_TRAP);
710 *data = NULL_TREE;
712 else if (san)
714 /* Call ubsan_create_data first as it initializes SANITIZER built-ins. */
715 *data = ubsan_create_data ("__ubsan_unreachable_data", 1, &loc,
716 NULL_TREE, NULL_TREE);
717 fn = builtin_decl_explicit (BUILT_IN_UBSAN_HANDLE_BUILTIN_UNREACHABLE);
718 *data = build_fold_addr_expr_loc (loc, *data);
720 else
722 fn = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
723 *data = NULL_TREE;
725 return fn;
728 /* Rewrite a gcall to __builtin_unreachable for -fsanitize=unreachable. Called
729 by the sanopt pass. */
731 bool
732 ubsan_instrument_unreachable (gimple_stmt_iterator *gsi)
734 location_t loc = gimple_location (gsi_stmt (*gsi));
735 gimple *g = gimple_build_builtin_unreachable (loc);
736 gsi_replace (gsi, g, false);
737 return false;
740 /* Return true if T is a call to a libubsan routine. */
742 bool
743 is_ubsan_builtin_p (tree t)
745 return TREE_CODE (t) == FUNCTION_DECL
746 && fndecl_built_in_p (t, BUILT_IN_NORMAL)
747 && strncmp (IDENTIFIER_POINTER (DECL_NAME (t)),
748 "__builtin___ubsan_", 18) == 0;
751 /* Create a callgraph edge for statement STMT. */
753 static void
754 ubsan_create_edge (gimple *stmt)
756 gcall *call_stmt = dyn_cast <gcall *> (stmt);
757 basic_block bb = gimple_bb (stmt);
758 cgraph_node *node = cgraph_node::get (current_function_decl);
759 tree decl = gimple_call_fndecl (call_stmt);
760 if (decl)
761 node->create_edge (cgraph_node::get_create (decl), call_stmt, bb->count);
764 /* Expand the UBSAN_BOUNDS special builtin function. */
766 bool
767 ubsan_expand_bounds_ifn (gimple_stmt_iterator *gsi)
769 gimple *stmt = gsi_stmt (*gsi);
770 location_t loc = gimple_location (stmt);
771 gcc_assert (gimple_call_num_args (stmt) == 3);
773 /* Pick up the arguments of the UBSAN_BOUNDS call. */
774 tree type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 0)));
775 tree index = gimple_call_arg (stmt, 1);
776 tree orig_index = index;
777 tree bound = gimple_call_arg (stmt, 2);
779 gimple_stmt_iterator gsi_orig = *gsi;
781 /* Create condition "if (index >= bound)". */
782 basic_block then_bb, fallthru_bb;
783 gimple_stmt_iterator cond_insert_point
784 = create_cond_insert_point (gsi, false, false, true,
785 &then_bb, &fallthru_bb);
786 index = fold_convert (TREE_TYPE (bound), index);
787 index = force_gimple_operand_gsi (&cond_insert_point, index,
788 true, NULL_TREE,
789 false, GSI_NEW_STMT);
790 gimple *g = gimple_build_cond (GE_EXPR, index, bound, NULL_TREE, NULL_TREE);
791 gimple_set_location (g, loc);
792 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
794 /* Generate __ubsan_handle_out_of_bounds call. */
795 *gsi = gsi_after_labels (then_bb);
796 if (flag_sanitize_trap & SANITIZE_BOUNDS)
797 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
798 else
800 tree data
801 = ubsan_create_data ("__ubsan_out_of_bounds_data", 1, &loc,
802 ubsan_type_descriptor (type, UBSAN_PRINT_ARRAY),
803 ubsan_type_descriptor (TREE_TYPE (orig_index)),
804 NULL_TREE, NULL_TREE);
805 data = build_fold_addr_expr_loc (loc, data);
806 enum built_in_function bcode
807 = (flag_sanitize_recover & SANITIZE_BOUNDS)
808 ? BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS
809 : BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS_ABORT;
810 tree fn = builtin_decl_explicit (bcode);
811 tree val = ubsan_encode_value (orig_index, UBSAN_ENCODE_VALUE_GIMPLE);
812 val = force_gimple_operand_gsi (gsi, val, true, NULL_TREE, true,
813 GSI_SAME_STMT);
814 g = gimple_build_call (fn, 2, data, val);
816 gimple_set_location (g, loc);
817 gsi_insert_before (gsi, g, GSI_SAME_STMT);
819 /* Get rid of the UBSAN_BOUNDS call from the IR. */
820 unlink_stmt_vdef (stmt);
821 gsi_remove (&gsi_orig, true);
823 /* Point GSI to next logical statement. */
824 *gsi = gsi_start_bb (fallthru_bb);
825 return true;
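/* Sketch of the expansion above (added for illustration): the call

     UBSAN_BOUNDS (&a, i, bound);

   becomes, roughly,

     if (i >= bound)
       __ubsan_handle_out_of_bounds (&data, (__UINTPTR_TYPE__) i);

   or a plain __builtin_trap () when -fsanitize-trap covers bounds.  */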
828 /* Expand UBSAN_NULL internal call. The type is kept on the ckind
829 argument which is a constant, because the middle-end treats pointer
830 conversions as useless and therefore the type of the first argument
831 could be changed to any other pointer type. */
833 bool
834 ubsan_expand_null_ifn (gimple_stmt_iterator *gsip)
836 gimple_stmt_iterator gsi = *gsip;
837 gimple *stmt = gsi_stmt (gsi);
838 location_t loc = gimple_location (stmt);
839 gcc_assert (gimple_call_num_args (stmt) == 3);
840 tree ptr = gimple_call_arg (stmt, 0);
841 tree ckind = gimple_call_arg (stmt, 1);
842 tree align = gimple_call_arg (stmt, 2);
843 tree check_align = NULL_TREE;
844 bool check_null;
846 basic_block cur_bb = gsi_bb (gsi);
848 gimple *g;
849 if (!integer_zerop (align))
851 unsigned int ptralign = get_pointer_alignment (ptr) / BITS_PER_UNIT;
852 if (compare_tree_int (align, ptralign) == 1)
854 check_align = make_ssa_name (pointer_sized_int_node);
855 g = gimple_build_assign (check_align, NOP_EXPR, ptr);
856 gimple_set_location (g, loc);
857 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
860 check_null = sanitize_flags_p (SANITIZE_NULL);
862 if (check_align == NULL_TREE && !check_null)
864 gsi_remove (gsip, true);
865 /* Unlink the UBSAN_NULLs vops before replacing it. */
866 unlink_stmt_vdef (stmt);
867 return true;
870 /* Split the original block holding the pointer dereference. */
871 edge e = split_block (cur_bb, stmt);
873 /* Get a hold on the 'condition block', the 'then block' and the
874 'else block'. */
875 basic_block cond_bb = e->src;
876 basic_block fallthru_bb = e->dest;
877 basic_block then_bb = create_empty_bb (cond_bb);
878 add_bb_to_loop (then_bb, cond_bb->loop_father);
879 loops_state_set (LOOPS_NEED_FIXUP);
881 /* Make an edge coming from the 'cond block' into the 'then block';
882 this edge is unlikely taken, so set up the probability accordingly. */
883 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
884 e->probability = profile_probability::very_unlikely ();
885 then_bb->count = e->count ();
887 /* Connect 'then block' with the 'else block'. This is needed
888 as the ubsan routines we call in the 'then block' are not noreturn.
 889 The 'then block' only has one outgoing edge. */
890 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
892 /* Set up the fallthrough basic block. */
893 e = find_edge (cond_bb, fallthru_bb);
894 e->flags = EDGE_FALSE_VALUE;
895 e->probability = profile_probability::very_likely ();
897 /* Update dominance info for the newly created then_bb; note that
898 fallthru_bb's dominance info has already been updated by
899 split_block. */
900 if (dom_info_available_p (CDI_DOMINATORS))
901 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
903 /* Put the ubsan builtin call into the newly created BB. */
904 if (flag_sanitize_trap & ((check_align ? SANITIZE_ALIGNMENT + 0 : 0)
905 | (check_null ? SANITIZE_NULL + 0 : 0)))
906 g = gimple_build_call (builtin_decl_implicit (BUILT_IN_TRAP), 0);
907 else
909 enum built_in_function bcode
910 = (flag_sanitize_recover & ((check_align ? SANITIZE_ALIGNMENT + 0 : 0)
911 | (check_null ? SANITIZE_NULL + 0 : 0)))
912 ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1
913 : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1_ABORT;
914 tree fn = builtin_decl_implicit (bcode);
915 int align_log = tree_log2 (align);
916 tree data
917 = ubsan_create_data ("__ubsan_null_data", 1, &loc,
918 ubsan_type_descriptor (TREE_TYPE (ckind),
919 UBSAN_PRINT_POINTER),
920 NULL_TREE,
921 build_int_cst (unsigned_char_type_node,
922 MAX (align_log, 0)),
923 fold_convert (unsigned_char_type_node, ckind),
924 NULL_TREE);
925 data = build_fold_addr_expr_loc (loc, data);
926 g = gimple_build_call (fn, 2, data,
927 check_align ? check_align
928 : build_zero_cst (pointer_sized_int_node));
930 gimple_stmt_iterator gsi2 = gsi_start_bb (then_bb);
931 gimple_set_location (g, loc);
932 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
934 /* Unlink the UBSAN_NULLs vops before replacing it. */
935 unlink_stmt_vdef (stmt);
937 if (check_null)
939 g = gimple_build_cond (EQ_EXPR, ptr, build_int_cst (TREE_TYPE (ptr), 0),
940 NULL_TREE, NULL_TREE);
941 gimple_set_location (g, loc);
943 /* Replace the UBSAN_NULL with a GIMPLE_COND stmt. */
944 gsi_replace (&gsi, g, false);
945 stmt = g;
948 if (check_align)
950 if (check_null)
952 /* Split the block with the condition again. */
953 e = split_block (cond_bb, stmt);
954 basic_block cond1_bb = e->src;
955 basic_block cond2_bb = e->dest;
957 /* Make an edge coming from the 'cond1 block' into the 'then block';
958 this edge is unlikely taken, so set up the probability
959 accordingly. */
960 e = make_edge (cond1_bb, then_bb, EDGE_TRUE_VALUE);
961 e->probability = profile_probability::very_unlikely ();
963 /* Set up the fallthrough basic block. */
964 e = find_edge (cond1_bb, cond2_bb);
965 e->flags = EDGE_FALSE_VALUE;
966 e->probability = profile_probability::very_likely ();
968 /* Update dominance info. */
969 if (dom_info_available_p (CDI_DOMINATORS))
971 set_immediate_dominator (CDI_DOMINATORS, fallthru_bb, cond1_bb);
972 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond1_bb);
975 gsi2 = gsi_start_bb (cond2_bb);
978 tree mask = build_int_cst (pointer_sized_int_node,
979 tree_to_uhwi (align) - 1);
980 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
981 BIT_AND_EXPR, check_align, mask);
982 gimple_set_location (g, loc);
983 if (check_null)
984 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
985 else
986 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
988 g = gimple_build_cond (NE_EXPR, gimple_assign_lhs (g),
989 build_int_cst (pointer_sized_int_node, 0),
990 NULL_TREE, NULL_TREE);
991 gimple_set_location (g, loc);
992 if (check_null)
993 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
994 else
995 /* Replace the UBSAN_NULL with a GIMPLE_COND stmt. */
996 gsi_replace (&gsi, g, false);
998 return false;
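/* Sketch of the expansion above (added): for an access through pointer p
   that must be non-null and 4-byte aligned,

     UBSAN_NULL (p, ckind, 4);

   becomes, roughly,

     if (p == 0 || ((__UINTPTR_TYPE__) p & 3) != 0)
       __ubsan_handle_type_mismatch_v1 (&data, (__UINTPTR_TYPE__) p);

   with either half of the condition dropped when only the null or only
   the alignment check is enabled.  */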
1001 #define OBJSZ_MAX_OFFSET (1024 * 16)
1003 /* Expand UBSAN_OBJECT_SIZE internal call. */
1005 bool
1006 ubsan_expand_objsize_ifn (gimple_stmt_iterator *gsi)
1008 gimple *stmt = gsi_stmt (*gsi);
1009 location_t loc = gimple_location (stmt);
1010 gcc_assert (gimple_call_num_args (stmt) == 4);
1012 tree ptr = gimple_call_arg (stmt, 0);
1013 tree offset = gimple_call_arg (stmt, 1);
1014 tree size = gimple_call_arg (stmt, 2);
1015 tree ckind = gimple_call_arg (stmt, 3);
1016 gimple_stmt_iterator gsi_orig = *gsi;
1017 gimple *g;
1019 /* See if we can discard the check. */
1020 if (TREE_CODE (size) == INTEGER_CST
1021 && integer_all_onesp (size))
1022 /* Yes, __builtin_object_size couldn't determine the
1023 object size. */;
1024 else if (TREE_CODE (offset) == INTEGER_CST
1025 && wi::to_widest (offset) >= -OBJSZ_MAX_OFFSET
1026 && wi::to_widest (offset) <= -1)
1027 /* The offset is in range [-16K, -1]. */;
1028 else
1030 /* if (offset > objsize) */
1031 basic_block then_bb, fallthru_bb;
1032 gimple_stmt_iterator cond_insert_point
1033 = create_cond_insert_point (gsi, false, false, true,
1034 &then_bb, &fallthru_bb);
1035 g = gimple_build_cond (GT_EXPR, offset, size, NULL_TREE, NULL_TREE);
1036 gimple_set_location (g, loc);
1037 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
1039 /* If the offset is small enough, we don't need the second
1040 run-time check. */
1041 if (TREE_CODE (offset) == INTEGER_CST
1042 && wi::to_widest (offset) >= 0
1043 && wi::to_widest (offset) <= OBJSZ_MAX_OFFSET)
1044 *gsi = gsi_after_labels (then_bb);
1045 else
1047 /* Don't issue run-time error if (ptr > ptr + offset). That
1048 may happen when computing a POINTER_PLUS_EXPR. */
1049 basic_block then2_bb, fallthru2_bb;
1051 gimple_stmt_iterator gsi2 = gsi_after_labels (then_bb);
1052 cond_insert_point = create_cond_insert_point (&gsi2, false, false,
1053 true, &then2_bb,
1054 &fallthru2_bb);
1055 /* Convert the pointer to an integer type. */
1056 tree p = make_ssa_name (pointer_sized_int_node);
1057 g = gimple_build_assign (p, NOP_EXPR, ptr);
1058 gimple_set_location (g, loc);
1059 gsi_insert_before (&cond_insert_point, g, GSI_NEW_STMT);
1060 p = gimple_assign_lhs (g);
1061 /* Compute ptr + offset. */
1062 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1063 PLUS_EXPR, p, offset);
1064 gimple_set_location (g, loc);
1065 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
1066 /* Now build the conditional and put it into the IR. */
1067 g = gimple_build_cond (LE_EXPR, p, gimple_assign_lhs (g),
1068 NULL_TREE, NULL_TREE);
1069 gimple_set_location (g, loc);
1070 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
1071 *gsi = gsi_after_labels (then2_bb);
1074 /* Generate __ubsan_handle_type_mismatch call. */
1075 if (flag_sanitize_trap & SANITIZE_OBJECT_SIZE)
1076 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
1077 else
1079 tree data
1080 = ubsan_create_data ("__ubsan_objsz_data", 1, &loc,
1081 ubsan_type_descriptor (TREE_TYPE (ptr),
1082 UBSAN_PRINT_POINTER),
1083 NULL_TREE,
1084 build_zero_cst (unsigned_char_type_node),
1085 ckind,
1086 NULL_TREE);
1087 data = build_fold_addr_expr_loc (loc, data);
1088 enum built_in_function bcode
1089 = (flag_sanitize_recover & SANITIZE_OBJECT_SIZE)
1090 ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1
1091 : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1_ABORT;
1092 tree p = make_ssa_name (pointer_sized_int_node);
1093 g = gimple_build_assign (p, NOP_EXPR, ptr);
1094 gimple_set_location (g, loc);
1095 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1096 g = gimple_build_call (builtin_decl_explicit (bcode), 2, data, p);
1098 gimple_set_location (g, loc);
1099 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1101 /* Point GSI to next logical statement. */
1102 *gsi = gsi_start_bb (fallthru_bb);
1104 /* Get rid of the UBSAN_OBJECT_SIZE call from the IR. */
1105 unlink_stmt_vdef (stmt);
1106 gsi_remove (&gsi_orig, true);
1107 return true;
1110 /* Get rid of the UBSAN_OBJECT_SIZE call from the IR. */
1111 unlink_stmt_vdef (stmt);
1112 gsi_remove (gsi, true);
1113 return true;
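/* Sketch of the emitted check (added): ignoring the fast paths above,

     UBSAN_OBJECT_SIZE (ptr, offset, size, ckind);

   becomes, roughly,

     if (offset > size && ptr <= ptr + offset)
       __ubsan_handle_type_mismatch_v1 (&data, (__UINTPTR_TYPE__) ptr);

   where the second comparison suppresses the report when the pointer
   addition itself wrapped.  */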
1116 /* Expand UBSAN_PTR internal call. */
1118 bool
1119 ubsan_expand_ptr_ifn (gimple_stmt_iterator *gsip)
1121 gimple_stmt_iterator gsi = *gsip;
1122 gimple *stmt = gsi_stmt (gsi);
1123 location_t loc = gimple_location (stmt);
1124 gcc_assert (gimple_call_num_args (stmt) == 2);
1125 tree ptr = gimple_call_arg (stmt, 0);
1126 tree off = gimple_call_arg (stmt, 1);
1128 if (integer_zerop (off))
1130 gsi_remove (gsip, true);
1131 unlink_stmt_vdef (stmt);
1132 return true;
1135 basic_block cur_bb = gsi_bb (gsi);
1136 tree ptrplusoff = make_ssa_name (pointer_sized_int_node);
1137 tree ptri = make_ssa_name (pointer_sized_int_node);
1138 int pos_neg = get_range_pos_neg (off);
1140 /* Split the original block holding the pointer dereference. */
1141 edge e = split_block (cur_bb, stmt);
1143 /* Get a hold on the 'condition block', the 'then block' and the
1144 'else block'. */
1145 basic_block cond_bb = e->src;
1146 basic_block fallthru_bb = e->dest;
1147 basic_block then_bb = create_empty_bb (cond_bb);
1148 basic_block cond_pos_bb = NULL, cond_neg_bb = NULL;
1149 add_bb_to_loop (then_bb, cond_bb->loop_father);
1150 loops_state_set (LOOPS_NEED_FIXUP);
1152 /* Set up the fallthrough basic block. */
1153 e->flags = EDGE_FALSE_VALUE;
1154 if (pos_neg != 3)
1156 e->probability = profile_probability::very_likely ();
1158 /* Connect 'then block' with the 'else block'. This is needed
1159 as the ubsan routines we call in the 'then block' are not noreturn.
 1160 The 'then block' only has one outgoing edge. */
1161 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1163 /* Make an edge coming from the 'cond block' into the 'then block';
1164 this edge is unlikely taken, so set up the probability
1165 accordingly. */
1166 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1167 e->probability = profile_probability::very_unlikely ();
1168 then_bb->count = e->count ();
1170 else
1172 e->probability = profile_probability::even ();
1174 e = split_block (fallthru_bb, (gimple *) NULL);
1175 cond_neg_bb = e->src;
1176 fallthru_bb = e->dest;
1177 e->probability = profile_probability::very_likely ();
1178 e->flags = EDGE_FALSE_VALUE;
1180 e = make_edge (cond_neg_bb, then_bb, EDGE_TRUE_VALUE);
1181 e->probability = profile_probability::very_unlikely ();
1182 then_bb->count = e->count ();
1184 cond_pos_bb = create_empty_bb (cond_bb);
1185 add_bb_to_loop (cond_pos_bb, cond_bb->loop_father);
1187 e = make_edge (cond_bb, cond_pos_bb, EDGE_TRUE_VALUE);
1188 e->probability = profile_probability::even ();
1189 cond_pos_bb->count = e->count ();
1191 e = make_edge (cond_pos_bb, then_bb, EDGE_TRUE_VALUE);
1192 e->probability = profile_probability::very_unlikely ();
1194 e = make_edge (cond_pos_bb, fallthru_bb, EDGE_FALSE_VALUE);
1195 e->probability = profile_probability::very_likely ();
1197 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1200 gimple *g = gimple_build_assign (ptri, NOP_EXPR, ptr);
1201 gimple_set_location (g, loc);
1202 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1203 g = gimple_build_assign (ptrplusoff, PLUS_EXPR, ptri, off);
1204 gimple_set_location (g, loc);
1205 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1207 /* Update dominance info for the newly created then_bb; note that
1208 fallthru_bb's dominance info has already been updated by
1209 split_block. */
1210 if (dom_info_available_p (CDI_DOMINATORS))
1212 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1213 if (pos_neg == 3)
1215 set_immediate_dominator (CDI_DOMINATORS, cond_pos_bb, cond_bb);
1216 set_immediate_dominator (CDI_DOMINATORS, fallthru_bb, cond_bb);
1220 /* Put the ubsan builtin call into the newly created BB. */
1221 if (flag_sanitize_trap & SANITIZE_POINTER_OVERFLOW)
1222 g = gimple_build_call (builtin_decl_implicit (BUILT_IN_TRAP), 0);
1223 else
1225 enum built_in_function bcode
1226 = (flag_sanitize_recover & SANITIZE_POINTER_OVERFLOW)
1227 ? BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW
1228 : BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW_ABORT;
1229 tree fn = builtin_decl_implicit (bcode);
1230 tree data
1231 = ubsan_create_data ("__ubsan_ptrovf_data", 1, &loc,
1232 NULL_TREE, NULL_TREE);
1233 data = build_fold_addr_expr_loc (loc, data);
1234 g = gimple_build_call (fn, 3, data, ptr, ptrplusoff);
1236 gimple_stmt_iterator gsi2 = gsi_start_bb (then_bb);
1237 gimple_set_location (g, loc);
1238 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
1240 /* Unlink the UBSAN_PTRs vops before replacing it. */
1241 unlink_stmt_vdef (stmt);
1243 if (TREE_CODE (off) == INTEGER_CST)
1244 g = gimple_build_cond (wi::neg_p (wi::to_wide (off)) ? LT_EXPR : GE_EXPR,
1245 ptri, fold_build1 (NEGATE_EXPR, sizetype, off),
1246 NULL_TREE, NULL_TREE);
1247 else if (pos_neg != 3)
1248 g = gimple_build_cond (pos_neg == 1 ? LT_EXPR : GT_EXPR,
1249 ptrplusoff, ptri, NULL_TREE, NULL_TREE);
1250 else
1252 gsi2 = gsi_start_bb (cond_pos_bb);
1253 g = gimple_build_cond (LT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE);
1254 gimple_set_location (g, loc);
1255 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
1257 gsi2 = gsi_start_bb (cond_neg_bb);
1258 g = gimple_build_cond (GT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE);
1259 gimple_set_location (g, loc);
1260 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
1262 tree t = gimple_build (&gsi, true, GSI_SAME_STMT,
1263 loc, NOP_EXPR, ssizetype, off);
1264 g = gimple_build_cond (GE_EXPR, t, ssize_int (0),
1265 NULL_TREE, NULL_TREE);
1267 gimple_set_location (g, loc);
1268 /* Replace the UBSAN_PTR with a GIMPLE_COND stmt. */
1269 gsi_replace (&gsi, g, false);
1270 return false;
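/* Sketch of the expansion above (added): for an offset known to be
   non-negative, UBSAN_PTR (p, off) becomes, roughly,

     if ((__UINTPTR_TYPE__) p + off < (__UINTPTR_TYPE__) p)
       __ubsan_handle_pointer_overflow (&data, p, p + off);

   with the comparison direction flipped for negative offsets and an
   extra sign test on OFF when its sign is unknown.  */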
1274 /* Cached __ubsan_vptr_type_cache decl. */
1275 static GTY(()) tree ubsan_vptr_type_cache_decl;
1277 /* Expand UBSAN_VPTR internal call. The type is kept on the ckind
1278 argument which is a constant, because the middle-end treats pointer
1279 conversions as useless and therefore the type of the first argument
1280 could be changed to any other pointer type. */
1282 bool
1283 ubsan_expand_vptr_ifn (gimple_stmt_iterator *gsip)
1285 gimple_stmt_iterator gsi = *gsip;
1286 gimple *stmt = gsi_stmt (gsi);
1287 location_t loc = gimple_location (stmt);
1288 gcc_assert (gimple_call_num_args (stmt) == 5);
1289 tree op = gimple_call_arg (stmt, 0);
1290 tree vptr = gimple_call_arg (stmt, 1);
1291 tree str_hash = gimple_call_arg (stmt, 2);
1292 tree ti_decl_addr = gimple_call_arg (stmt, 3);
1293 tree ckind_tree = gimple_call_arg (stmt, 4);
1294 ubsan_null_ckind ckind = (ubsan_null_ckind) tree_to_uhwi (ckind_tree);
1295 tree type = TREE_TYPE (TREE_TYPE (ckind_tree));
1296 gimple *g;
1297 basic_block fallthru_bb = NULL;
1299 if (ckind == UBSAN_DOWNCAST_POINTER)
1301 /* Guard everything with if (op != NULL) { ... }. */
1302 basic_block then_bb;
1303 gimple_stmt_iterator cond_insert_point
1304 = create_cond_insert_point (gsip, false, false, true,
1305 &then_bb, &fallthru_bb);
1306 g = gimple_build_cond (NE_EXPR, op, build_zero_cst (TREE_TYPE (op)),
1307 NULL_TREE, NULL_TREE);
1308 gimple_set_location (g, loc);
1309 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
1310 *gsip = gsi_after_labels (then_bb);
1311 gsi_remove (&gsi, false);
1312 gsi_insert_before (gsip, stmt, GSI_NEW_STMT);
1313 gsi = *gsip;
1316 tree htype = TREE_TYPE (str_hash);
1317 tree cst = wide_int_to_tree (htype,
1318 wi::uhwi (((uint64_t) 0x9ddfea08 << 32)
1319 | 0xeb382d69, 64));
1320 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
1321 vptr, str_hash);
1322 gimple_set_location (g, loc);
1323 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1324 g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR,
1325 gimple_assign_lhs (g), cst);
1326 gimple_set_location (g, loc);
1327 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1328 tree t1 = gimple_assign_lhs (g);
1329 g = gimple_build_assign (make_ssa_name (htype), LSHIFT_EXPR,
1330 t1, build_int_cst (integer_type_node, 47));
1331 gimple_set_location (g, loc);
1332 tree t2 = gimple_assign_lhs (g);
1333 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1334 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
1335 vptr, t1);
1336 gimple_set_location (g, loc);
1337 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1338 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
1339 t2, gimple_assign_lhs (g));
1340 gimple_set_location (g, loc);
1341 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1342 g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR,
1343 gimple_assign_lhs (g), cst);
1344 gimple_set_location (g, loc);
1345 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1346 tree t3 = gimple_assign_lhs (g);
1347 g = gimple_build_assign (make_ssa_name (htype), LSHIFT_EXPR,
1348 t3, build_int_cst (integer_type_node, 47));
1349 gimple_set_location (g, loc);
1350 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1351 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
1352 t3, gimple_assign_lhs (g));
1353 gimple_set_location (g, loc);
1354 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1355 g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR,
1356 gimple_assign_lhs (g), cst);
1357 gimple_set_location (g, loc);
1358 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1359 if (!useless_type_conversion_p (pointer_sized_int_node, htype))
1361 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1362 NOP_EXPR, gimple_assign_lhs (g));
1363 gimple_set_location (g, loc);
1364 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1366 tree hash = gimple_assign_lhs (g);
1368 if (ubsan_vptr_type_cache_decl == NULL_TREE)
1370 tree atype = build_array_type_nelts (pointer_sized_int_node, 128);
1371 tree array = build_decl (UNKNOWN_LOCATION, VAR_DECL,
1372 get_identifier ("__ubsan_vptr_type_cache"),
1373 atype);
1374 DECL_ARTIFICIAL (array) = 1;
1375 DECL_IGNORED_P (array) = 1;
1376 TREE_PUBLIC (array) = 1;
1377 TREE_STATIC (array) = 1;
1378 DECL_EXTERNAL (array) = 1;
1379 DECL_VISIBILITY (array) = VISIBILITY_DEFAULT;
1380 DECL_VISIBILITY_SPECIFIED (array) = 1;
1381 varpool_node::finalize_decl (array);
1382 ubsan_vptr_type_cache_decl = array;
1385 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1386 BIT_AND_EXPR, hash,
1387 build_int_cst (pointer_sized_int_node, 127));
1388 gimple_set_location (g, loc);
1389 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1391 tree c = build4_loc (loc, ARRAY_REF, pointer_sized_int_node,
1392 ubsan_vptr_type_cache_decl, gimple_assign_lhs (g),
1393 NULL_TREE, NULL_TREE);
1394 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1395 ARRAY_REF, c);
1396 gimple_set_location (g, loc);
1397 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1399 basic_block then_bb, fallthru2_bb;
1400 gimple_stmt_iterator cond_insert_point
1401 = create_cond_insert_point (gsip, false, false, true,
1402 &then_bb, &fallthru2_bb);
1403 g = gimple_build_cond (NE_EXPR, gimple_assign_lhs (g), hash,
1404 NULL_TREE, NULL_TREE);
1405 gimple_set_location (g, loc);
1406 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
1407 *gsip = gsi_after_labels (then_bb);
1408 if (fallthru_bb == NULL)
1409 fallthru_bb = fallthru2_bb;
1411 tree data
1412 = ubsan_create_data ("__ubsan_vptr_data", 1, &loc,
1413 ubsan_type_descriptor (type), NULL_TREE, ti_decl_addr,
1414 build_int_cst (unsigned_char_type_node, ckind),
1415 NULL_TREE);
1416 data = build_fold_addr_expr_loc (loc, data);
1417 enum built_in_function bcode
1418 = (flag_sanitize_recover & SANITIZE_VPTR)
1419 ? BUILT_IN_UBSAN_HANDLE_DYNAMIC_TYPE_CACHE_MISS
1420 : BUILT_IN_UBSAN_HANDLE_DYNAMIC_TYPE_CACHE_MISS_ABORT;
1422 g = gimple_build_call (builtin_decl_explicit (bcode), 3, data, op, hash);
1423 gimple_set_location (g, loc);
1424 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1426 /* Point GSI to next logical statement. */
1427 *gsip = gsi_start_bb (fallthru_bb);
1429 /* Get rid of the UBSAN_VPTR call from the IR. */
1430 unlink_stmt_vdef (stmt);
1431 gsi_remove (&gsi, true);
1432 return true;
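/* Sketch of the fast path emitted above (added; the mixing details are
   elided): with h the 64-bit hash of (vptr, str_hash) computed with the
   0x9ddfea08eb382d69 multiplier,

     if (__ubsan_vptr_type_cache[h & 127] != h)
       __ubsan_handle_dynamic_type_cache_miss (&data, op, h);

   all guarded by an op != NULL test for UBSAN_DOWNCAST_POINTER.  */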
1435 /* Instrument a memory reference. BASE is the base of MEM, IS_LHS says
 1436 whether the pointer is on the left-hand side of the assignment. */
1438 static void
1439 instrument_mem_ref (tree mem, tree base, gimple_stmt_iterator *iter,
1440 bool is_lhs)
1442 enum ubsan_null_ckind ikind = is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF;
1443 unsigned int align = 0;
1444 if (sanitize_flags_p (SANITIZE_ALIGNMENT))
1446 align = min_align_of_type (TREE_TYPE (base));
1447 if (align <= 1)
1448 align = 0;
1450 if (align == 0 && !sanitize_flags_p (SANITIZE_NULL))
1451 return;
1452 tree t = TREE_OPERAND (base, 0);
1453 if (!POINTER_TYPE_P (TREE_TYPE (t)))
1454 return;
1455 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (base)) && mem != base)
1456 ikind = UBSAN_MEMBER_ACCESS;
1457 tree kind = build_int_cst (build_pointer_type (TREE_TYPE (base)), ikind);
1458 tree alignt = build_int_cst (pointer_sized_int_node, align);
1459 gcall *g = gimple_build_call_internal (IFN_UBSAN_NULL, 3, t, kind, alignt);
1460 gimple_set_location (g, gimple_location (gsi_stmt (*iter)));
1461 gsi_insert_before (iter, g, GSI_SAME_STMT);
1464 /* Perform the pointer instrumentation. */
1466 static void
1467 instrument_null (gimple_stmt_iterator gsi, tree t, bool is_lhs)
1469 /* Handle also e.g. &s->i. */
1470 if (TREE_CODE (t) == ADDR_EXPR)
1471 t = TREE_OPERAND (t, 0);
1472 tree base = get_base_address (t);
1473 if (base != NULL_TREE
1474 && TREE_CODE (base) == MEM_REF
1475 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1476 instrument_mem_ref (t, base, &gsi, is_lhs);
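/* Illustration (added): for a dereference such as s->f through pointer s,
   instrument_mem_ref above inserts, just before the access,

     UBSAN_NULL (s, ckind, align);

   which ubsan_expand_null_ifn later turns into the real checks.  */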
 1481 /* Instrument pointer arithmetic PTR p+ OFF. */
1481 static void
1482 instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree ptr, tree off)
1484 if (TYPE_PRECISION (sizetype) != POINTER_SIZE)
1485 return;
1486 gcall *g = gimple_build_call_internal (IFN_UBSAN_PTR, 2, ptr, off);
1487 gimple_set_location (g, gimple_location (gsi_stmt (*gsi)));
1488 gsi_insert_before (gsi, g, GSI_SAME_STMT);
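/* Illustration (added): a pointer addition q = p p+ off is preceded by

     UBSAN_PTR (p, off);

   which ubsan_expand_ptr_ifn expands during the sanopt pass.  */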
 1491 /* Instrument pointer arithmetic if any. */
1493 static void
1494 maybe_instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree t)
1496 if (TYPE_PRECISION (sizetype) != POINTER_SIZE)
1497 return;
1499 /* Handle also e.g. &s->i. */
1500 if (TREE_CODE (t) == ADDR_EXPR)
1501 t = TREE_OPERAND (t, 0);
1503 if (!handled_component_p (t) && TREE_CODE (t) != MEM_REF)
1504 return;
1506 poly_int64 bitsize, bitpos, bytepos;
1507 tree offset;
1508 machine_mode mode;
1509 int volatilep = 0, reversep, unsignedp = 0;
1510 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
1511 &unsignedp, &reversep, &volatilep);
1512 tree moff = NULL_TREE;
1514 bool decl_p = DECL_P (inner);
1515 tree base;
1516 if (decl_p)
1518 if ((VAR_P (inner)
1519 || TREE_CODE (inner) == PARM_DECL
1520 || TREE_CODE (inner) == RESULT_DECL)
1521 && DECL_REGISTER (inner))
1522 return;
1523 base = inner;
1524 /* If BASE is a fixed size automatic variable or
1525 global variable defined in the current TU and bitpos
1526 fits, don't instrument anything. */
1527 poly_int64 base_size;
1528 if (offset == NULL_TREE
1529 && maybe_ne (bitpos, 0)
1530 && (VAR_P (base)
1531 || TREE_CODE (base) == PARM_DECL
1532 || TREE_CODE (base) == RESULT_DECL)
1533 && poly_int_tree_p (DECL_SIZE (base), &base_size)
1534 && known_ge (base_size, bitpos)
1535 && (!is_global_var (base) || decl_binds_to_current_def_p (base)))
1536 return;
1538 else if (TREE_CODE (inner) == MEM_REF)
1540 base = TREE_OPERAND (inner, 0);
1541 if (TREE_CODE (base) == ADDR_EXPR
1542 && DECL_P (TREE_OPERAND (base, 0))
1543 && !TREE_ADDRESSABLE (TREE_OPERAND (base, 0))
1544 && !is_global_var (TREE_OPERAND (base, 0)))
1545 return;
1546 moff = TREE_OPERAND (inner, 1);
1547 if (integer_zerop (moff))
1548 moff = NULL_TREE;
1550 else
1551 return;
1553 if (!POINTER_TYPE_P (TREE_TYPE (base)) && !DECL_P (base))
1554 return;
1555 bytepos = bits_to_bytes_round_down (bitpos);
1556 if (offset == NULL_TREE && known_eq (bytepos, 0) && moff == NULL_TREE)
1557 return;
1559 tree base_addr = base;
1560 if (decl_p)
1561 base_addr = build1 (ADDR_EXPR,
1562 build_pointer_type (TREE_TYPE (base)), base);
1563 t = offset;
1564 if (maybe_ne (bytepos, 0))
1566 if (t)
1567 t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
1568 build_int_cst (TREE_TYPE (t), bytepos));
1569 else
1570 t = size_int (bytepos);
1572 if (moff)
1574 if (t)
1575 t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
1576 fold_convert (TREE_TYPE (t), moff));
1577 else
1578 t = fold_convert (sizetype, moff);
1580 t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, true,
1581 GSI_SAME_STMT);
1582 base_addr = force_gimple_operand_gsi (gsi, base_addr, true, NULL_TREE, true,
1583 GSI_SAME_STMT);
1584 instrument_pointer_overflow (gsi, base_addr, t);
1587 /* Build an ubsan builtin call for the signed-integer-overflow
1588 sanitization. CODE says what kind of builtin are we building,
1589 LOC is a location, LHSTYPE is the type of LHS, OP0 and OP1
1590 are operands of the binary operation. */
1592 tree
1593 ubsan_build_overflow_builtin (tree_code code, location_t loc, tree lhstype,
1594 tree op0, tree op1, tree *datap)
1596 if (flag_sanitize_trap & SANITIZE_SI_OVERFLOW)
1597 return build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
1599 tree data;
1600 if (datap && *datap)
1601 data = *datap;
1602 else
1603 data = ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
1604 ubsan_type_descriptor (lhstype), NULL_TREE,
1605 NULL_TREE);
1606 if (datap)
1607 *datap = data;
1608 enum built_in_function fn_code;
1610 switch (code)
1612 case PLUS_EXPR:
1613 fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
1614 ? BUILT_IN_UBSAN_HANDLE_ADD_OVERFLOW
1615 : BUILT_IN_UBSAN_HANDLE_ADD_OVERFLOW_ABORT;
1616 break;
1617 case MINUS_EXPR:
1618 fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
1619 ? BUILT_IN_UBSAN_HANDLE_SUB_OVERFLOW
1620 : BUILT_IN_UBSAN_HANDLE_SUB_OVERFLOW_ABORT;
1621 break;
1622 case MULT_EXPR:
1623 fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
1624 ? BUILT_IN_UBSAN_HANDLE_MUL_OVERFLOW
1625 : BUILT_IN_UBSAN_HANDLE_MUL_OVERFLOW_ABORT;
1626 break;
1627 case NEGATE_EXPR:
1628 fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
1629 ? BUILT_IN_UBSAN_HANDLE_NEGATE_OVERFLOW
1630 : BUILT_IN_UBSAN_HANDLE_NEGATE_OVERFLOW_ABORT;
1631 break;
1632 default:
1633 gcc_unreachable ();
1635 tree fn = builtin_decl_explicit (fn_code);
1636 return build_call_expr_loc (loc, fn, 2 + (code != NEGATE_EXPR),
1637 build_fold_addr_expr_loc (loc, data),
1638 ubsan_encode_value (op0, UBSAN_ENCODE_VALUE_RTL),
1640 ? ubsan_encode_value (op1,
1641 UBSAN_ENCODE_VALUE_RTL)
1642 : NULL_TREE);
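/* Illustration (added): for a signed int addition the returned tree is
   roughly the call expression

     __ubsan_handle_add_overflow (&data, (__UINTPTR_TYPE__) op0,
				  (__UINTPTR_TYPE__) op1);

   with the *_abort variant chosen when the check is not recoverable and a
   plain __builtin_trap () under -fsanitize-trap.  */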
1645 /* Perform the signed integer instrumentation. GSI is the iterator
 1646 pointing at the statement we are trying to instrument. */
1648 static void
1649 instrument_si_overflow (gimple_stmt_iterator gsi)
1651 gimple *stmt = gsi_stmt (gsi);
1652 tree_code code = gimple_assign_rhs_code (stmt);
1653 tree lhs = gimple_assign_lhs (stmt);
1654 tree lhstype = TREE_TYPE (lhs);
1655 tree lhsinner = VECTOR_TYPE_P (lhstype) ? TREE_TYPE (lhstype) : lhstype;
1656 tree a, b;
1657 gimple *g;
1659 /* If this is not a signed operation, don't instrument anything here.
1660 Also punt on bit-fields. */
1661 if (!INTEGRAL_TYPE_P (lhsinner)
1662 || TYPE_OVERFLOW_WRAPS (lhsinner)
1663 || (TREE_CODE (lhsinner) != BITINT_TYPE
1664 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (lhsinner)),
1665 TYPE_PRECISION (lhsinner))))
1666 return;
1668 switch (code)
1670 case MINUS_EXPR:
1671 case PLUS_EXPR:
1672 case MULT_EXPR:
1673 /* Transform
1674 i = u {+,-,*} 5;
1675 into
1676 i = UBSAN_CHECK_{ADD,SUB,MUL} (u, 5); */
1677 a = gimple_assign_rhs1 (stmt);
1678 b = gimple_assign_rhs2 (stmt);
1679 g = gimple_build_call_internal (code == PLUS_EXPR
1680 ? IFN_UBSAN_CHECK_ADD
1681 : code == MINUS_EXPR
1682 ? IFN_UBSAN_CHECK_SUB
1683 : IFN_UBSAN_CHECK_MUL, 2, a, b);
1684 gimple_call_set_lhs (g, lhs);
1685 gsi_replace (&gsi, g, true);
1686 break;
1687 case NEGATE_EXPR:
1688 /* Represent i = -u;
1690 i = UBSAN_CHECK_SUB (0, u); */
1691 a = build_zero_cst (lhstype);
1692 b = gimple_assign_rhs1 (stmt);
1693 g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
1694 gimple_call_set_lhs (g, lhs);
1695 gsi_replace (&gsi, g, true);
1696 break;
1697 case ABS_EXPR:
1698 /* Transform i = ABS_EXPR<u>;
1699 into
1700 _N = UBSAN_CHECK_SUB (0, u);
1701 i = ABS_EXPR<_N>; */
1702 a = build_zero_cst (lhstype);
1703 b = gimple_assign_rhs1 (stmt);
1704 g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
1705 a = make_ssa_name (lhstype);
1706 gimple_call_set_lhs (g, a);
1707 gimple_set_location (g, gimple_location (stmt));
1708 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1709 gimple_assign_set_rhs1 (stmt, a);
1710 update_stmt (stmt);
1711 break;
1712 default:
1713 break;
1717 /* Instrument loads from (non-bitfield) bool and C++ enum values
 1718 to check whether the memory value is outside the range of valid
 1719 values for the type. */
1721 static void
1722 instrument_bool_enum_load (gimple_stmt_iterator *gsi)
1724 gimple *stmt = gsi_stmt (*gsi);
1725 tree rhs = gimple_assign_rhs1 (stmt);
1726 tree type = TREE_TYPE (rhs);
1727 tree minv = NULL_TREE, maxv = NULL_TREE;
1729 if (TREE_CODE (type) == BOOLEAN_TYPE
1730 && sanitize_flags_p (SANITIZE_BOOL))
1732 minv = boolean_false_node;
1733 maxv = boolean_true_node;
1735 else if (TREE_CODE (type) == ENUMERAL_TYPE
1736 && sanitize_flags_p (SANITIZE_ENUM)
1737 && TREE_TYPE (type) != NULL_TREE
1738 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
1739 && (TYPE_PRECISION (TREE_TYPE (type))
1740 < GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (type))))
1742 minv = TYPE_MIN_VALUE (TREE_TYPE (type));
1743 maxv = TYPE_MAX_VALUE (TREE_TYPE (type));
1745 else
1746 return;
1748 int modebitsize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
1749 poly_int64 bitsize, bitpos;
1750 tree offset;
1751 machine_mode mode;
1752 int volatilep = 0, reversep, unsignedp = 0;
1753 tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
1754 &unsignedp, &reversep, &volatilep);
1755 tree utype = build_nonstandard_integer_type (modebitsize, 1);
1757 if ((VAR_P (base) && DECL_HARD_REGISTER (base))
1758 || !multiple_p (bitpos, modebitsize)
1759 || maybe_ne (bitsize, modebitsize)
1760 || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (utype)) != modebitsize
1761 || TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
1762 return;
1764 bool ends_bb = stmt_ends_bb_p (stmt);
1765 location_t loc = gimple_location (stmt);
1766 tree lhs = gimple_assign_lhs (stmt);
1767 tree ptype = build_pointer_type (TREE_TYPE (rhs));
1768 tree atype = reference_alias_ptr_type (rhs);
1769 gimple *g = gimple_build_assign (make_ssa_name (ptype),
1770 build_fold_addr_expr (rhs));
1771 gimple_set_location (g, loc);
1772 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1773 tree mem = build2 (MEM_REF, utype, gimple_assign_lhs (g),
1774 build_int_cst (atype, 0));
1775 tree urhs = make_ssa_name (utype);
1776 if (ends_bb)
1778 gimple_assign_set_lhs (stmt, urhs);
1779 g = gimple_build_assign (lhs, NOP_EXPR, urhs);
1780 gimple_set_location (g, loc);
1781 edge e = find_fallthru_edge (gimple_bb (stmt)->succs);
1782 gsi_insert_on_edge_immediate (e, g);
1783 gimple_assign_set_rhs_from_tree (gsi, mem);
1784 update_stmt (stmt);
1785 *gsi = gsi_for_stmt (g);
1786 g = stmt;
1788 else
1790 g = gimple_build_assign (urhs, mem);
1791 gimple_set_location (g, loc);
1792 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1794 minv = fold_convert (utype, minv);
1795 maxv = fold_convert (utype, maxv);
1796 if (!integer_zerop (minv))
1798 g = gimple_build_assign (make_ssa_name (utype), MINUS_EXPR, urhs, minv);
1799 gimple_set_location (g, loc);
1800 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1803 gimple_stmt_iterator gsi2 = *gsi;
1804 basic_block then_bb, fallthru_bb;
1805 *gsi = create_cond_insert_point (gsi, true, false, true,
1806 &then_bb, &fallthru_bb);
1807 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
1808 int_const_binop (MINUS_EXPR, maxv, minv),
1809 NULL_TREE, NULL_TREE);
1810 gimple_set_location (g, loc);
1811 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1813 if (!ends_bb)
1815 gimple_assign_set_rhs_with_ops (&gsi2, NOP_EXPR, urhs);
1816 update_stmt (stmt);
1819 gsi2 = gsi_after_labels (then_bb);
1820 if (flag_sanitize_trap & (TREE_CODE (type) == BOOLEAN_TYPE
1821 ? SANITIZE_BOOL : SANITIZE_ENUM))
1822 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
1823 else
1825 tree data = ubsan_create_data ("__ubsan_invalid_value_data", 1, &loc,
1826 ubsan_type_descriptor (type), NULL_TREE,
1827 NULL_TREE);
1828 data = build_fold_addr_expr_loc (loc, data);
1829 enum built_in_function bcode
1830 = (flag_sanitize_recover & (TREE_CODE (type) == BOOLEAN_TYPE
1831 ? SANITIZE_BOOL : SANITIZE_ENUM))
1832 ? BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE
1833 : BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE_ABORT;
1834 tree fn = builtin_decl_explicit (bcode);
1836 tree val = ubsan_encode_value (urhs, UBSAN_ENCODE_VALUE_GIMPLE);
1837 val = force_gimple_operand_gsi (&gsi2, val, true, NULL_TREE, true,
1838 GSI_SAME_STMT);
1839 g = gimple_build_call (fn, 2, data, val);
1841 gimple_set_location (g, loc);
1842 gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
1843 ubsan_create_edge (g);
1844 *gsi = gsi_for_stmt (stmt);
1847 /* Determine whether we can propagate the given LOCATION to the ubsan_data
1848 descriptor in order to use new-style handlers. Libubsan uses heuristics to
1849 distinguish old from new style and relies on the following properties of the filename (see the note below):
1851 a) The location's filename must not be NULL.
1852 b) The location's filename must not be equal to "".
1853 c) The location's filename must not be equal to "\1".
1854 d) Neither of the first two bytes of the filename may be '\xff'. */
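/* For illustration: an expanded location with file "t.c" satisfies all four
   properties and permits the new-style, location-carrying handlers; a NULL
   or empty filename, the "\1" placeholder, or leading '\xff' bytes forces
   the old-style handlers instead.  */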
1856 static bool
1857 ubsan_use_new_style_p (location_t loc)
1859 if (loc == UNKNOWN_LOCATION)
1860 return false;
1862 expanded_location xloc = expand_location (loc);
1863 if (xloc.file == NULL || startswith (xloc.file, "\1")
1864 || xloc.file[0] == '\0' || xloc.file[0] == '\xff'
1865 || xloc.file[1] == '\xff')
1866 return false;
1868 return true;
1871 /* Instrument a floating-point to integer conversion. TYPE is the integer
1872 destination type, EXPR is the floating-point expression. */
1874 tree
1875 ubsan_instrument_float_cast (location_t loc, tree type, tree expr)
1877 tree expr_type = TREE_TYPE (expr);
1878 tree t, tt, fn, min, max;
1879 machine_mode mode = TYPE_MODE (expr_type);
1880 int prec = TYPE_PRECISION (type);
1881 bool uns_p = TYPE_UNSIGNED (type);
1882 if (loc == UNKNOWN_LOCATION)
1883 loc = input_location;
1885 /* Float to integer conversion first truncates toward zero, so
1886 even signed char c = 127.875f; is not problematic.
1887 Therefore, we should complain only if EXPR is unordered, less than
1888 or equal to TYPE_MIN_VALUE - 1.0, or greater than or equal to
1889 TYPE_MAX_VALUE + 1.0. */
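/* Worked example (illustrative only): for TYPE == signed char (prec == 8)
   the bounds computed below make the check flag EXPR iff it is NaN,
   <= -129.0 or >= 128.0; 127.875f itself truncates to 127 and passes.  */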
1890 if (REAL_MODE_FORMAT (mode)->b == 2)
1892 /* For maximum, TYPE_MAX_VALUE might not be representable
1893 in EXPR_TYPE, e.g. if TYPE is 64-bit long long and
1894 EXPR_TYPE is IEEE single float, but TYPE_MAX_VALUE + 1.0 is
1895 either representable or infinity. */
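/* E.g. (illustrative): for a 64-bit signed TYPE and IEEE single EXPR_TYPE,
   maxval becomes 2^63, which is exactly representable as a float even
   though TYPE_MAX_VALUE (2^63 - 1) is not.  */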
1896 REAL_VALUE_TYPE maxval = dconst1;
1897 SET_REAL_EXP (&maxval, REAL_EXP (&maxval) + prec - !uns_p);
1898 real_convert (&maxval, mode, &maxval);
1899 max = build_real (expr_type, maxval);
1901 /* For unsigned, assume -1.0 is always representable. */
1902 if (uns_p)
1903 min = build_minus_one_cst (expr_type);
1904 else
1906 /* TYPE_MIN_VALUE is generally representable (or -inf),
1907 but TYPE_MIN_VALUE - 1.0 might not be. */
1908 REAL_VALUE_TYPE minval = dconstm1, minval2;
1909 SET_REAL_EXP (&minval, REAL_EXP (&minval) + prec - 1);
1910 real_convert (&minval, mode, &minval);
1911 real_arithmetic (&minval2, MINUS_EXPR, &minval, &dconst1);
1912 real_convert (&minval2, mode, &minval2);
1913 if (real_compare (EQ_EXPR, &minval, &minval2)
1914 && !real_isinf (&minval))
1916 /* If TYPE_MIN_VALUE - 1.0 is not representable and
1917 rounds to TYPE_MIN_VALUE, we need to subtract
1918 more. As REAL_MODE_FORMAT (mode)->p is the number
1919 of base digits, we want to subtract a number that
1920 will be 1 << (REAL_MODE_FORMAT (mode)->p - 1)
1921 times smaller than minval. */
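/* Worked example (illustrative only): for a 64-bit signed TYPE and IEEE
   double EXPR_TYPE (p == 53), minval is -2^63 and minval - 1.0 rounds
   back to -2^63, so the code below uses -2^63 - 2^11 instead, the next
   representable double below minval.  */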
1922 minval2 = dconst1;
1923 gcc_assert (prec > REAL_MODE_FORMAT (mode)->p);
1924 SET_REAL_EXP (&minval2,
1925 REAL_EXP (&minval2) + prec - 1
1926 - REAL_MODE_FORMAT (mode)->p + 1);
1927 real_arithmetic (&minval2, MINUS_EXPR, &minval, &minval2);
1928 real_convert (&minval2, mode, &minval2);
1930 min = build_real (expr_type, minval2);
1933 else if (REAL_MODE_FORMAT (mode)->b == 10)
1935 /* Buffer big enough for _Decimal128: up to 34 decimal digits, plus
1936 the minus sign, dot, 'e' and the exponent. */
1937 char buf[64];
1938 int p = REAL_MODE_FORMAT (mode)->p;
1939 REAL_VALUE_TYPE maxval, minval;
1941 /* Use mpfr_snprintf rounding to compute the smallest
1942 representable decimal number greater than or equal to
1943 1 << (prec - !uns_p). */
1944 auto_mpfr m (prec + 2);
1945 mpfr_set_ui_2exp (m, 1, prec - !uns_p, MPFR_RNDN);
1946 mpfr_snprintf (buf, sizeof buf, "%.*RUe", p - 1, (mpfr_srcptr) m);
1947 decimal_real_from_string (&maxval, buf);
1948 max = build_real (expr_type, maxval);
1950 /* For unsigned, assume -1.0 is always representable. */
1951 if (uns_p)
1952 min = build_minus_one_cst (expr_type);
1953 else
1955 /* Use mpfr_snprintf rounding to compute the largest
1956 representable decimal number less than or equal to
1957 (-1 << (prec - 1)) - 1. */
1958 mpfr_set_si_2exp (m, -1, prec - 1, MPFR_RNDN);
1959 mpfr_sub_ui (m, m, 1, MPFR_RNDN);
1960 mpfr_snprintf (buf, sizeof buf, "%.*RDe", p - 1, (mpfr_srcptr) m);
1961 decimal_real_from_string (&minval, buf);
1962 min = build_real (expr_type, minval);
1965 else
1966 return NULL_TREE;
1968 if (HONOR_NANS (mode))
1970 t = fold_build2 (UNLE_EXPR, boolean_type_node, expr, min);
1971 tt = fold_build2 (UNGE_EXPR, boolean_type_node, expr, max);
1973 else
1975 t = fold_build2 (LE_EXPR, boolean_type_node, expr, min);
1976 tt = fold_build2 (GE_EXPR, boolean_type_node, expr, max);
1978 t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, tt);
1979 if (integer_zerop (t))
1980 return NULL_TREE;
1982 if (flag_sanitize_trap & SANITIZE_FLOAT_CAST)
1983 fn = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
1984 else
1986 location_t *loc_ptr = NULL;
1987 unsigned num_locations = 0;
1988 /* Figure out if we can propagate location to ubsan_data and use new
1989 style handlers in libubsan. */
1990 if (ubsan_use_new_style_p (loc))
1992 loc_ptr = &loc;
1993 num_locations = 1;
1995 /* Create the __ubsan_handle_float_cast_overflow fn call. */
1996 tree data = ubsan_create_data ("__ubsan_float_cast_overflow_data",
1997 num_locations, loc_ptr,
1998 ubsan_type_descriptor (expr_type),
1999 ubsan_type_descriptor (type), NULL_TREE,
2000 NULL_TREE);
2001 enum built_in_function bcode
2002 = (flag_sanitize_recover & SANITIZE_FLOAT_CAST)
2003 ? BUILT_IN_UBSAN_HANDLE_FLOAT_CAST_OVERFLOW
2004 : BUILT_IN_UBSAN_HANDLE_FLOAT_CAST_OVERFLOW_ABORT;
2005 fn = builtin_decl_explicit (bcode);
2006 fn = build_call_expr_loc (loc, fn, 2,
2007 build_fold_addr_expr_loc (loc, data),
2008 ubsan_encode_value (expr));
2011 return fold_build3 (COND_EXPR, void_type_node, t, fn, integer_zero_node);
2014 /* Instrument values passed to function arguments with nonnull attribute. */
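/* Illustrative sketch (hypothetical declaration): given
     extern void f (void *) __attribute__ ((nonnull));
   a call f (p) is instrumented below with a p == NULL test whose failing
   branch traps or calls __ubsan_handle_nonnull_arg, passing the 1-based
   index of the offending argument.  */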
2016 static void
2017 instrument_nonnull_arg (gimple_stmt_iterator *gsi)
2019 gimple *stmt = gsi_stmt (*gsi);
2020 location_t loc[2];
2021 /* infer_nonnull_range needs flag_delete_null_pointer_checks set, while
2022 with nonnull sanitization enabled it is cleared, so set it temporarily. */
2023 int save_flag_delete_null_pointer_checks = flag_delete_null_pointer_checks;
2024 flag_delete_null_pointer_checks = 1;
2025 loc[0] = gimple_location (stmt);
2026 loc[1] = UNKNOWN_LOCATION;
2027 for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
2029 tree arg = gimple_call_arg (stmt, i);
2030 if (POINTER_TYPE_P (TREE_TYPE (arg))
2031 && infer_nonnull_range_by_attribute (stmt, arg))
2033 gimple *g;
2034 if (!is_gimple_val (arg))
2036 g = gimple_build_assign (make_ssa_name (TREE_TYPE (arg)), arg);
2037 gimple_set_location (g, loc[0]);
2038 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2039 arg = gimple_assign_lhs (g);
2042 basic_block then_bb, fallthru_bb;
2043 *gsi = create_cond_insert_point (gsi, true, false, true,
2044 &then_bb, &fallthru_bb);
2045 g = gimple_build_cond (EQ_EXPR, arg,
2046 build_zero_cst (TREE_TYPE (arg)),
2047 NULL_TREE, NULL_TREE);
2048 gimple_set_location (g, loc[0]);
2049 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2051 *gsi = gsi_after_labels (then_bb);
2052 if (flag_sanitize_trap & SANITIZE_NONNULL_ATTRIBUTE)
2053 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
2054 else
2056 tree data = ubsan_create_data ("__ubsan_nonnull_arg_data",
2057 2, loc, NULL_TREE,
2058 build_int_cst (integer_type_node,
2059 i + 1),
2060 NULL_TREE);
2061 data = build_fold_addr_expr_loc (loc[0], data);
2062 enum built_in_function bcode
2063 = (flag_sanitize_recover & SANITIZE_NONNULL_ATTRIBUTE)
2064 ? BUILT_IN_UBSAN_HANDLE_NONNULL_ARG
2065 : BUILT_IN_UBSAN_HANDLE_NONNULL_ARG_ABORT;
2066 tree fn = builtin_decl_explicit (bcode);
2068 g = gimple_build_call (fn, 1, data);
2070 gimple_set_location (g, loc[0]);
2071 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2072 ubsan_create_edge (g);
2074 *gsi = gsi_for_stmt (stmt);
2076 flag_delete_null_pointer_checks = save_flag_delete_null_pointer_checks;
2079 /* Instrument returns in functions with returns_nonnull attribute. */
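/* Similarly (illustration only): in a function declared with
   __attribute__ ((returns_nonnull)), each returned pointer value is guarded
   by a NULL test that traps or calls __ubsan_handle_nonnull_return_v1.  */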
2081 static void
2082 instrument_nonnull_return (gimple_stmt_iterator *gsi)
2084 greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
2085 location_t loc[2];
2086 tree arg = gimple_return_retval (stmt);
2087 /* infer_nonnull_range needs flag_delete_null_pointer_checks set, while
2088 with nonnull return sanitization enabled it is cleared, so set it temporarily. */
2089 int save_flag_delete_null_pointer_checks = flag_delete_null_pointer_checks;
2090 flag_delete_null_pointer_checks = 1;
2091 loc[0] = gimple_location (stmt);
2092 loc[1] = UNKNOWN_LOCATION;
2093 if (arg
2094 && POINTER_TYPE_P (TREE_TYPE (arg))
2095 && is_gimple_val (arg)
2096 && infer_nonnull_range_by_attribute (stmt, arg))
2098 basic_block then_bb, fallthru_bb;
2099 *gsi = create_cond_insert_point (gsi, true, false, true,
2100 &then_bb, &fallthru_bb);
2101 gimple *g = gimple_build_cond (EQ_EXPR, arg,
2102 build_zero_cst (TREE_TYPE (arg)),
2103 NULL_TREE, NULL_TREE);
2104 gimple_set_location (g, loc[0]);
2105 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2107 *gsi = gsi_after_labels (then_bb);
2108 if (flag_sanitize_trap & SANITIZE_RETURNS_NONNULL_ATTRIBUTE)
2109 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
2110 else
2112 tree data = ubsan_create_data ("__ubsan_nonnull_return_data",
2113 1, &loc[1], NULL_TREE, NULL_TREE);
2114 data = build_fold_addr_expr_loc (loc[0], data);
2115 tree data2 = ubsan_create_data ("__ubsan_nonnull_return_data",
2116 1, &loc[0], NULL_TREE, NULL_TREE);
2117 data2 = build_fold_addr_expr_loc (loc[0], data2);
2118 enum built_in_function bcode
2119 = (flag_sanitize_recover & SANITIZE_RETURNS_NONNULL_ATTRIBUTE)
2120 ? BUILT_IN_UBSAN_HANDLE_NONNULL_RETURN_V1
2121 : BUILT_IN_UBSAN_HANDLE_NONNULL_RETURN_V1_ABORT;
2122 tree fn = builtin_decl_explicit (bcode);
2124 g = gimple_build_call (fn, 2, data, data2);
2126 gimple_set_location (g, loc[0]);
2127 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2128 ubsan_create_edge (g);
2129 *gsi = gsi_for_stmt (stmt);
2131 flag_delete_null_pointer_checks = save_flag_delete_null_pointer_checks;
2134 /* Instrument memory references. Here we check whether the pointer
2135 points to an out-of-bounds location. */
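/* Illustrative sketch (hypothetical example): for int a[10]; ... a[i] = 0;
   the store is instrumented below roughly as
     UBSAN_OBJECT_SIZE (&a[i], bytes from &a to one past the access,
                        __builtin_dynamic_object_size (&a, 0), UBSAN_STORE_OF)
   and the internal call, expanded later, diagnoses the access whenever the
   second argument exceeds the third.  */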
2137 static void
2138 instrument_object_size (gimple_stmt_iterator *gsi, tree t, bool is_lhs)
2140 gimple *stmt = gsi_stmt (*gsi);
2141 location_t loc = gimple_location (stmt);
2142 tree type;
2143 tree index = NULL_TREE;
2144 HOST_WIDE_INT size_in_bytes;
2146 type = TREE_TYPE (t);
2147 if (VOID_TYPE_P (type))
2148 return;
2150 switch (TREE_CODE (t))
2152 case COMPONENT_REF:
2153 if (TREE_CODE (t) == COMPONENT_REF
2154 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
2156 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
2157 t = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (t, 0),
2158 repr, TREE_OPERAND (t, 2));
2160 break;
2161 case ARRAY_REF:
2162 index = TREE_OPERAND (t, 1);
2163 break;
2164 case INDIRECT_REF:
2165 case MEM_REF:
2166 case VAR_DECL:
2167 case PARM_DECL:
2168 case RESULT_DECL:
2169 break;
2170 default:
2171 return;
2174 size_in_bytes = int_size_in_bytes (type);
2175 if (size_in_bytes <= 0)
2176 return;
2178 poly_int64 bitsize, bitpos;
2179 tree offset;
2180 machine_mode mode;
2181 int volatilep = 0, reversep, unsignedp = 0;
2182 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
2183 &unsignedp, &reversep, &volatilep);
2185 if (!multiple_p (bitpos, BITS_PER_UNIT)
2186 || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
2187 return;
2189 bool decl_p = DECL_P (inner);
2190 tree base;
2191 if (decl_p)
2193 if ((VAR_P (inner)
2194 || TREE_CODE (inner) == PARM_DECL
2195 || TREE_CODE (inner) == RESULT_DECL)
2196 && DECL_REGISTER (inner))
2197 return;
2198 if (t == inner && !is_global_var (t))
2199 return;
2200 base = inner;
2202 else if (TREE_CODE (inner) == MEM_REF)
2203 base = TREE_OPERAND (inner, 0);
2204 else
2205 return;
2206 tree ptr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
2208 while (TREE_CODE (base) == SSA_NAME)
2210 gimple *def_stmt = SSA_NAME_DEF_STMT (base);
2211 if (gimple_assign_ssa_name_copy_p (def_stmt)
2212 || (gimple_assign_cast_p (def_stmt)
2213 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
2214 || (is_gimple_assign (def_stmt)
2215 && gimple_assign_rhs_code (def_stmt) == POINTER_PLUS_EXPR))
2217 tree rhs1 = gimple_assign_rhs1 (def_stmt);
2218 if (TREE_CODE (rhs1) == SSA_NAME
2219 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
2220 break;
2221 else
2222 base = rhs1;
2224 else
2225 break;
2228 if (!POINTER_TYPE_P (TREE_TYPE (base)) && !DECL_P (base))
2229 return;
2231 tree sizet;
2232 tree base_addr = base;
2233 gimple *bos_stmt = NULL;
2234 if (decl_p)
2235 base_addr = build1 (ADDR_EXPR,
2236 build_pointer_type (TREE_TYPE (base)), base);
2237 if (compute_builtin_object_size (base_addr, OST_DYNAMIC, &sizet))
2239 else if (optimize)
2241 if (LOCATION_LOCUS (loc) == UNKNOWN_LOCATION)
2242 loc = input_location;
2243 /* Generate __builtin_dynamic_object_size call. */
2244 sizet = builtin_decl_explicit (BUILT_IN_DYNAMIC_OBJECT_SIZE);
2245 sizet = build_call_expr_loc (loc, sizet, 2, base_addr,
2246 integer_zero_node);
2247 sizet = force_gimple_operand_gsi (gsi, sizet, false, NULL_TREE, true,
2248 GSI_SAME_STMT);
2249 /* If the call above didn't end up being an integer constant, go one
2250 statement back and get the __builtin_dynamic_object_size stmt. Save it;
2251 we might need it later. */
2252 if (SSA_VAR_P (sizet))
2254 gsi_prev (gsi);
2255 bos_stmt = gsi_stmt (*gsi);
2257 /* Move on to where we were. */
2258 gsi_next (gsi);
2261 else
2262 return;
2264 /* Generate UBSAN_OBJECT_SIZE (ptr, ptr+sizeof(*ptr)-base, objsize, ckind)
2265 call. */
2266 /* ptr + sizeof (*ptr) - base */
2267 t = fold_build2 (MINUS_EXPR, sizetype,
2268 fold_convert (pointer_sized_int_node, ptr),
2269 fold_convert (pointer_sized_int_node, base_addr));
2270 t = fold_build2 (PLUS_EXPR, sizetype, t, TYPE_SIZE_UNIT (type));
2272 /* Perhaps we can omit the check. */
2273 if (TREE_CODE (t) == INTEGER_CST
2274 && TREE_CODE (sizet) == INTEGER_CST
2275 && tree_int_cst_le (t, sizet))
2276 return;
2278 if (index != NULL_TREE
2279 && TREE_CODE (index) == SSA_NAME
2280 && TREE_CODE (sizet) == INTEGER_CST)
2282 gimple *def = SSA_NAME_DEF_STMT (index);
2283 if (is_gimple_assign (def)
2284 && gimple_assign_rhs_code (def) == BIT_AND_EXPR
2285 && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
2287 tree cst = gimple_assign_rhs2 (def);
2288 tree sz = fold_build2 (EXACT_DIV_EXPR, sizetype, sizet,
2289 TYPE_SIZE_UNIT (type));
2290 if (tree_int_cst_sgn (cst) >= 0
2291 && tree_int_cst_lt (cst, sz))
2292 return;
2296 if (DECL_P (base)
2297 && decl_function_context (base) == current_function_decl
2298 && !TREE_ADDRESSABLE (base))
2299 mark_addressable (base);
2301 if (bos_stmt
2302 && gimple_call_builtin_p (bos_stmt, BUILT_IN_DYNAMIC_OBJECT_SIZE))
2303 ubsan_create_edge (bos_stmt);
2305 /* We have to emit the check. */
2306 t = force_gimple_operand_gsi (gsi, t, true, NULL_TREE, true,
2307 GSI_SAME_STMT);
2308 ptr = force_gimple_operand_gsi (gsi, ptr, true, NULL_TREE, true,
2309 GSI_SAME_STMT);
2310 tree ckind = build_int_cst (unsigned_char_type_node,
2311 is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF);
2312 gimple *g = gimple_build_call_internal (IFN_UBSAN_OBJECT_SIZE, 4,
2313 ptr, t, sizet, ckind);
2314 gimple_set_location (g, loc);
2315 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2318 /* Instrument values passed to builtin functions. */
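/* Illustrative sketch: __builtin_ctz (x) and __builtin_clz (x) are
   undefined for x == 0, so a zero test is inserted before such calls below;
   its failing branch traps or calls __ubsan_handle_invalid_builtin with a
   kind byte distinguishing ctz (0) from clz (1).  */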
2320 static void
2321 instrument_builtin (gimple_stmt_iterator *gsi)
2323 gimple *stmt = gsi_stmt (*gsi);
2324 location_t loc = gimple_location (stmt);
2325 tree arg;
2326 enum built_in_function fcode
2327 = DECL_FUNCTION_CODE (gimple_call_fndecl (stmt));
2328 int kind = 0;
2329 switch (fcode)
2331 CASE_INT_FN (BUILT_IN_CLZ):
2332 kind = 1;
2333 gcc_fallthrough ();
2334 CASE_INT_FN (BUILT_IN_CTZ):
2335 arg = gimple_call_arg (stmt, 0);
2336 if (!integer_nonzerop (arg))
2338 gimple *g;
2339 if (!is_gimple_val (arg))
2341 g = gimple_build_assign (make_ssa_name (TREE_TYPE (arg)), arg);
2342 gimple_set_location (g, loc);
2343 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2344 arg = gimple_assign_lhs (g);
2347 basic_block then_bb, fallthru_bb;
2348 *gsi = create_cond_insert_point (gsi, true, false, true,
2349 &then_bb, &fallthru_bb);
2350 g = gimple_build_cond (EQ_EXPR, arg,
2351 build_zero_cst (TREE_TYPE (arg)),
2352 NULL_TREE, NULL_TREE);
2353 gimple_set_location (g, loc);
2354 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2356 *gsi = gsi_after_labels (then_bb);
2357 if (flag_sanitize_trap & SANITIZE_BUILTIN)
2358 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
2359 else
2361 tree t = build_int_cst (unsigned_char_type_node, kind);
2362 tree data = ubsan_create_data ("__ubsan_builtin_data",
2363 1, &loc, NULL_TREE, t, NULL_TREE);
2364 data = build_fold_addr_expr_loc (loc, data);
2365 enum built_in_function bcode
2366 = (flag_sanitize_recover & SANITIZE_BUILTIN)
2367 ? BUILT_IN_UBSAN_HANDLE_INVALID_BUILTIN
2368 : BUILT_IN_UBSAN_HANDLE_INVALID_BUILTIN_ABORT;
2369 tree fn = builtin_decl_explicit (bcode);
2371 g = gimple_build_call (fn, 1, data);
2373 gimple_set_location (g, loc);
2374 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2375 ubsan_create_edge (g);
2377 *gsi = gsi_for_stmt (stmt);
2378 break;
2379 default:
2380 break;
2384 namespace {
2386 const pass_data pass_data_ubsan =
2388 GIMPLE_PASS, /* type */
2389 "ubsan", /* name */
2390 OPTGROUP_NONE, /* optinfo_flags */
2391 TV_TREE_UBSAN, /* tv_id */
2392 ( PROP_cfg | PROP_ssa ), /* properties_required */
2393 0, /* properties_provided */
2394 0, /* properties_destroyed */
2395 0, /* todo_flags_start */
2396 TODO_update_ssa, /* todo_flags_finish */
2399 class pass_ubsan : public gimple_opt_pass
2401 public:
2402 pass_ubsan (gcc::context *ctxt)
2403 : gimple_opt_pass (pass_data_ubsan, ctxt)
2406 /* opt_pass methods: */
2407 bool gate (function *) final override
2409 return sanitize_flags_p ((SANITIZE_NULL | SANITIZE_SI_OVERFLOW
2410 | SANITIZE_BOOL | SANITIZE_ENUM
2411 | SANITIZE_ALIGNMENT
2412 | SANITIZE_NONNULL_ATTRIBUTE
2413 | SANITIZE_RETURNS_NONNULL_ATTRIBUTE
2414 | SANITIZE_OBJECT_SIZE
2415 | SANITIZE_POINTER_OVERFLOW
2416 | SANITIZE_BUILTIN));
2419 unsigned int execute (function *) final override;
2421 }; // class pass_ubsan
2423 unsigned int
2424 pass_ubsan::execute (function *fun)
2426 basic_block bb;
2427 gimple_stmt_iterator gsi;
2428 unsigned int ret = 0;
2430 initialize_sanitizer_builtins ();
2432 FOR_EACH_BB_FN (bb, fun)
2434 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2436 gimple *stmt = gsi_stmt (gsi);
2437 if (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
2439 gsi_next (&gsi);
2440 continue;
2443 if ((sanitize_flags_p (SANITIZE_SI_OVERFLOW, fun->decl))
2444 && is_gimple_assign (stmt))
2445 instrument_si_overflow (gsi);
2447 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT, fun->decl))
2449 if (gimple_store_p (stmt))
2450 instrument_null (gsi, gimple_get_lhs (stmt), true);
2451 if (gimple_assign_single_p (stmt))
2452 instrument_null (gsi, gimple_assign_rhs1 (stmt), false);
2453 if (is_gimple_call (stmt))
2455 unsigned args_num = gimple_call_num_args (stmt);
2456 for (unsigned i = 0; i < args_num; ++i)
2458 tree arg = gimple_call_arg (stmt, i);
2459 if (is_gimple_reg (arg) || is_gimple_min_invariant (arg))
2460 continue;
2461 instrument_null (gsi, arg, false);
2466 if (sanitize_flags_p (SANITIZE_BOOL | SANITIZE_ENUM, fun->decl)
2467 && gimple_assign_load_p (stmt))
2469 instrument_bool_enum_load (&gsi);
2470 bb = gimple_bb (stmt);
2473 if (sanitize_flags_p (SANITIZE_NONNULL_ATTRIBUTE, fun->decl)
2474 && is_gimple_call (stmt)
2475 && !gimple_call_internal_p (stmt))
2477 instrument_nonnull_arg (&gsi);
2478 bb = gimple_bb (stmt);
2481 if (sanitize_flags_p (SANITIZE_BUILTIN, fun->decl)
2482 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2484 instrument_builtin (&gsi);
2485 bb = gimple_bb (stmt);
2488 if (sanitize_flags_p (SANITIZE_RETURNS_NONNULL_ATTRIBUTE, fun->decl)
2489 && gimple_code (stmt) == GIMPLE_RETURN)
2491 instrument_nonnull_return (&gsi);
2492 bb = gimple_bb (stmt);
2495 if (sanitize_flags_p (SANITIZE_OBJECT_SIZE, fun->decl))
2497 if (gimple_store_p (stmt))
2498 instrument_object_size (&gsi, gimple_get_lhs (stmt), true);
2499 if (gimple_assign_load_p (stmt))
2500 instrument_object_size (&gsi, gimple_assign_rhs1 (stmt),
2501 false);
2502 if (is_gimple_call (stmt))
2504 unsigned args_num = gimple_call_num_args (stmt);
2505 for (unsigned i = 0; i < args_num; ++i)
2507 tree arg = gimple_call_arg (stmt, i);
2508 if (is_gimple_reg (arg) || is_gimple_min_invariant (arg))
2509 continue;
2510 instrument_object_size (&gsi, arg, false);
2515 if (sanitize_flags_p (SANITIZE_POINTER_OVERFLOW, fun->decl))
2517 if (is_gimple_assign (stmt)
2518 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2519 instrument_pointer_overflow (&gsi,
2520 gimple_assign_rhs1 (stmt),
2521 gimple_assign_rhs2 (stmt));
2522 if (gimple_store_p (stmt))
2523 maybe_instrument_pointer_overflow (&gsi,
2524 gimple_get_lhs (stmt));
2525 if (gimple_assign_single_p (stmt))
2526 maybe_instrument_pointer_overflow (&gsi,
2527 gimple_assign_rhs1 (stmt));
2528 if (is_gimple_call (stmt))
2530 unsigned args_num = gimple_call_num_args (stmt);
2531 for (unsigned i = 0; i < args_num; ++i)
2533 tree arg = gimple_call_arg (stmt, i);
2534 if (is_gimple_reg (arg))
2535 continue;
2536 maybe_instrument_pointer_overflow (&gsi, arg);
2541 gsi_next (&gsi);
2543 if (gimple_purge_dead_eh_edges (bb))
2544 ret = TODO_cleanup_cfg;
2546 return ret;
2549 } // anon namespace
2551 gimple_opt_pass *
2552 make_pass_ubsan (gcc::context *ctxt)
2554 return new pass_ubsan (ctxt);
2557 #include "gt-ubsan.h"