gcc/ubsan.cc
1 /* UndefinedBehaviorSanitizer, undefined behavior detector.
2 Copyright (C) 2013-2024 Free Software Foundation, Inc.
3 Contributed by Marek Polacek <polacek@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "c-family/c-common.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "cgraph.h"
34 #include "tree-pretty-print.h"
35 #include "stor-layout.h"
36 #include "cfganal.h"
37 #include "gimple-iterator.h"
38 #include "output.h"
39 #include "cfgloop.h"
40 #include "ubsan.h"
41 #include "expr.h"
42 #include "stringpool.h"
43 #include "attribs.h"
44 #include "asan.h"
45 #include "gimplify-me.h"
46 #include "dfp.h"
47 #include "builtins.h"
48 #include "tree-object-size.h"
49 #include "tree-cfg.h"
50 #include "gimple-fold.h"
51 #include "varasm.h"
52 #include "realmpfr.h"
53 #include "target.h"
54 #include "langhooks.h"
56 /* Map from a tree to a VAR_DECL tree. */
58 struct GTY((for_user)) tree_type_map {
59 struct tree_map_base type;
60 tree decl;
63 struct tree_type_map_cache_hasher : ggc_cache_ptr_hash<tree_type_map>
65 static inline hashval_t
66 hash (tree_type_map *t)
68 return TYPE_UID (t->type.from);
71 static inline bool
72 equal (tree_type_map *a, tree_type_map *b)
74 return a->type.from == b->type.from;
77 static int
78 keep_cache_entry (tree_type_map *&m)
80 return ggc_marked_p (m->type.from);
84 static GTY ((cache))
85 hash_table<tree_type_map_cache_hasher> *decl_tree_for_type;
87 /* Look up a VAR_DECL for TYPE, and return it if we find one. */
89 static tree
90 decl_for_type_lookup (tree type)
92 /* If the hash table is not initialized yet, create it now. */
93 if (decl_tree_for_type == NULL)
95 decl_tree_for_type
96 = hash_table<tree_type_map_cache_hasher>::create_ggc (10);
97 /* That also means we don't have to bother with the lookup. */
98 return NULL_TREE;
101 struct tree_type_map *h, in;
102 in.type.from = type;
104 h = decl_tree_for_type->find_with_hash (&in, TYPE_UID (type));
105 return h ? h->decl : NULL_TREE;
108 /* Insert a mapping TYPE->DECL in the VAR_DECL for type hashtable. */
110 static void
111 decl_for_type_insert (tree type, tree decl)
113 struct tree_type_map *h;
115 h = ggc_alloc<tree_type_map> ();
116 h->type.from = type;
117 h->decl = decl;
118 *decl_tree_for_type->find_slot_with_hash (h, TYPE_UID (type), INSERT) = h;
121 /* Helper routine, which encodes a value in the pointer_sized_int_node.
122 Arguments with precision <= POINTER_SIZE are passed directly,
123 the rest is passed by reference. T is a value we are to encode.
124 PHASE determines when this function is called. */
126 tree
127 ubsan_encode_value (tree t, enum ubsan_encode_value_phase phase)
129 tree type = TREE_TYPE (t);
130 if (TREE_CODE (type) == BITINT_TYPE)
132 if (TYPE_PRECISION (type) <= POINTER_SIZE)
134 type = pointer_sized_int_node;
135 t = fold_build1 (NOP_EXPR, type, t);
137 else
139 if (TYPE_PRECISION (type) > MAX_FIXED_MODE_SIZE)
140 return build_zero_cst (pointer_sized_int_node);
141 type = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
142 TYPE_UNSIGNED (type));
143 t = fold_build1 (NOP_EXPR, type, t);
146 scalar_mode mode = SCALAR_TYPE_MODE (type);
147 const unsigned int bitsize = GET_MODE_BITSIZE (mode);
148 if (bitsize <= POINTER_SIZE)
149 switch (TREE_CODE (type))
151 case BOOLEAN_TYPE:
152 case ENUMERAL_TYPE:
153 case INTEGER_TYPE:
154 return fold_build1 (NOP_EXPR, pointer_sized_int_node, t);
155 case REAL_TYPE:
157 tree itype = build_nonstandard_integer_type (bitsize, true);
158 t = fold_build1 (VIEW_CONVERT_EXPR, itype, t);
159 return fold_convert (pointer_sized_int_node, t);
161 default:
162 gcc_unreachable ();
164 else
166 if (!DECL_P (t) || !TREE_ADDRESSABLE (t))
168 /* The reason for this is that we don't want to pessimize
169 code by making vars unnecessarily addressable. */
170 tree var;
171 if (phase != UBSAN_ENCODE_VALUE_GENERIC)
173 var = create_tmp_var (type);
174 mark_addressable (var);
176 else
178 var = create_tmp_var_raw (type);
179 TREE_ADDRESSABLE (var) = 1;
180 DECL_CONTEXT (var) = current_function_decl;
182 if (phase == UBSAN_ENCODE_VALUE_RTL)
184 rtx mem = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
185 type);
186 SET_DECL_RTL (var, mem);
187 expand_assignment (var, t, false);
188 return build_fold_addr_expr (var);
190 if (phase != UBSAN_ENCODE_VALUE_GENERIC)
192 tree tem = build2 (MODIFY_EXPR, void_type_node, var, t);
193 t = build_fold_addr_expr (var);
194 return build2 (COMPOUND_EXPR, TREE_TYPE (t), tem, t);
196 else
198 var = build4 (TARGET_EXPR, type, var, t, NULL_TREE, NULL_TREE);
199 return build_fold_addr_expr (var);
202 else
203 return build_fold_addr_expr (t);
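/* A rough illustration of the encoding above, assuming a 64-bit target
   where POINTER_SIZE is 64 and pointer_sized_int_node is therefore a
   64-bit unsigned type (sketch only; the exact trees depend on the
   target and on PHASE):

     int i;          passed by value as (uint64) i
     float f;        passed by value as (uint64) VIEW_CONVERT (uint32, f)
     __int128 x;     spilled to an addressable temporary, &tmp is passed
     long double l;  spilled to an addressable temporary, &tmp is passed

   i.e. anything whose mode fits in a pointer-sized integer is passed by
   value and everything wider is passed by address, which is what the
   libubsan handlers expect for their value handles.  */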
207 /* Cached ubsan_get_type_descriptor_type () return value. */
208 static GTY(()) tree ubsan_type_descriptor_type;
210 /* Build
211 struct __ubsan_type_descriptor
213 unsigned short __typekind;
214 unsigned short __typeinfo;
215 char __typename[];
217 type. */
219 static tree
220 ubsan_get_type_descriptor_type (void)
222 static const char *field_names[3]
223 = { "__typekind", "__typeinfo", "__typename" };
224 tree fields[3], ret;
226 if (ubsan_type_descriptor_type)
227 return ubsan_type_descriptor_type;
229 tree itype = build_range_type (sizetype, size_zero_node, NULL_TREE);
230 tree flex_arr_type = build_array_type (char_type_node, itype);
232 ret = make_node (RECORD_TYPE);
233 for (int i = 0; i < 3; i++)
235 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
236 get_identifier (field_names[i]),
237 (i == 2) ? flex_arr_type
238 : short_unsigned_type_node);
239 DECL_CONTEXT (fields[i]) = ret;
240 if (i)
241 DECL_CHAIN (fields[i - 1]) = fields[i];
243 tree type_decl = build_decl (input_location, TYPE_DECL,
244 get_identifier ("__ubsan_type_descriptor"),
245 ret);
246 DECL_IGNORED_P (type_decl) = 1;
247 DECL_ARTIFICIAL (type_decl) = 1;
248 TYPE_FIELDS (ret) = fields[0];
249 TYPE_NAME (ret) = type_decl;
250 TYPE_STUB_DECL (ret) = type_decl;
251 TYPE_ARTIFICIAL (ret) = 1;
252 layout_type (ret);
253 ubsan_type_descriptor_type = ret;
254 return ret;
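/* As an illustration, the descriptor that ubsan_type_descriptor below
   builds from this type for plain 'int' (32-bit int assumed) corresponds
   roughly to

     static const struct __ubsan_type_descriptor Lubsan_type0
       = { 0x0000,       __typekind: integer
           0x000b,       __typeinfo: (log2 (32) << 1) | 1 for signed
           "'int'" };    __typename, already quoted

   where Lubsan_type0 stands for whatever internal label is generated;
   this is a sketch of the layout, not literal emitted code.  */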
257 /* Cached ubsan_get_source_location_type () return value. */
258 static GTY(()) tree ubsan_source_location_type;
260 /* Build
261 struct __ubsan_source_location
263 const char *__filename;
264 unsigned int __line;
265 unsigned int __column;
267 type. */
269 tree
270 ubsan_get_source_location_type (void)
272 static const char *field_names[3]
273 = { "__filename", "__line", "__column" };
274 tree fields[3], ret;
275 if (ubsan_source_location_type)
276 return ubsan_source_location_type;
278 tree const_char_type = build_qualified_type (char_type_node,
279 TYPE_QUAL_CONST);
281 ret = make_node (RECORD_TYPE);
282 for (int i = 0; i < 3; i++)
284 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
285 get_identifier (field_names[i]),
286 (i == 0) ? build_pointer_type (const_char_type)
287 : unsigned_type_node);
288 DECL_CONTEXT (fields[i]) = ret;
289 if (i)
290 DECL_CHAIN (fields[i - 1]) = fields[i];
292 tree type_decl = build_decl (input_location, TYPE_DECL,
293 get_identifier ("__ubsan_source_location"),
294 ret);
295 DECL_IGNORED_P (type_decl) = 1;
296 DECL_ARTIFICIAL (type_decl) = 1;
297 TYPE_FIELDS (ret) = fields[0];
298 TYPE_NAME (ret) = type_decl;
299 TYPE_STUB_DECL (ret) = type_decl;
300 TYPE_ARTIFICIAL (ret) = 1;
301 layout_type (ret);
302 ubsan_source_location_type = ret;
303 return ret;
306 /* Helper routine that returns a CONSTRUCTOR of __ubsan_source_location
307 type with its fields filled from a location_t LOC. */
309 static tree
310 ubsan_source_location (location_t loc)
312 expanded_location xloc;
313 tree type = ubsan_get_source_location_type ();
315 xloc = expand_location (loc);
316 tree str;
317 if (xloc.file == NULL)
319 str = build_int_cst (ptr_type_node, 0);
320 xloc.line = 0;
321 xloc.column = 0;
323 else
325 /* Fill in the values from LOC. */
326 size_t len = strlen (xloc.file) + 1;
327 str = build_string (len, xloc.file);
328 TREE_TYPE (str) = build_array_type_nelts (char_type_node, len);
329 TREE_READONLY (str) = 1;
330 TREE_STATIC (str) = 1;
331 str = build_fold_addr_expr (str);
333 tree ctor = build_constructor_va (type, 3, NULL_TREE, str, NULL_TREE,
334 build_int_cst (unsigned_type_node,
335 xloc.line), NULL_TREE,
336 build_int_cst (unsigned_type_node,
337 xloc.column));
338 TREE_CONSTANT (ctor) = 1;
339 TREE_STATIC (ctor) = 1;
341 return ctor;
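/* For example (illustrative values), a diagnostic at foo.c:12:7 yields a
   constructor equivalent to { "foo.c", 12, 7 }, while an unknown location
   degenerates to { (const char *) 0, 0, 0 }.  */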
344 /* This routine returns the __typeinfo encoding for TYPE, or 0 if libubsan does not interpret it. */
346 static unsigned short
347 get_ubsan_type_info_for_type (tree type)
349 if (SCALAR_FLOAT_TYPE_P (type))
350 return tree_to_uhwi (TYPE_SIZE (type));
351 else if (INTEGRAL_TYPE_P (type))
353 int prec = exact_log2 (tree_to_uhwi (TYPE_SIZE (type)));
354 gcc_assert (prec != -1);
355 return (prec << 1) | !TYPE_UNSIGNED (type);
357 else
358 return 0;
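/* Spelled out, the encoding computed above is:

     floating-point types:  __typeinfo = bit size of the type
                            e.g. double -> 64
     integral types:        __typeinfo = (log2 (bit size) << 1) | is_signed
                            e.g. unsigned short -> (4 << 1) | 0 = 8
                                 int            -> (5 << 1) | 1 = 11
     everything else:       0

   (the examples assume the usual 16-bit short and 32-bit int).  */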
361 /* Counters for internal labels. ubsan_ids[0] for Lubsan_type,
362 ubsan_ids[1] for Lubsan_data labels. */
363 static GTY(()) unsigned int ubsan_ids[2];
365 /* Helper routine that returns ADDR_EXPR of a VAR_DECL of a type
366 descriptor. It first looks into the hash table; if not found,
367 create the VAR_DECL, put it into the hash table and return the
368 ADDR_EXPR of it. TYPE describes a particular type. PSTYLE is
369 an enum controlling how we want to print the type. */
371 tree
372 ubsan_type_descriptor (tree type, enum ubsan_print_style pstyle)
374 /* See through any typedefs. */
375 type = TYPE_MAIN_VARIANT (type);
376 tree type3 = type;
377 if (pstyle == UBSAN_PRINT_FORCE_INT)
379 /* Temporary hack for -fsanitize=shift with _BitInt(129) and more.
380 libubsan crashes if it is not TK_Integer type. */
381 if (TREE_CODE (type) == BITINT_TYPE
382 && TYPE_PRECISION (type) > MAX_FIXED_MODE_SIZE)
383 type3 = build_qualified_type (type, TYPE_QUAL_CONST);
384 if (type3 == type)
385 pstyle = UBSAN_PRINT_NORMAL;
388 tree decl = decl_for_type_lookup (type3);
389 /* It is possible that some of the earlier created DECLs were found
390 unused; in that case they weren't emitted and varpool_node::get
391 returns NULL for them. But now we really need them, so recreate
392 them here. */
393 if (decl != NULL_TREE && varpool_node::get (decl))
395 return build_fold_addr_expr (decl);
398 tree dtype = ubsan_get_type_descriptor_type ();
399 tree type2 = type;
400 const char *tname = NULL;
401 pretty_printer pretty_name;
402 unsigned char deref_depth = 0;
403 unsigned short tkind, tinfo;
404 char tname_bitint[sizeof ("unsigned _BitInt(2147483647)")];
406 /* Get the name of the type, or the name of the pointer type. */
407 if (pstyle == UBSAN_PRINT_POINTER)
409 gcc_assert (POINTER_TYPE_P (type));
410 type2 = TREE_TYPE (type);
412 /* Remove any '*' operators from TYPE. */
413 while (POINTER_TYPE_P (type2))
414 deref_depth++, type2 = TREE_TYPE (type2);
416 if (TREE_CODE (type2) == METHOD_TYPE)
417 type2 = TYPE_METHOD_BASETYPE (type2);
420 /* If an array, get its type. */
421 type2 = strip_array_types (type2);
423 if (pstyle == UBSAN_PRINT_ARRAY)
425 while (POINTER_TYPE_P (type2))
426 deref_depth++, type2 = TREE_TYPE (type2);
429 if (TYPE_NAME (type2) != NULL)
431 if (TREE_CODE (TYPE_NAME (type2)) == IDENTIFIER_NODE)
432 tname = IDENTIFIER_POINTER (TYPE_NAME (type2));
433 else if (DECL_NAME (TYPE_NAME (type2)) != NULL)
434 tname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type2)));
437 if (tname == NULL)
439 if (TREE_CODE (type2) == BITINT_TYPE)
441 snprintf (tname_bitint, sizeof (tname_bitint),
442 "%s_BitInt(%d)", TYPE_UNSIGNED (type2) ? "unsigned " : "",
443 TYPE_PRECISION (type2));
444 tname = tname_bitint;
446 else
447 /* We weren't able to determine the type name. */
448 tname = "<unknown>";
451 pp_quote (&pretty_name);
453 tree eltype = type;
454 if (pstyle == UBSAN_PRINT_POINTER)
456 pp_printf (&pretty_name, "%s%s%s%s%s%s%s",
457 TYPE_VOLATILE (type2) ? "volatile " : "",
458 TYPE_READONLY (type2) ? "const " : "",
459 TYPE_RESTRICT (type2) ? "restrict " : "",
460 TYPE_ATOMIC (type2) ? "_Atomic " : "",
461 TREE_CODE (type2) == RECORD_TYPE
462 ? "struct "
463 : TREE_CODE (type2) == UNION_TYPE
464 ? "union " : "", tname,
465 deref_depth == 0 ? "" : " ");
466 while (deref_depth-- > 0)
467 pp_star (&pretty_name);
469 else if (pstyle == UBSAN_PRINT_ARRAY)
471 /* Pretty print the array dimensions. */
472 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
473 tree t = type;
474 pp_string (&pretty_name, tname);
475 pp_space (&pretty_name);
476 while (deref_depth-- > 0)
477 pp_star (&pretty_name);
478 while (TREE_CODE (t) == ARRAY_TYPE)
480 pp_left_bracket (&pretty_name);
481 tree dom = TYPE_DOMAIN (t);
482 if (dom != NULL_TREE
483 && TYPE_MAX_VALUE (dom) != NULL_TREE
484 && TREE_CODE (TYPE_MAX_VALUE (dom)) == INTEGER_CST)
486 unsigned HOST_WIDE_INT m;
487 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (dom))
488 && (m = tree_to_uhwi (TYPE_MAX_VALUE (dom))) + 1 != 0)
489 pp_unsigned_wide_integer (&pretty_name, m + 1);
490 else
491 pp_wide_int (&pretty_name,
492 wi::add (wi::to_widest (TYPE_MAX_VALUE (dom)), 1),
493 TYPE_SIGN (TREE_TYPE (dom)));
495 else
496 /* ??? We can't determine the variable name; print VLA unspec. */
497 pp_star (&pretty_name);
498 pp_right_bracket (&pretty_name);
499 t = TREE_TYPE (t);
502 /* Save the tree with stripped types. */
503 eltype = t;
505 else
506 pp_string (&pretty_name, tname);
508 pp_quote (&pretty_name);
510 switch (TREE_CODE (eltype))
512 case BOOLEAN_TYPE:
513 case ENUMERAL_TYPE:
514 case INTEGER_TYPE:
515 tkind = 0x0000;
516 break;
517 case BITINT_TYPE:
518 if (TYPE_PRECISION (eltype) <= MAX_FIXED_MODE_SIZE)
519 tkind = 0x0000;
520 else
521 tkind = 0xffff;
522 break;
523 case REAL_TYPE:
524 /* FIXME: libubsan right now only supports float, double and
525 long double type formats. */
526 if (TYPE_MODE (eltype) == TYPE_MODE (float_type_node)
527 || TYPE_MODE (eltype) == TYPE_MODE (double_type_node)
528 || TYPE_MODE (eltype) == TYPE_MODE (long_double_type_node))
529 tkind = 0x0001;
530 else
531 tkind = 0xffff;
532 break;
533 default:
534 tkind = 0xffff;
535 break;
537 tinfo = tkind == 0xffff ? 0 : get_ubsan_type_info_for_type (eltype);
539 if (pstyle == UBSAN_PRINT_FORCE_INT)
541 tkind = 0x0000;
542 tree t = build_nonstandard_integer_type (MAX_FIXED_MODE_SIZE,
543 TYPE_UNSIGNED (eltype));
544 tinfo = get_ubsan_type_info_for_type (t);
547 /* Create a new VAR_DECL of type descriptor. */
548 const char *tmp = pp_formatted_text (&pretty_name);
549 size_t len = strlen (tmp) + 1;
550 tree str = build_string (len, tmp);
551 TREE_TYPE (str) = build_array_type_nelts (char_type_node, len);
552 TREE_READONLY (str) = 1;
553 TREE_STATIC (str) = 1;
555 char tmp_name[32];
556 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_type", ubsan_ids[0]++);
557 decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
558 dtype);
559 TREE_STATIC (decl) = 1;
560 TREE_PUBLIC (decl) = 0;
561 DECL_ARTIFICIAL (decl) = 1;
562 DECL_IGNORED_P (decl) = 1;
563 DECL_EXTERNAL (decl) = 0;
564 DECL_SIZE (decl)
565 = size_binop (PLUS_EXPR, DECL_SIZE (decl), TYPE_SIZE (TREE_TYPE (str)));
566 DECL_SIZE_UNIT (decl)
567 = size_binop (PLUS_EXPR, DECL_SIZE_UNIT (decl),
568 TYPE_SIZE_UNIT (TREE_TYPE (str)));
570 tree ctor = build_constructor_va (dtype, 3, NULL_TREE,
571 build_int_cst (short_unsigned_type_node,
572 tkind), NULL_TREE,
573 build_int_cst (short_unsigned_type_node,
574 tinfo), NULL_TREE, str);
575 TREE_CONSTANT (ctor) = 1;
576 TREE_STATIC (ctor) = 1;
577 DECL_INITIAL (decl) = ctor;
578 varpool_node::finalize_decl (decl);
580 /* Save the VAR_DECL into the hash table. */
581 decl_for_type_insert (type3, decl);
583 return build_fold_addr_expr (decl);
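/* A few examples of the names generated above (illustrative; the exact
   spelling follows the front end's type names):

     UBSAN_PRINT_NORMAL   on int           ->  "'int'"
     UBSAN_PRINT_POINTER  on struct S **   ->  "'struct S *'"
     UBSAN_PRINT_ARRAY    on int[3][4]     ->  "'int [3][4]'"

   The POINTER style prints the pointed-to type, hence one '*' fewer.  The
   resulting Lubsan_type* decls are cached per type in decl_tree_for_type,
   so repeated checks against the same type share a single descriptor.  */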
586 /* Create a structure for the ubsan library. NAME is a name of the new
587 structure. LOCCNT is number of locations, PLOC points to array of
588 locations. The arguments in ... are of __ubsan_type_descriptor type
589 and there are at most two of them, followed by NULL_TREE, followed
590 by optional extra arguments and another NULL_TREE. */
592 tree
593 ubsan_create_data (const char *name, int loccnt, const location_t *ploc, ...)
595 va_list args;
596 tree ret, t;
597 tree fields[6];
598 vec<tree, va_gc> *saved_args = NULL;
599 size_t i = 0;
600 int j;
602 /* It is possible that PCH zapped the table with the definitions of the
603 sanitizer builtins. Reinitialize them if needed. */
604 initialize_sanitizer_builtins ();
606 /* Firstly, create a pointer to type descriptor type. */
607 tree td_type = ubsan_get_type_descriptor_type ();
608 td_type = build_pointer_type (td_type);
610 /* Create the structure type. */
611 ret = make_node (RECORD_TYPE);
612 for (j = 0; j < loccnt; j++)
614 gcc_checking_assert (i < 2);
615 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
616 ubsan_get_source_location_type ());
617 DECL_CONTEXT (fields[i]) = ret;
618 if (i)
619 DECL_CHAIN (fields[i - 1]) = fields[i];
620 i++;
623 va_start (args, ploc);
624 for (t = va_arg (args, tree); t != NULL_TREE;
625 i++, t = va_arg (args, tree))
627 gcc_checking_assert (i < 4);
628 /* Save the tree arguments for later use. */
629 vec_safe_push (saved_args, t);
630 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
631 td_type);
632 DECL_CONTEXT (fields[i]) = ret;
633 if (i)
634 DECL_CHAIN (fields[i - 1]) = fields[i];
637 for (t = va_arg (args, tree); t != NULL_TREE;
638 i++, t = va_arg (args, tree))
640 gcc_checking_assert (i < 6);
641 /* Save the tree arguments for later use. */
642 vec_safe_push (saved_args, t);
643 fields[i] = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
644 TREE_TYPE (t));
645 DECL_CONTEXT (fields[i]) = ret;
646 if (i)
647 DECL_CHAIN (fields[i - 1]) = fields[i];
649 va_end (args);
651 tree type_decl = build_decl (input_location, TYPE_DECL,
652 get_identifier (name), ret);
653 DECL_IGNORED_P (type_decl) = 1;
654 DECL_ARTIFICIAL (type_decl) = 1;
655 TYPE_FIELDS (ret) = fields[0];
656 TYPE_NAME (ret) = type_decl;
657 TYPE_STUB_DECL (ret) = type_decl;
658 TYPE_ARTIFICIAL (ret) = 1;
659 layout_type (ret);
661 /* Now, fill in the type. */
662 char tmp_name[32];
663 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lubsan_data", ubsan_ids[1]++);
664 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (tmp_name),
665 ret);
666 TREE_STATIC (var) = 1;
667 TREE_PUBLIC (var) = 0;
668 DECL_ARTIFICIAL (var) = 1;
669 DECL_IGNORED_P (var) = 1;
670 DECL_EXTERNAL (var) = 0;
672 vec<constructor_elt, va_gc> *v;
673 vec_alloc (v, i);
674 tree ctor = build_constructor (ret, v);
676 /* If desirable, set the __ubsan_source_location element. */
677 for (j = 0; j < loccnt; j++)
679 location_t loc = LOCATION_LOCUS (ploc[j]);
680 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, ubsan_source_location (loc));
683 size_t nelts = vec_safe_length (saved_args);
684 for (i = 0; i < nelts; i++)
686 t = (*saved_args)[i];
687 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, t);
690 TREE_CONSTANT (ctor) = 1;
691 TREE_STATIC (ctor) = 1;
692 DECL_INITIAL (var) = ctor;
693 varpool_node::finalize_decl (var);
695 return var;
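/* For example, ubsan_expand_bounds_ifn below calls

     ubsan_create_data ("__ubsan_out_of_bounds_data", 1, &loc,
                        ubsan_type_descriptor (type, UBSAN_PRINT_ARRAY),
                        ubsan_type_descriptor (TREE_TYPE (orig_index)),
                        NULL_TREE, NULL_TREE);

   which produces a static record laid out as one __ubsan_source_location
   followed by two pointers to __ubsan_type_descriptor, matching the layout
   the corresponding libubsan handler reads; any extra arguments after the
   second NULL_TREE (alignment, kind, ...) become trailing fields.  */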
698 /* Shared between the *build_builtin_unreachable functions. */
700 tree
701 sanitize_unreachable_fn (tree *data, location_t loc)
703 tree fn = NULL_TREE;
704 bool san = sanitize_flags_p (SANITIZE_UNREACHABLE);
705 if (san
706 ? (flag_sanitize_trap & SANITIZE_UNREACHABLE)
707 : flag_unreachable_traps)
709 fn = builtin_decl_explicit (BUILT_IN_UNREACHABLE_TRAP);
710 *data = NULL_TREE;
712 else if (san)
714 /* Call ubsan_create_data first as it initializes SANITIZER built-ins. */
715 *data = ubsan_create_data ("__ubsan_unreachable_data", 1, &loc,
716 NULL_TREE, NULL_TREE);
717 fn = builtin_decl_explicit (BUILT_IN_UBSAN_HANDLE_BUILTIN_UNREACHABLE);
718 *data = build_fold_addr_expr_loc (loc, *data);
720 else
722 fn = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
723 *data = NULL_TREE;
725 return fn;
728 /* Rewrite a gcall to __builtin_unreachable for -fsanitize=unreachable. Called
729 by the sanopt pass. */
731 bool
732 ubsan_instrument_unreachable (gimple_stmt_iterator *gsi)
734 location_t loc = gimple_location (gsi_stmt (*gsi));
735 gimple *g = gimple_build_builtin_unreachable (loc);
736 gsi_replace (gsi, g, false);
737 return false;
740 /* Return true if T is a call to a libubsan routine. */
742 bool
743 is_ubsan_builtin_p (tree t)
745 return TREE_CODE (t) == FUNCTION_DECL
746 && fndecl_built_in_p (t, BUILT_IN_NORMAL)
747 && strncmp (IDENTIFIER_POINTER (DECL_NAME (t)),
748 "__builtin___ubsan_", 18) == 0;
751 /* Create a callgraph edge for statement STMT. */
753 static void
754 ubsan_create_edge (gimple *stmt)
756 gcall *call_stmt = dyn_cast <gcall *> (stmt);
757 basic_block bb = gimple_bb (stmt);
758 cgraph_node *node = cgraph_node::get (current_function_decl);
759 tree decl = gimple_call_fndecl (call_stmt);
760 if (decl)
761 node->create_edge (cgraph_node::get_create (decl), call_stmt, bb->count);
764 /* Expand the UBSAN_BOUNDS special builtin function. */
766 bool
767 ubsan_expand_bounds_ifn (gimple_stmt_iterator *gsi)
769 gimple *stmt = gsi_stmt (*gsi);
770 location_t loc = gimple_location (stmt);
771 gcc_assert (gimple_call_num_args (stmt) == 3);
773 /* Pick up the arguments of the UBSAN_BOUNDS call. */
774 tree type = TREE_TYPE (TREE_TYPE (gimple_call_arg (stmt, 0)));
775 tree index = gimple_call_arg (stmt, 1);
776 tree orig_index = index;
777 tree bound = gimple_call_arg (stmt, 2);
779 gimple_stmt_iterator gsi_orig = *gsi;
781 /* Create condition "if (index >= bound)". */
782 basic_block then_bb, fallthru_bb;
783 gimple_stmt_iterator cond_insert_point
784 = create_cond_insert_point (gsi, false, false, true,
785 &then_bb, &fallthru_bb);
786 index = fold_convert (TREE_TYPE (bound), index);
787 index = force_gimple_operand_gsi (&cond_insert_point, index,
788 true, NULL_TREE,
789 false, GSI_NEW_STMT);
790 gimple *g = gimple_build_cond (GE_EXPR, index, bound, NULL_TREE, NULL_TREE);
791 gimple_set_location (g, loc);
792 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
794 /* Generate __ubsan_handle_out_of_bounds call. */
795 *gsi = gsi_after_labels (then_bb);
796 if (flag_sanitize_trap & SANITIZE_BOUNDS)
797 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
798 else
800 tree data
801 = ubsan_create_data ("__ubsan_out_of_bounds_data", 1, &loc,
802 ubsan_type_descriptor (type, UBSAN_PRINT_ARRAY),
803 ubsan_type_descriptor (TREE_TYPE (orig_index)),
804 NULL_TREE, NULL_TREE);
805 data = build_fold_addr_expr_loc (loc, data);
806 enum built_in_function bcode
807 = (flag_sanitize_recover & SANITIZE_BOUNDS)
808 ? BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS
809 : BUILT_IN_UBSAN_HANDLE_OUT_OF_BOUNDS_ABORT;
810 tree fn = builtin_decl_explicit (bcode);
811 tree val = ubsan_encode_value (orig_index, UBSAN_ENCODE_VALUE_GIMPLE);
812 val = force_gimple_operand_gsi (gsi, val, true, NULL_TREE, true,
813 GSI_SAME_STMT);
814 g = gimple_build_call (fn, 2, data, val);
816 gimple_set_location (g, loc);
817 gsi_insert_before (gsi, g, GSI_SAME_STMT);
819 /* Get rid of the UBSAN_BOUNDS call from the IR. */
820 unlink_stmt_vdef (stmt);
821 gsi_remove (&gsi_orig, true);
823 /* Point GSI to next logical statement. */
824 *gsi = gsi_start_bb (fallthru_bb);
825 return true;
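/* Shape of the expansion above as pseudo-GIMPLE (Lubsan_dataN stands for
   the record built by ubsan_create_data; with flag_sanitize_trap the
   handler call is simply __builtin_trap ()):

     _1 = (type of bound) index;
     if (_1 >= bound)
       __ubsan_handle_out_of_bounds (&Lubsan_dataN, <encoded index>);
     <fall through to the original array access>

   The handler call sits in its own very-unlikely block, and with the
   recovering handler execution continues in the fallthrough block.  */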
828 /* Expand UBSAN_NULL internal call. The type is kept on the ckind
829 argument which is a constant, because the middle-end treats pointer
830 conversions as useless and therefore the type of the first argument
831 could be changed to any other pointer type. */
833 bool
834 ubsan_expand_null_ifn (gimple_stmt_iterator *gsip)
836 gimple_stmt_iterator gsi = *gsip;
837 gimple *stmt = gsi_stmt (gsi);
838 location_t loc = gimple_location (stmt);
839 gcc_assert (gimple_call_num_args (stmt) == 3);
840 tree ptr = gimple_call_arg (stmt, 0);
841 tree ckind = gimple_call_arg (stmt, 1);
842 tree align = gimple_call_arg (stmt, 2);
843 tree check_align = NULL_TREE;
844 bool check_null;
846 basic_block cur_bb = gsi_bb (gsi);
848 gimple *g;
849 if (!integer_zerop (align))
851 unsigned int ptralign = get_pointer_alignment (ptr) / BITS_PER_UNIT;
852 if (compare_tree_int (align, ptralign) == 1)
854 check_align = make_ssa_name (pointer_sized_int_node);
855 g = gimple_build_assign (check_align, NOP_EXPR, ptr);
856 gimple_set_location (g, loc);
857 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
860 check_null = sanitize_flags_p (SANITIZE_NULL);
861 if (check_null && POINTER_TYPE_P (TREE_TYPE (ptr)))
863 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (ptr)));
864 if (!ADDR_SPACE_GENERIC_P (as)
865 && targetm.addr_space.zero_address_valid (as))
866 check_null = false;
869 if (check_align == NULL_TREE && !check_null)
871 gsi_remove (gsip, true);
872 /* Unlink the UBSAN_NULL's vops before replacing it. */
873 unlink_stmt_vdef (stmt);
874 return true;
877 /* Split the original block holding the pointer dereference. */
878 edge e = split_block (cur_bb, stmt);
880 /* Get a hold on the 'condition block', the 'then block' and the
881 'else block'. */
882 basic_block cond_bb = e->src;
883 basic_block fallthru_bb = e->dest;
884 basic_block then_bb = create_empty_bb (cond_bb);
885 add_bb_to_loop (then_bb, cond_bb->loop_father);
886 loops_state_set (LOOPS_NEED_FIXUP);
888 /* Make an edge coming from the 'cond block' into the 'then block';
889 this edge is unlikely taken, so set up the probability accordingly. */
890 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
891 e->probability = profile_probability::very_unlikely ();
892 then_bb->count = e->count ();
894 /* Connect 'then block' with the 'else block'. This is needed
895 as the ubsan routines we call in the 'then block' are not noreturn.
896 The 'then block' only has one outgoing edge. */
897 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
899 /* Set up the fallthrough basic block. */
900 e = find_edge (cond_bb, fallthru_bb);
901 e->flags = EDGE_FALSE_VALUE;
902 e->probability = profile_probability::very_likely ();
904 /* Update dominance info for the newly created then_bb; note that
905 fallthru_bb's dominance info has already been updated by
906 split_block. */
907 if (dom_info_available_p (CDI_DOMINATORS))
908 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
910 /* Put the ubsan builtin call into the newly created BB. */
911 if (flag_sanitize_trap & ((check_align ? SANITIZE_ALIGNMENT + 0 : 0)
912 | (check_null ? SANITIZE_NULL + 0 : 0)))
913 g = gimple_build_call (builtin_decl_implicit (BUILT_IN_TRAP), 0);
914 else
916 enum built_in_function bcode
917 = (flag_sanitize_recover & ((check_align ? SANITIZE_ALIGNMENT + 0 : 0)
918 | (check_null ? SANITIZE_NULL + 0 : 0)))
919 ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1
920 : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1_ABORT;
921 tree fn = builtin_decl_implicit (bcode);
922 int align_log = tree_log2 (align);
923 tree data
924 = ubsan_create_data ("__ubsan_null_data", 1, &loc,
925 ubsan_type_descriptor (TREE_TYPE (ckind),
926 UBSAN_PRINT_POINTER),
927 NULL_TREE,
928 build_int_cst (unsigned_char_type_node,
929 MAX (align_log, 0)),
930 fold_convert (unsigned_char_type_node, ckind),
931 NULL_TREE);
932 data = build_fold_addr_expr_loc (loc, data);
933 g = gimple_build_call (fn, 2, data,
934 check_align ? check_align
935 : build_zero_cst (pointer_sized_int_node));
937 gimple_stmt_iterator gsi2 = gsi_start_bb (then_bb);
938 gimple_set_location (g, loc);
939 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
942 /* Unlink the UBSAN_NULL's vops before replacing it. */
942 unlink_stmt_vdef (stmt);
944 if (check_null)
946 g = gimple_build_cond (EQ_EXPR, ptr, build_int_cst (TREE_TYPE (ptr), 0),
947 NULL_TREE, NULL_TREE);
948 gimple_set_location (g, loc);
950 /* Replace the UBSAN_NULL with a GIMPLE_COND stmt. */
951 gsi_replace (&gsi, g, false);
952 stmt = g;
955 if (check_align)
957 if (check_null)
959 /* Split the block with the condition again. */
960 e = split_block (cond_bb, stmt);
961 basic_block cond1_bb = e->src;
962 basic_block cond2_bb = e->dest;
964 /* Make an edge coming from the 'cond1 block' into the 'then block';
965 this edge is unlikely taken, so set up the probability
966 accordingly. */
967 e = make_edge (cond1_bb, then_bb, EDGE_TRUE_VALUE);
968 e->probability = profile_probability::very_unlikely ();
970 /* Set up the fallthrough basic block. */
971 e = find_edge (cond1_bb, cond2_bb);
972 e->flags = EDGE_FALSE_VALUE;
973 e->probability = profile_probability::very_likely ();
975 /* Update dominance info. */
976 if (dom_info_available_p (CDI_DOMINATORS))
978 set_immediate_dominator (CDI_DOMINATORS, fallthru_bb, cond1_bb);
979 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond1_bb);
982 gsi2 = gsi_start_bb (cond2_bb);
985 tree mask = build_int_cst (pointer_sized_int_node,
986 tree_to_uhwi (align) - 1);
987 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
988 BIT_AND_EXPR, check_align, mask);
989 gimple_set_location (g, loc);
990 if (check_null)
991 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
992 else
993 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
995 g = gimple_build_cond (NE_EXPR, gimple_assign_lhs (g),
996 build_int_cst (pointer_sized_int_node, 0),
997 NULL_TREE, NULL_TREE);
998 gimple_set_location (g, loc);
999 if (check_null)
1000 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
1001 else
1002 /* Replace the UBSAN_NULL with a GIMPLE_COND stmt. */
1003 gsi_replace (&gsi, g, false);
1005 return false;
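/* Roughly, a dereference of PTR checked for both null and alignment ALIGN
   expands to (pseudo-GIMPLE, names illustrative):

     _1 = (uintptr_t) ptr;             only when alignment is checked
     if (ptr == 0) goto handler;       the -fsanitize=null part
     _2 = _1 & (ALIGN - 1);
     if (_2 != 0) goto handler;        the -fsanitize=alignment part
     <fall through>
   handler:
     __ubsan_handle_type_mismatch_v1 (&Lubsan_dataN, _1);
     <also falls through when the recovering handler is used>

   If only one of the two sanitizers is active the other branch is not
   emitted, and if neither check is needed the UBSAN_NULL call is simply
   removed, as done above.  */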
1008 #define OBJSZ_MAX_OFFSET (1024 * 16)
1010 /* Expand UBSAN_OBJECT_SIZE internal call. */
1012 bool
1013 ubsan_expand_objsize_ifn (gimple_stmt_iterator *gsi)
1015 gimple *stmt = gsi_stmt (*gsi);
1016 location_t loc = gimple_location (stmt);
1017 gcc_assert (gimple_call_num_args (stmt) == 4);
1019 tree ptr = gimple_call_arg (stmt, 0);
1020 tree offset = gimple_call_arg (stmt, 1);
1021 tree size = gimple_call_arg (stmt, 2);
1022 tree ckind = gimple_call_arg (stmt, 3);
1023 gimple_stmt_iterator gsi_orig = *gsi;
1024 gimple *g;
1026 /* See if we can discard the check. */
1027 if (TREE_CODE (size) == INTEGER_CST
1028 && integer_all_onesp (size))
1029 /* Yes, __builtin_object_size couldn't determine the
1030 object size. */;
1031 else if (TREE_CODE (offset) == INTEGER_CST
1032 && wi::to_widest (offset) >= -OBJSZ_MAX_OFFSET
1033 && wi::to_widest (offset) <= -1)
1034 /* The offset is in range [-16K, -1]. */;
1035 else
1037 /* if (offset > objsize) */
1038 basic_block then_bb, fallthru_bb;
1039 gimple_stmt_iterator cond_insert_point
1040 = create_cond_insert_point (gsi, false, false, true,
1041 &then_bb, &fallthru_bb);
1042 g = gimple_build_cond (GT_EXPR, offset, size, NULL_TREE, NULL_TREE);
1043 gimple_set_location (g, loc);
1044 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
1046 /* If the offset is small enough, we don't need the second
1047 run-time check. */
1048 if (TREE_CODE (offset) == INTEGER_CST
1049 && wi::to_widest (offset) >= 0
1050 && wi::to_widest (offset) <= OBJSZ_MAX_OFFSET)
1051 *gsi = gsi_after_labels (then_bb);
1052 else
1054 /* Don't issue run-time error if (ptr > ptr + offset). That
1055 may happen when computing a POINTER_PLUS_EXPR. */
1056 basic_block then2_bb, fallthru2_bb;
1058 gimple_stmt_iterator gsi2 = gsi_after_labels (then_bb);
1059 cond_insert_point = create_cond_insert_point (&gsi2, false, false,
1060 true, &then2_bb,
1061 &fallthru2_bb);
1062 /* Convert the pointer to an integer type. */
1063 tree p = make_ssa_name (pointer_sized_int_node);
1064 g = gimple_build_assign (p, NOP_EXPR, ptr);
1065 gimple_set_location (g, loc);
1066 gsi_insert_before (&cond_insert_point, g, GSI_NEW_STMT);
1067 p = gimple_assign_lhs (g);
1068 /* Compute ptr + offset. */
1069 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1070 PLUS_EXPR, p, offset);
1071 gimple_set_location (g, loc);
1072 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
1073 /* Now build the conditional and put it into the IR. */
1074 g = gimple_build_cond (LE_EXPR, p, gimple_assign_lhs (g),
1075 NULL_TREE, NULL_TREE);
1076 gimple_set_location (g, loc);
1077 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
1078 *gsi = gsi_after_labels (then2_bb);
1081 /* Generate __ubsan_handle_type_mismatch call. */
1082 if (flag_sanitize_trap & SANITIZE_OBJECT_SIZE)
1083 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
1084 else
1086 tree data
1087 = ubsan_create_data ("__ubsan_objsz_data", 1, &loc,
1088 ubsan_type_descriptor (TREE_TYPE (ptr),
1089 UBSAN_PRINT_POINTER),
1090 NULL_TREE,
1091 build_zero_cst (unsigned_char_type_node),
1092 ckind,
1093 NULL_TREE);
1094 data = build_fold_addr_expr_loc (loc, data);
1095 enum built_in_function bcode
1096 = (flag_sanitize_recover & SANITIZE_OBJECT_SIZE)
1097 ? BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1
1098 : BUILT_IN_UBSAN_HANDLE_TYPE_MISMATCH_V1_ABORT;
1099 tree p = make_ssa_name (pointer_sized_int_node);
1100 g = gimple_build_assign (p, NOP_EXPR, ptr);
1101 gimple_set_location (g, loc);
1102 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1103 g = gimple_build_call (builtin_decl_explicit (bcode), 2, data, p);
1105 gimple_set_location (g, loc);
1106 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1108 /* Point GSI to next logical statement. */
1109 *gsi = gsi_start_bb (fallthru_bb);
1111 /* Get rid of the UBSAN_OBJECT_SIZE call from the IR. */
1112 unlink_stmt_vdef (stmt);
1113 gsi_remove (&gsi_orig, true);
1114 return true;
1117 /* Get rid of the UBSAN_OBJECT_SIZE call from the IR. */
1118 unlink_stmt_vdef (stmt);
1119 gsi_remove (gsi, true);
1120 return true;
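/* In outline, UBSAN_OBJECT_SIZE (ptr, offset, size, ckind) becomes
   (pseudo-GIMPLE; the cases discarded above, unknown size or a small
   negative constant offset, emit nothing at all):

     if (offset > size)
       if ((uintptr_t) ptr <= (uintptr_t) ptr + offset)
         report ();
     <fall through>

   The inner comparison is omitted for a known offset in
   [0, OBJSZ_MAX_OFFSET], and report () is a placeholder for either
   __builtin_trap or the __ubsan_handle_type_mismatch_v1 call with the
   "__ubsan_objsz_data" record and the pointer converted to a
   pointer-sized integer.  */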
1123 /* Expand UBSAN_PTR internal call. */
1125 bool
1126 ubsan_expand_ptr_ifn (gimple_stmt_iterator *gsip)
1128 gimple_stmt_iterator gsi = *gsip;
1129 gimple *stmt = gsi_stmt (gsi);
1130 location_t loc = gimple_location (stmt);
1131 gcc_assert (gimple_call_num_args (stmt) == 2);
1132 tree ptr = gimple_call_arg (stmt, 0);
1133 tree off = gimple_call_arg (stmt, 1);
1135 if (integer_zerop (off))
1137 gsi_remove (gsip, true);
1138 unlink_stmt_vdef (stmt);
1139 return true;
1142 basic_block cur_bb = gsi_bb (gsi);
1143 tree ptrplusoff = make_ssa_name (pointer_sized_int_node);
1144 tree ptri = make_ssa_name (pointer_sized_int_node);
1145 int pos_neg = get_range_pos_neg (off);
1147 /* Split the original block holding the pointer dereference. */
1148 edge e = split_block (cur_bb, stmt);
1150 /* Get a hold on the 'condition block', the 'then block' and the
1151 'else block'. */
1152 basic_block cond_bb = e->src;
1153 basic_block fallthru_bb = e->dest;
1154 basic_block then_bb = create_empty_bb (cond_bb);
1155 basic_block cond_pos_bb = NULL, cond_neg_bb = NULL;
1156 add_bb_to_loop (then_bb, cond_bb->loop_father);
1157 loops_state_set (LOOPS_NEED_FIXUP);
1159 /* Set up the fallthrough basic block. */
1160 e->flags = EDGE_FALSE_VALUE;
1161 if (pos_neg != 3)
1163 e->probability = profile_probability::very_likely ();
1165 /* Connect 'then block' with the 'else block'. This is needed
1166 as the ubsan routines we call in the 'then block' are not noreturn.
1167 The 'then block' only has one outgoing edge. */
1168 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1170 /* Make an edge coming from the 'cond block' into the 'then block';
1171 this edge is unlikely taken, so set up the probability
1172 accordingly. */
1173 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1174 e->probability = profile_probability::very_unlikely ();
1175 then_bb->count = e->count ();
1177 else
1179 e->probability = profile_probability::even ();
1181 e = split_block (fallthru_bb, (gimple *) NULL);
1182 cond_neg_bb = e->src;
1183 fallthru_bb = e->dest;
1184 e->probability = profile_probability::very_likely ();
1185 e->flags = EDGE_FALSE_VALUE;
1187 e = make_edge (cond_neg_bb, then_bb, EDGE_TRUE_VALUE);
1188 e->probability = profile_probability::very_unlikely ();
1189 then_bb->count = e->count ();
1191 cond_pos_bb = create_empty_bb (cond_bb);
1192 add_bb_to_loop (cond_pos_bb, cond_bb->loop_father);
1194 e = make_edge (cond_bb, cond_pos_bb, EDGE_TRUE_VALUE);
1195 e->probability = profile_probability::even ();
1196 cond_pos_bb->count = e->count ();
1198 e = make_edge (cond_pos_bb, then_bb, EDGE_TRUE_VALUE);
1199 e->probability = profile_probability::very_unlikely ();
1201 e = make_edge (cond_pos_bb, fallthru_bb, EDGE_FALSE_VALUE);
1202 e->probability = profile_probability::very_likely ();
1204 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1207 gimple *g = gimple_build_assign (ptri, NOP_EXPR, ptr);
1208 gimple_set_location (g, loc);
1209 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1210 g = gimple_build_assign (ptrplusoff, PLUS_EXPR, ptri, off);
1211 gimple_set_location (g, loc);
1212 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1214 /* Update dominance info for the newly created then_bb; note that
1215 fallthru_bb's dominance info has already been updated by
1216 split_block. */
1217 if (dom_info_available_p (CDI_DOMINATORS))
1219 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1220 if (pos_neg == 3)
1222 set_immediate_dominator (CDI_DOMINATORS, cond_pos_bb, cond_bb);
1223 set_immediate_dominator (CDI_DOMINATORS, fallthru_bb, cond_bb);
1227 /* Put the ubsan builtin call into the newly created BB. */
1228 if (flag_sanitize_trap & SANITIZE_POINTER_OVERFLOW)
1229 g = gimple_build_call (builtin_decl_implicit (BUILT_IN_TRAP), 0);
1230 else
1232 enum built_in_function bcode
1233 = (flag_sanitize_recover & SANITIZE_POINTER_OVERFLOW)
1234 ? BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW
1235 : BUILT_IN_UBSAN_HANDLE_POINTER_OVERFLOW_ABORT;
1236 tree fn = builtin_decl_implicit (bcode);
1237 tree data
1238 = ubsan_create_data ("__ubsan_ptrovf_data", 1, &loc,
1239 NULL_TREE, NULL_TREE);
1240 data = build_fold_addr_expr_loc (loc, data);
1241 g = gimple_build_call (fn, 3, data, ptr, ptrplusoff);
1243 gimple_stmt_iterator gsi2 = gsi_start_bb (then_bb);
1244 gimple_set_location (g, loc);
1245 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
1248 /* Unlink the UBSAN_PTR's vops before replacing it. */
1248 unlink_stmt_vdef (stmt);
1250 if (TREE_CODE (off) == INTEGER_CST)
1251 g = gimple_build_cond (wi::neg_p (wi::to_wide (off)) ? LT_EXPR : GE_EXPR,
1252 ptri, fold_build1 (NEGATE_EXPR, sizetype, off),
1253 NULL_TREE, NULL_TREE);
1254 else if (pos_neg != 3)
1255 g = gimple_build_cond (pos_neg == 1 ? LT_EXPR : GT_EXPR,
1256 ptrplusoff, ptri, NULL_TREE, NULL_TREE);
1257 else
1259 gsi2 = gsi_start_bb (cond_pos_bb);
1260 g = gimple_build_cond (LT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE);
1261 gimple_set_location (g, loc);
1262 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
1264 gsi2 = gsi_start_bb (cond_neg_bb);
1265 g = gimple_build_cond (GT_EXPR, ptrplusoff, ptri, NULL_TREE, NULL_TREE);
1266 gimple_set_location (g, loc);
1267 gsi_insert_after (&gsi2, g, GSI_NEW_STMT);
1269 tree t = gimple_build (&gsi, true, GSI_SAME_STMT,
1270 loc, NOP_EXPR, ssizetype, off);
1271 g = gimple_build_cond (GE_EXPR, t, ssize_int (0),
1272 NULL_TREE, NULL_TREE);
1274 gimple_set_location (g, loc);
1275 /* Replace the UBSAN_PTR with a GIMPLE_COND stmt. */
1276 gsi_replace (&gsi, g, false);
1277 return false;
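/* Summary of the check generated above, with BASE = (uintptr_t) ptr and
   RES = BASE + off (pseudo-GIMPLE; report () is a placeholder for the trap
   or the __ubsan_handle_pointer_overflow (&Lubsan_dataN, ptr, RES) call):

     off a constant:          if (BASE < -off) report ();    off negative
                              if (BASE >= -off) report ();   off positive
     off known non-negative:  if (RES < BASE) report ();
     off known negative:      if (RES > BASE) report ();
     sign of off unknown:     if ((ssizetype) off >= 0)
                                { if (RES < BASE) report (); }
                              else
                                { if (RES > BASE) report (); }

   i.e. the arithmetic is reported whenever BASE + off wraps around the
   address space in the direction implied by the sign of off.  */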
1281 /* Cached __ubsan_vptr_type_cache decl. */
1282 static GTY(()) tree ubsan_vptr_type_cache_decl;
1284 /* Expand UBSAN_VPTR internal call. The type is kept on the ckind
1285 argument which is a constant, because the middle-end treats pointer
1286 conversions as useless and therefore the type of the first argument
1287 could be changed to any other pointer type. */
1289 bool
1290 ubsan_expand_vptr_ifn (gimple_stmt_iterator *gsip)
1292 gimple_stmt_iterator gsi = *gsip;
1293 gimple *stmt = gsi_stmt (gsi);
1294 location_t loc = gimple_location (stmt);
1295 gcc_assert (gimple_call_num_args (stmt) == 5);
1296 tree op = gimple_call_arg (stmt, 0);
1297 tree vptr = gimple_call_arg (stmt, 1);
1298 tree str_hash = gimple_call_arg (stmt, 2);
1299 tree ti_decl_addr = gimple_call_arg (stmt, 3);
1300 tree ckind_tree = gimple_call_arg (stmt, 4);
1301 ubsan_null_ckind ckind = (ubsan_null_ckind) tree_to_uhwi (ckind_tree);
1302 tree type = TREE_TYPE (TREE_TYPE (ckind_tree));
1303 gimple *g;
1304 basic_block fallthru_bb = NULL;
1306 if (ckind == UBSAN_DOWNCAST_POINTER)
1308 /* Guard everything with if (op != NULL) { ... }. */
1309 basic_block then_bb;
1310 gimple_stmt_iterator cond_insert_point
1311 = create_cond_insert_point (gsip, false, false, true,
1312 &then_bb, &fallthru_bb);
1313 g = gimple_build_cond (NE_EXPR, op, build_zero_cst (TREE_TYPE (op)),
1314 NULL_TREE, NULL_TREE);
1315 gimple_set_location (g, loc);
1316 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
1317 *gsip = gsi_after_labels (then_bb);
1318 gsi_remove (&gsi, false);
1319 gsi_insert_before (gsip, stmt, GSI_NEW_STMT);
1320 gsi = *gsip;
1323 tree htype = TREE_TYPE (str_hash);
1324 tree cst = wide_int_to_tree (htype,
1325 wi::uhwi (((uint64_t) 0x9ddfea08 << 32)
1326 | 0xeb382d69, 64));
1327 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
1328 vptr, str_hash);
1329 gimple_set_location (g, loc);
1330 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1331 g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR,
1332 gimple_assign_lhs (g), cst);
1333 gimple_set_location (g, loc);
1334 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1335 tree t1 = gimple_assign_lhs (g);
1336 g = gimple_build_assign (make_ssa_name (htype), LSHIFT_EXPR,
1337 t1, build_int_cst (integer_type_node, 47));
1338 gimple_set_location (g, loc);
1339 tree t2 = gimple_assign_lhs (g);
1340 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1341 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
1342 vptr, t1);
1343 gimple_set_location (g, loc);
1344 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1345 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
1346 t2, gimple_assign_lhs (g));
1347 gimple_set_location (g, loc);
1348 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1349 g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR,
1350 gimple_assign_lhs (g), cst);
1351 gimple_set_location (g, loc);
1352 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1353 tree t3 = gimple_assign_lhs (g);
1354 g = gimple_build_assign (make_ssa_name (htype), LSHIFT_EXPR,
1355 t3, build_int_cst (integer_type_node, 47));
1356 gimple_set_location (g, loc);
1357 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1358 g = gimple_build_assign (make_ssa_name (htype), BIT_XOR_EXPR,
1359 t3, gimple_assign_lhs (g));
1360 gimple_set_location (g, loc);
1361 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1362 g = gimple_build_assign (make_ssa_name (htype), MULT_EXPR,
1363 gimple_assign_lhs (g), cst);
1364 gimple_set_location (g, loc);
1365 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1366 if (!useless_type_conversion_p (pointer_sized_int_node, htype))
1368 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1369 NOP_EXPR, gimple_assign_lhs (g));
1370 gimple_set_location (g, loc);
1371 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1373 tree hash = gimple_assign_lhs (g);
1375 if (ubsan_vptr_type_cache_decl == NULL_TREE)
1377 tree atype = build_array_type_nelts (pointer_sized_int_node, 128);
1378 tree array = build_decl (UNKNOWN_LOCATION, VAR_DECL,
1379 get_identifier ("__ubsan_vptr_type_cache"),
1380 atype);
1381 DECL_ARTIFICIAL (array) = 1;
1382 DECL_IGNORED_P (array) = 1;
1383 TREE_PUBLIC (array) = 1;
1384 TREE_STATIC (array) = 1;
1385 DECL_EXTERNAL (array) = 1;
1386 DECL_VISIBILITY (array) = VISIBILITY_DEFAULT;
1387 DECL_VISIBILITY_SPECIFIED (array) = 1;
1388 varpool_node::finalize_decl (array);
1389 ubsan_vptr_type_cache_decl = array;
1392 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1393 BIT_AND_EXPR, hash,
1394 build_int_cst (pointer_sized_int_node, 127));
1395 gimple_set_location (g, loc);
1396 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1398 tree c = build4_loc (loc, ARRAY_REF, pointer_sized_int_node,
1399 ubsan_vptr_type_cache_decl, gimple_assign_lhs (g),
1400 NULL_TREE, NULL_TREE);
1401 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1402 ARRAY_REF, c);
1403 gimple_set_location (g, loc);
1404 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1406 basic_block then_bb, fallthru2_bb;
1407 gimple_stmt_iterator cond_insert_point
1408 = create_cond_insert_point (gsip, false, false, true,
1409 &then_bb, &fallthru2_bb);
1410 g = gimple_build_cond (NE_EXPR, gimple_assign_lhs (g), hash,
1411 NULL_TREE, NULL_TREE);
1412 gimple_set_location (g, loc);
1413 gsi_insert_after (&cond_insert_point, g, GSI_NEW_STMT);
1414 *gsip = gsi_after_labels (then_bb);
1415 if (fallthru_bb == NULL)
1416 fallthru_bb = fallthru2_bb;
1418 tree data
1419 = ubsan_create_data ("__ubsan_vptr_data", 1, &loc,
1420 ubsan_type_descriptor (type), NULL_TREE, ti_decl_addr,
1421 build_int_cst (unsigned_char_type_node, ckind),
1422 NULL_TREE);
1423 data = build_fold_addr_expr_loc (loc, data);
1424 enum built_in_function bcode
1425 = (flag_sanitize_recover & SANITIZE_VPTR)
1426 ? BUILT_IN_UBSAN_HANDLE_DYNAMIC_TYPE_CACHE_MISS
1427 : BUILT_IN_UBSAN_HANDLE_DYNAMIC_TYPE_CACHE_MISS_ABORT;
1429 g = gimple_build_call (builtin_decl_explicit (bcode), 3, data, op, hash);
1430 gimple_set_location (g, loc);
1431 gsi_insert_before (gsip, g, GSI_SAME_STMT);
1433 /* Point GSI to next logical statement. */
1434 *gsip = gsi_start_bb (fallthru_bb);
1436 /* Get rid of the UBSAN_VPTR call from the IR. */
1437 unlink_stmt_vdef (stmt);
1438 gsi_remove (&gsi, true);
1439 return true;
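/* In outline (names illustrative): the dynamic vptr is hashed together
   with the static type hash using a CityHash-style mix of xors, 47-bit
   shifts and multiplications by 0x9ddfea08eb382d69, and the result is
   looked up in the 128-entry cache shared with libubsan:

     hash = mix (vptr ^ str_hash);
     if (__ubsan_vptr_type_cache[hash & 127] != hash)
       __ubsan_handle_dynamic_type_cache_miss (&Lubsan_dataN, op, hash);

   For UBSAN_DOWNCAST_POINTER the whole sequence is additionally guarded
   by op != 0, since downcasting a null pointer is not diagnosed.  */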
1442 /* Instrument a memory reference. BASE is the base of MEM, IS_LHS says
1443 whether the pointer is on the left hand side of the assignment. */
1445 static void
1446 instrument_mem_ref (tree mem, tree base, gimple_stmt_iterator *iter,
1447 bool is_lhs)
1449 enum ubsan_null_ckind ikind = is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF;
1450 unsigned int align = 0;
1451 if (sanitize_flags_p (SANITIZE_ALIGNMENT))
1453 align = min_align_of_type (TREE_TYPE (base));
1454 if (align <= 1)
1455 align = 0;
1457 if (align == 0)
1459 if (!sanitize_flags_p (SANITIZE_NULL))
1460 return;
1461 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1462 if (!ADDR_SPACE_GENERIC_P (as)
1463 && targetm.addr_space.zero_address_valid (as))
1464 return;
1466 tree t = TREE_OPERAND (base, 0);
1467 if (!POINTER_TYPE_P (TREE_TYPE (t)))
1468 return;
1469 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (base)) && mem != base)
1470 ikind = UBSAN_MEMBER_ACCESS;
1471 tree kind = build_int_cst (build_pointer_type (TREE_TYPE (base)), ikind);
1472 tree alignt = build_int_cst (pointer_sized_int_node, align);
1473 gcall *g = gimple_build_call_internal (IFN_UBSAN_NULL, 3, t, kind, alignt);
1474 gimple_set_location (g, gimple_location (gsi_stmt (*iter)));
1475 gsi_safe_insert_before (iter, g);
1478 /* Perform the pointer instrumentation. */
1480 static void
1481 instrument_null (gimple_stmt_iterator gsi, tree t, bool is_lhs)
1483 /* Handle also e.g. &s->i. */
1484 if (TREE_CODE (t) == ADDR_EXPR)
1485 t = TREE_OPERAND (t, 0);
1486 tree base = get_base_address (t);
1487 if (base != NULL_TREE
1488 && TREE_CODE (base) == MEM_REF
1489 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1490 instrument_mem_ref (t, base, &gsi, is_lhs);
1493 /* Instrument pointer arithmetics PTR p+ OFF. */
1495 static void
1496 instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree ptr, tree off)
1498 if (TYPE_PRECISION (sizetype) != POINTER_SIZE)
1499 return;
1500 gcall *g = gimple_build_call_internal (IFN_UBSAN_PTR, 2, ptr, off);
1501 gimple_set_location (g, gimple_location (gsi_stmt (*gsi)));
1502 gsi_safe_insert_before (gsi, g);
1505 /* Instrument pointer arithmetics if any. */
1507 static void
1508 maybe_instrument_pointer_overflow (gimple_stmt_iterator *gsi, tree t)
1510 if (TYPE_PRECISION (sizetype) != POINTER_SIZE)
1511 return;
1513 /* Handle also e.g. &s->i. */
1514 if (TREE_CODE (t) == ADDR_EXPR)
1515 t = TREE_OPERAND (t, 0);
1517 if (!handled_component_p (t) && TREE_CODE (t) != MEM_REF)
1518 return;
1520 poly_int64 bitsize, bitpos, bytepos;
1521 tree offset;
1522 machine_mode mode;
1523 int volatilep = 0, reversep, unsignedp = 0;
1524 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
1525 &unsignedp, &reversep, &volatilep);
1526 tree moff = NULL_TREE;
1528 bool decl_p = DECL_P (inner);
1529 tree base;
1530 if (decl_p)
1532 if ((VAR_P (inner)
1533 || TREE_CODE (inner) == PARM_DECL
1534 || TREE_CODE (inner) == RESULT_DECL)
1535 && DECL_REGISTER (inner))
1536 return;
1537 base = inner;
1538 /* If BASE is a fixed size automatic variable or
1539 global variable defined in the current TU and bitpos
1540 fits, don't instrument anything. */
1541 poly_int64 base_size;
1542 if (offset == NULL_TREE
1543 && maybe_ne (bitpos, 0)
1544 && (VAR_P (base)
1545 || TREE_CODE (base) == PARM_DECL
1546 || TREE_CODE (base) == RESULT_DECL)
1547 && poly_int_tree_p (DECL_SIZE (base), &base_size)
1548 && known_ge (base_size, bitpos)
1549 && (!is_global_var (base) || decl_binds_to_current_def_p (base)))
1550 return;
1552 else if (TREE_CODE (inner) == MEM_REF)
1554 base = TREE_OPERAND (inner, 0);
1555 if (TREE_CODE (base) == ADDR_EXPR
1556 && DECL_P (TREE_OPERAND (base, 0))
1557 && !TREE_ADDRESSABLE (TREE_OPERAND (base, 0))
1558 && !is_global_var (TREE_OPERAND (base, 0)))
1559 return;
1560 moff = TREE_OPERAND (inner, 1);
1561 if (integer_zerop (moff))
1562 moff = NULL_TREE;
1564 else
1565 return;
1567 if (!POINTER_TYPE_P (TREE_TYPE (base)) && !DECL_P (base))
1568 return;
1569 bytepos = bits_to_bytes_round_down (bitpos);
1570 if (offset == NULL_TREE && known_eq (bytepos, 0) && moff == NULL_TREE)
1571 return;
1573 tree base_addr = base;
1574 if (decl_p)
1575 base_addr = build1 (ADDR_EXPR,
1576 build_pointer_type (TREE_TYPE (base)), base);
1577 t = offset;
1578 if (maybe_ne (bytepos, 0))
1580 if (t)
1581 t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
1582 build_int_cst (TREE_TYPE (t), bytepos));
1583 else
1584 t = size_int (bytepos);
1586 if (moff)
1588 if (t)
1589 t = fold_build2 (PLUS_EXPR, TREE_TYPE (t), t,
1590 fold_convert (TREE_TYPE (t), moff));
1591 else
1592 t = fold_convert (sizetype, moff);
1594 gimple_seq seq, this_seq;
1595 t = force_gimple_operand (t, &seq, true, NULL_TREE);
1596 base_addr = force_gimple_operand (base_addr, &this_seq, true, NULL_TREE);
1597 gimple_seq_add_seq_without_update (&seq, this_seq);
1598 gsi_safe_insert_seq_before (gsi, seq);
1599 instrument_pointer_overflow (gsi, base_addr, t);
1602 /* Build an ubsan builtin call for the signed-integer-overflow
1603 sanitization. CODE says what kind of builtin we are building,
1604 LOC is a location, LHSTYPE is the type of LHS, OP0 and OP1
1605 are operands of the binary operation. */
1607 tree
1608 ubsan_build_overflow_builtin (tree_code code, location_t loc, tree lhstype,
1609 tree op0, tree op1, tree *datap)
1611 if (flag_sanitize_trap & SANITIZE_SI_OVERFLOW)
1612 return build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
1614 tree data;
1615 if (datap && *datap)
1616 data = *datap;
1617 else
1618 data = ubsan_create_data ("__ubsan_overflow_data", 1, &loc,
1619 ubsan_type_descriptor (lhstype), NULL_TREE,
1620 NULL_TREE);
1621 if (datap)
1622 *datap = data;
1623 enum built_in_function fn_code;
1625 switch (code)
1627 case PLUS_EXPR:
1628 fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
1629 ? BUILT_IN_UBSAN_HANDLE_ADD_OVERFLOW
1630 : BUILT_IN_UBSAN_HANDLE_ADD_OVERFLOW_ABORT;
1631 break;
1632 case MINUS_EXPR:
1633 fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
1634 ? BUILT_IN_UBSAN_HANDLE_SUB_OVERFLOW
1635 : BUILT_IN_UBSAN_HANDLE_SUB_OVERFLOW_ABORT;
1636 break;
1637 case MULT_EXPR:
1638 fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
1639 ? BUILT_IN_UBSAN_HANDLE_MUL_OVERFLOW
1640 : BUILT_IN_UBSAN_HANDLE_MUL_OVERFLOW_ABORT;
1641 break;
1642 case NEGATE_EXPR:
1643 fn_code = (flag_sanitize_recover & SANITIZE_SI_OVERFLOW)
1644 ? BUILT_IN_UBSAN_HANDLE_NEGATE_OVERFLOW
1645 : BUILT_IN_UBSAN_HANDLE_NEGATE_OVERFLOW_ABORT;
1646 break;
1647 default:
1648 gcc_unreachable ();
1650 tree fn = builtin_decl_explicit (fn_code);
1651 return build_call_expr_loc (loc, fn, 2 + (code != NEGATE_EXPR),
1652 build_fold_addr_expr_loc (loc, data),
1653 ubsan_encode_value (op0, UBSAN_ENCODE_VALUE_RTL),
1655 ? ubsan_encode_value (op1,
1656 UBSAN_ENCODE_VALUE_RTL)
1657 : NULL_TREE);
1660 /* Perform the signed integer instrumentation. GSI is the iterator
1661 pointing at statement we are trying to instrument. */
1663 static void
1664 instrument_si_overflow (gimple_stmt_iterator gsi)
1666 gimple *stmt = gsi_stmt (gsi);
1667 tree_code code = gimple_assign_rhs_code (stmt);
1668 tree lhs = gimple_assign_lhs (stmt);
1669 tree lhstype = TREE_TYPE (lhs);
1670 tree lhsinner = VECTOR_TYPE_P (lhstype) ? TREE_TYPE (lhstype) : lhstype;
1671 tree a, b;
1672 gimple *g;
1674 /* If this is not a signed operation, don't instrument anything here.
1675 Also punt on bit-fields. */
1676 if (!INTEGRAL_TYPE_P (lhsinner)
1677 || TYPE_OVERFLOW_WRAPS (lhsinner)
1678 || (TREE_CODE (lhsinner) != BITINT_TYPE
1679 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (lhsinner)),
1680 TYPE_PRECISION (lhsinner))))
1681 return;
1683 switch (code)
1685 case MINUS_EXPR:
1686 case PLUS_EXPR:
1687 case MULT_EXPR:
1688 /* Transform
1689 i = u {+,-,*} 5;
1690 into
1691 i = UBSAN_CHECK_{ADD,SUB,MUL} (u, 5); */
1692 a = gimple_assign_rhs1 (stmt);
1693 b = gimple_assign_rhs2 (stmt);
1694 g = gimple_build_call_internal (code == PLUS_EXPR
1695 ? IFN_UBSAN_CHECK_ADD
1696 : code == MINUS_EXPR
1697 ? IFN_UBSAN_CHECK_SUB
1698 : IFN_UBSAN_CHECK_MUL, 2, a, b);
1699 gimple_call_set_lhs (g, lhs);
1700 gsi_replace (&gsi, g, true);
1701 break;
1702 case NEGATE_EXPR:
1703 /* Represent i = -u;
1705 i = UBSAN_CHECK_SUB (0, u); */
1706 a = build_zero_cst (lhstype);
1707 b = gimple_assign_rhs1 (stmt);
1708 g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
1709 gimple_call_set_lhs (g, lhs);
1710 gsi_replace (&gsi, g, true);
1711 break;
1712 case ABS_EXPR:
1713 /* Transform i = ABS_EXPR<u>;
1714 into
1715 _N = UBSAN_CHECK_SUB (0, u);
1716 i = ABS_EXPR<_N>; */
1717 a = build_zero_cst (lhstype);
1718 b = gimple_assign_rhs1 (stmt);
1719 g = gimple_build_call_internal (IFN_UBSAN_CHECK_SUB, 2, a, b);
1720 a = make_ssa_name (lhstype);
1721 gimple_call_set_lhs (g, a);
1722 gimple_set_location (g, gimple_location (stmt));
1723 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1724 gimple_assign_set_rhs1 (stmt, a);
1725 update_stmt (stmt);
1726 break;
1727 default:
1728 break;
1732 /* Instrument loads from (non-bitfield) bool and C++ enum values
1733 to check if the memory value is outside of the range of the valid
1734 type values. */
1736 static void
1737 instrument_bool_enum_load (gimple_stmt_iterator *gsi)
1739 gimple *stmt = gsi_stmt (*gsi);
1740 tree rhs = gimple_assign_rhs1 (stmt);
1741 tree type = TREE_TYPE (rhs);
1742 tree minv = NULL_TREE, maxv = NULL_TREE;
1744 if (TREE_CODE (type) == BOOLEAN_TYPE
1745 && sanitize_flags_p (SANITIZE_BOOL))
1747 minv = boolean_false_node;
1748 maxv = boolean_true_node;
1750 else if (TREE_CODE (type) == ENUMERAL_TYPE
1751 && sanitize_flags_p (SANITIZE_ENUM)
1752 && TREE_TYPE (type) != NULL_TREE
1753 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
1754 && (TYPE_PRECISION (TREE_TYPE (type))
1755 < GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (type))))
1757 minv = TYPE_MIN_VALUE (TREE_TYPE (type));
1758 maxv = TYPE_MAX_VALUE (TREE_TYPE (type));
1760 else
1761 return;
1763 int modebitsize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
1764 poly_int64 bitsize, bitpos;
1765 tree offset;
1766 machine_mode mode;
1767 int volatilep = 0, reversep, unsignedp = 0;
1768 tree base = get_inner_reference (rhs, &bitsize, &bitpos, &offset, &mode,
1769 &unsignedp, &reversep, &volatilep);
1770 tree utype = build_nonstandard_integer_type (modebitsize, 1);
1772 if ((VAR_P (base) && DECL_HARD_REGISTER (base))
1773 || !multiple_p (bitpos, modebitsize)
1774 || maybe_ne (bitsize, modebitsize)
1775 || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (utype)) != modebitsize
1776 || TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
1777 return;
1779 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (rhs));
1780 if (as != TYPE_ADDR_SPACE (utype))
1781 utype = build_qualified_type (utype, TYPE_QUALS (utype)
1782 | ENCODE_QUAL_ADDR_SPACE (as));
1783 bool ends_bb = stmt_ends_bb_p (stmt);
1784 location_t loc = gimple_location (stmt);
1785 tree lhs = gimple_assign_lhs (stmt);
1786 tree ptype = build_pointer_type (TREE_TYPE (rhs));
1787 tree atype = reference_alias_ptr_type (rhs);
1788 gimple *g = gimple_build_assign (make_ssa_name (ptype),
1789 build_fold_addr_expr (rhs));
1790 gimple_set_location (g, loc);
1791 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1792 tree mem = build2 (MEM_REF, utype, gimple_assign_lhs (g),
1793 build_int_cst (atype, 0));
1794 tree urhs = make_ssa_name (utype);
1795 if (ends_bb)
1797 gimple_assign_set_lhs (stmt, urhs);
1798 g = gimple_build_assign (lhs, NOP_EXPR, urhs);
1799 gimple_set_location (g, loc);
1800 edge e = find_fallthru_edge (gimple_bb (stmt)->succs);
1801 gsi_insert_on_edge_immediate (e, g);
1802 gimple_assign_set_rhs_from_tree (gsi, mem);
1803 update_stmt (stmt);
1804 *gsi = gsi_for_stmt (g);
1805 g = stmt;
1807 else
1809 g = gimple_build_assign (urhs, mem);
1810 gimple_set_location (g, loc);
1811 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1813 minv = fold_convert (utype, minv);
1814 maxv = fold_convert (utype, maxv);
1815 if (!integer_zerop (minv))
1817 g = gimple_build_assign (make_ssa_name (utype), MINUS_EXPR, urhs, minv);
1818 gimple_set_location (g, loc);
1819 gsi_insert_before (gsi, g, GSI_SAME_STMT);
1822 gimple_stmt_iterator gsi2 = *gsi;
1823 basic_block then_bb, fallthru_bb;
1824 *gsi = create_cond_insert_point (gsi, true, false, true,
1825 &then_bb, &fallthru_bb);
1826 g = gimple_build_cond (GT_EXPR, gimple_assign_lhs (g),
1827 int_const_binop (MINUS_EXPR, maxv, minv),
1828 NULL_TREE, NULL_TREE);
1829 gimple_set_location (g, loc);
1830 gsi_insert_after (gsi, g, GSI_NEW_STMT);
1832 if (!ends_bb)
1834 gimple_assign_set_rhs_with_ops (&gsi2, NOP_EXPR, urhs);
1835 update_stmt (stmt);
1838 gsi2 = gsi_after_labels (then_bb);
1839 if (flag_sanitize_trap & (TREE_CODE (type) == BOOLEAN_TYPE
1840 ? SANITIZE_BOOL : SANITIZE_ENUM))
1841 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
1842 else
1844 tree data = ubsan_create_data ("__ubsan_invalid_value_data", 1, &loc,
1845 ubsan_type_descriptor (type), NULL_TREE,
1846 NULL_TREE);
1847 data = build_fold_addr_expr_loc (loc, data);
1848 enum built_in_function bcode
1849 = (flag_sanitize_recover & (TREE_CODE (type) == BOOLEAN_TYPE
1850 ? SANITIZE_BOOL : SANITIZE_ENUM))
1851 ? BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE
1852 : BUILT_IN_UBSAN_HANDLE_LOAD_INVALID_VALUE_ABORT;
1853 tree fn = builtin_decl_explicit (bcode);
1855 tree val = ubsan_encode_value (urhs, UBSAN_ENCODE_VALUE_GIMPLE);
1856 val = force_gimple_operand_gsi (&gsi2, val, true, NULL_TREE, true,
1857 GSI_SAME_STMT);
1858 g = gimple_build_call (fn, 2, data, val);
1860 gimple_set_location (g, loc);
1861 gsi_insert_before (&gsi2, g, GSI_SAME_STMT);
1862 ubsan_create_edge (g);
1863 *gsi = gsi_for_stmt (stmt);
1866 /* Determine if we can propagate the given LOCATION to the ubsan_data descriptor
1867 to use new-style handlers. Libubsan uses heuristics to distinguish between old
1868 and new styles and relies on these properties of the filename:
1870 a) Location's filename must not be NULL.
1871 b) Location's filename must not be equal to "".
1872 c) Location's filename must not be equal to "\1".
1873 d) The first two bytes of the filename must not contain the '\xff' byte. */
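/* (Editorial note, not part of the original source: when this predicate
   returns true, callers pass LOCATION to ubsan_create_data so the emitted
   data descriptor carries the file/line/column and libubsan's new-style
   handlers can print it themselves; otherwise the descriptor is emitted
   without a location.)  */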
1875 static bool
1876 ubsan_use_new_style_p (location_t loc)
1878 if (loc == UNKNOWN_LOCATION)
1879 return false;
1881 expanded_location xloc = expand_location (loc);
1882 if (xloc.file == NULL || startswith (xloc.file, "\1")
1883 || xloc.file[0] == '\0' || xloc.file[0] == '\xff'
1884 || xloc.file[1] == '\xff')
1885 return false;
1887 return true;
1890 /* Instrument floating-point to integer conversion. TYPE is the integer type
1891 of the destination, EXPR is the floating-point expression. */
1893 tree
1894 ubsan_instrument_float_cast (location_t loc, tree type, tree expr)
1896 tree expr_type = TREE_TYPE (expr);
1897 tree t, tt, fn, min, max;
1898 machine_mode mode = TYPE_MODE (expr_type);
1899 int prec = TYPE_PRECISION (type);
1900 bool uns_p = TYPE_UNSIGNED (type);
1901 if (loc == UNKNOWN_LOCATION)
1902 loc = input_location;
1904 /* Float to integer conversion first truncates toward zero, so
1905 even signed char c = 127.875f; is not problematic.
1906 Therefore, we should complain only if EXPR is unordered, less than
1907 or equal to TYPE_MIN_VALUE - 1.0, or greater than or equal to
1908 TYPE_MAX_VALUE + 1.0. */
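/* (Editorial worked example, not part of the original source: for TYPE
   signed char and EXPR_TYPE float, prec is 8, so the code below computes
   max = 2**7 = 128.0 and min = -2**7 - 1.0 = -129.0, and the emitted check
   is EXPR <= -129.0 || EXPR >= 128.0, using unordered comparisons when
   NaNs are honored.)  */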
1909 if (REAL_MODE_FORMAT (mode)->b == 2)
1911 /* For maximum, TYPE_MAX_VALUE might not be representable
1912 in EXPR_TYPE, e.g. if TYPE is 64-bit long long and
1913 EXPR_TYPE is IEEE single float, but TYPE_MAX_VALUE + 1.0 is
1914 either representable or infinity. */
1915 REAL_VALUE_TYPE maxval = dconst1;
1916 SET_REAL_EXP (&maxval, REAL_EXP (&maxval) + prec - !uns_p);
1917 real_convert (&maxval, mode, &maxval);
1918 max = build_real (expr_type, maxval);
1920 /* For unsigned, assume -1.0 is always representable. */
1921 if (uns_p)
1922 min = build_minus_one_cst (expr_type);
1923 else
1925 /* TYPE_MIN_VALUE is generally representable (or -inf),
1926 but TYPE_MIN_VALUE - 1.0 might not be. */
1927 REAL_VALUE_TYPE minval = dconstm1, minval2;
1928 SET_REAL_EXP (&minval, REAL_EXP (&minval) + prec - 1);
1929 real_convert (&minval, mode, &minval);
1930 real_arithmetic (&minval2, MINUS_EXPR, &minval, &dconst1);
1931 real_convert (&minval2, mode, &minval2);
1932 if (real_compare (EQ_EXPR, &minval, &minval2)
1933 && !real_isinf (&minval))
1935 /* If TYPE_MIN_VALUE - 1.0 is not representable and
1936 rounds to TYPE_MIN_VALUE, we need to subtract
1937 more. As REAL_MODE_FORMAT (mode)->p is the number
1938 of base digits, we want to subtract a number that
1939 will be 1 << (REAL_MODE_FORMAT (mode)->p - 1)
1940 times smaller than minval. */
1941 minval2 = dconst1;
1942 gcc_assert (prec > REAL_MODE_FORMAT (mode)->p);
1943 SET_REAL_EXP (&minval2,
1944 REAL_EXP (&minval2) + prec - 1
1945 - REAL_MODE_FORMAT (mode)->p + 1);
1946 real_arithmetic (&minval2, MINUS_EXPR, &minval, &minval2);
1947 real_convert (&minval2, mode, &minval2);
1949 min = build_real (expr_type, minval2);
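/* (Editorial worked example, not part of the original source: for long long
   converted from IEEE single precision float, prec is 64 and p is 24;
   TYPE_MIN_VALUE is -0x1p63, which is representable, but -0x1p63 - 1.0
   rounds back to -0x1p63, so the adjustment above subtracts 2**40 instead,
   giving min = -0x1p63 - 0x1p40, the next representable float below
   TYPE_MIN_VALUE.)  */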
1952 else if (REAL_MODE_FORMAT (mode)->b == 10)
1954 /* The buffer must hold up to 34 decimal digits for _Decimal128, plus a
1955 minus sign, decimal point, 'e' and the exponent. */
1956 char buf[64];
1957 int p = REAL_MODE_FORMAT (mode)->p;
1958 REAL_VALUE_TYPE maxval, minval;
1960 /* Use mpfr_snprintf rounding to compute the smallest
1961 representable decimal number greater than or equal to
1962 1 << (prec - !uns_p). */
1963 auto_mpfr m (prec + 2);
1964 mpfr_set_ui_2exp (m, 1, prec - !uns_p, MPFR_RNDN);
1965 mpfr_snprintf (buf, sizeof buf, "%.*RUe", p - 1, (mpfr_srcptr) m);
1966 decimal_real_from_string (&maxval, buf);
1967 max = build_real (expr_type, maxval);
1969 /* For unsigned, assume -1.0 is always representable. */
1970 if (uns_p)
1971 min = build_minus_one_cst (expr_type);
1972 else
1974 /* Use mpfr_snprintf rounding to compute the largest
1975 representable decimal number less than or equal to
1976 (-1 << (prec - 1)) - 1. */
1977 mpfr_set_si_2exp (m, -1, prec - 1, MPFR_RNDN);
1978 mpfr_sub_ui (m, m, 1, MPFR_RNDN);
1979 mpfr_snprintf (buf, sizeof buf, "%.*RDe", p - 1, (mpfr_srcptr) m);
1980 decimal_real_from_string (&minval, buf);
1981 min = build_real (expr_type, minval);
1984 else
1985 return NULL_TREE;
1987 if (HONOR_NANS (mode))
1989 t = fold_build2 (UNLE_EXPR, boolean_type_node, expr, min);
1990 tt = fold_build2 (UNGE_EXPR, boolean_type_node, expr, max);
1992 else
1994 t = fold_build2 (LE_EXPR, boolean_type_node, expr, min);
1995 tt = fold_build2 (GE_EXPR, boolean_type_node, expr, max);
1997 t = fold_build2 (TRUTH_OR_EXPR, boolean_type_node, t, tt);
1998 if (integer_zerop (t))
1999 return NULL_TREE;
2001 if (flag_sanitize_trap & SANITIZE_FLOAT_CAST)
2002 fn = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_TRAP), 0);
2003 else
2005 location_t *loc_ptr = NULL;
2006 unsigned num_locations = 0;
2007 /* Figure out if we can propagate location to ubsan_data and use new
2008 style handlers in libubsan. */
2009 if (ubsan_use_new_style_p (loc))
2011 loc_ptr = &loc;
2012 num_locations = 1;
2014 /* Create the __ubsan_handle_float_cast_overflow fn call. */
2015 tree data = ubsan_create_data ("__ubsan_float_cast_overflow_data",
2016 num_locations, loc_ptr,
2017 ubsan_type_descriptor (expr_type),
2018 ubsan_type_descriptor (type), NULL_TREE,
2019 NULL_TREE);
2020 enum built_in_function bcode
2021 = (flag_sanitize_recover & SANITIZE_FLOAT_CAST)
2022 ? BUILT_IN_UBSAN_HANDLE_FLOAT_CAST_OVERFLOW
2023 : BUILT_IN_UBSAN_HANDLE_FLOAT_CAST_OVERFLOW_ABORT;
2024 fn = builtin_decl_explicit (bcode);
2025 fn = build_call_expr_loc (loc, fn, 2,
2026 build_fold_addr_expr_loc (loc, data),
2027 ubsan_encode_value (expr));
2030 return fold_build3 (COND_EXPR, void_type_node, t, fn, integer_zero_node);
2033 /* Instrument values passed to function arguments with nonnull attribute. */
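/* (Illustrative example added by the editor, not part of the original source:
   with -fsanitize=nonnull-attribute, for

     extern void f (void *p) __attribute__ ((nonnull));
     f (q);

   the call is preceded by a check that conceptually does

     if (q == 0)
       __ubsan_handle_nonnull_arg (&data);

   or __builtin_trap () under -fsanitize-trap.)  */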
2035 static void
2036 instrument_nonnull_arg (gimple_stmt_iterator *gsi)
2038 gimple *stmt = gsi_stmt (*gsi);
2039 location_t loc[2];
2040 /* infer_nonnull_range needs flag_delete_null_pointer_checks set,
2041 while for nonnull sanitization the flag is clear (zero). */
2042 int save_flag_delete_null_pointer_checks = flag_delete_null_pointer_checks;
2043 flag_delete_null_pointer_checks = 1;
2044 loc[0] = gimple_location (stmt);
2045 loc[1] = UNKNOWN_LOCATION;
2046 for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
2048 tree arg = gimple_call_arg (stmt, i);
2049 if (POINTER_TYPE_P (TREE_TYPE (arg))
2050 && infer_nonnull_range_by_attribute (stmt, arg))
2052 gimple *g;
2053 if (!is_gimple_val (arg))
2055 g = gimple_build_assign (make_ssa_name (TREE_TYPE (arg)), arg);
2056 gimple_set_location (g, loc[0]);
2057 gsi_safe_insert_before (gsi, g);
2058 arg = gimple_assign_lhs (g);
2061 basic_block then_bb, fallthru_bb;
2062 *gsi = create_cond_insert_point (gsi, true, false, true,
2063 &then_bb, &fallthru_bb);
2064 g = gimple_build_cond (EQ_EXPR, arg,
2065 build_zero_cst (TREE_TYPE (arg)),
2066 NULL_TREE, NULL_TREE);
2067 gimple_set_location (g, loc[0]);
2068 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2070 *gsi = gsi_after_labels (then_bb);
2071 if (flag_sanitize_trap & SANITIZE_NONNULL_ATTRIBUTE)
2072 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
2073 else
2075 tree data = ubsan_create_data ("__ubsan_nonnull_arg_data",
2076 2, loc, NULL_TREE,
2077 build_int_cst (integer_type_node,
2078 i + 1),
2079 NULL_TREE);
2080 data = build_fold_addr_expr_loc (loc[0], data);
2081 enum built_in_function bcode
2082 = (flag_sanitize_recover & SANITIZE_NONNULL_ATTRIBUTE)
2083 ? BUILT_IN_UBSAN_HANDLE_NONNULL_ARG
2084 : BUILT_IN_UBSAN_HANDLE_NONNULL_ARG_ABORT;
2085 tree fn = builtin_decl_explicit (bcode);
2087 g = gimple_build_call (fn, 1, data);
2089 gimple_set_location (g, loc[0]);
2090 gsi_safe_insert_before (gsi, g);
2091 ubsan_create_edge (g);
2093 *gsi = gsi_for_stmt (stmt);
2095 flag_delete_null_pointer_checks = save_flag_delete_null_pointer_checks;
2098 /* Instrument returns in functions with returns_nonnull attribute. */
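/* (Illustrative example added by the editor, not part of the original source:
   with -fsanitize=returns-nonnull-attribute, for

     void *g (void) __attribute__ ((returns_nonnull));

   every 'return p;' in g is preceded by a check that conceptually does

     if (p == 0)
       __ubsan_handle_nonnull_return_v1 (&data, &data2);

   or __builtin_trap () under -fsanitize-trap.)  */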
2100 static void
2101 instrument_nonnull_return (gimple_stmt_iterator *gsi)
2103 greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
2104 location_t loc[2];
2105 tree arg = gimple_return_retval (stmt);
2106 /* infer_nonnull_range needs flag_delete_null_pointer_checks set,
2107 while for nonnull return sanitization the flag is clear (zero). */
2108 int save_flag_delete_null_pointer_checks = flag_delete_null_pointer_checks;
2109 flag_delete_null_pointer_checks = 1;
2110 loc[0] = gimple_location (stmt);
2111 loc[1] = UNKNOWN_LOCATION;
2112 if (arg
2113 && POINTER_TYPE_P (TREE_TYPE (arg))
2114 && is_gimple_val (arg)
2115 && infer_nonnull_range_by_attribute (stmt, arg))
2117 basic_block then_bb, fallthru_bb;
2118 *gsi = create_cond_insert_point (gsi, true, false, true,
2119 &then_bb, &fallthru_bb);
2120 gimple *g = gimple_build_cond (EQ_EXPR, arg,
2121 build_zero_cst (TREE_TYPE (arg)),
2122 NULL_TREE, NULL_TREE);
2123 gimple_set_location (g, loc[0]);
2124 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2126 *gsi = gsi_after_labels (then_bb);
2127 if (flag_sanitize_trap & SANITIZE_RETURNS_NONNULL_ATTRIBUTE)
2128 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
2129 else
2131 tree data = ubsan_create_data ("__ubsan_nonnull_return_data",
2132 1, &loc[1], NULL_TREE, NULL_TREE);
2133 data = build_fold_addr_expr_loc (loc[0], data);
2134 tree data2 = ubsan_create_data ("__ubsan_nonnull_return_data",
2135 1, &loc[0], NULL_TREE, NULL_TREE);
2136 data2 = build_fold_addr_expr_loc (loc[0], data2);
2137 enum built_in_function bcode
2138 = (flag_sanitize_recover & SANITIZE_RETURNS_NONNULL_ATTRIBUTE)
2139 ? BUILT_IN_UBSAN_HANDLE_NONNULL_RETURN_V1
2140 : BUILT_IN_UBSAN_HANDLE_NONNULL_RETURN_V1_ABORT;
2141 tree fn = builtin_decl_explicit (bcode);
2143 g = gimple_build_call (fn, 2, data, data2);
2145 gimple_set_location (g, loc[0]);
2146 gsi_safe_insert_before (gsi, g);
2147 ubsan_create_edge (g);
2148 *gsi = gsi_for_stmt (stmt);
2150 flag_delete_null_pointer_checks = save_flag_delete_null_pointer_checks;
2153 /* Instrument memory references. Here we check whether the pointer
2154 points to an out-of-bounds location. */
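/* (Illustrative sketch added by the editor, not part of the original source:
   with -fsanitize=object-size, a dereference such as

     struct S { int i; } *p;
     ... p->i ...

   is preceded by an internal call of roughly the form

     UBSAN_OBJECT_SIZE (&p->i, (char *) &p->i + sizeof (int) - (char *) p,
                        __builtin_dynamic_object_size (p, 0), kind);

   which is expanded later (ubsan_expand_objsize_ifn) into a range check
   plus a runtime diagnostic for accesses that do not fit in the object.)  */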
2156 static void
2157 instrument_object_size (gimple_stmt_iterator *gsi, tree t, bool is_lhs)
2159 gimple *stmt = gsi_stmt (*gsi);
2160 location_t loc = gimple_location (stmt);
2161 tree type;
2162 tree index = NULL_TREE;
2163 HOST_WIDE_INT size_in_bytes;
2165 type = TREE_TYPE (t);
2166 if (VOID_TYPE_P (type))
2167 return;
2169 switch (TREE_CODE (t))
2171 case COMPONENT_REF:
2172 if (TREE_CODE (t) == COMPONENT_REF
2173 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
2175 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
2176 t = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (t, 0),
2177 repr, TREE_OPERAND (t, 2));
2179 break;
2180 case ARRAY_REF:
2181 index = TREE_OPERAND (t, 1);
2182 break;
2183 case INDIRECT_REF:
2184 case MEM_REF:
2185 case VAR_DECL:
2186 case PARM_DECL:
2187 case RESULT_DECL:
2188 break;
2189 default:
2190 return;
2193 size_in_bytes = int_size_in_bytes (type);
2194 if (size_in_bytes <= 0)
2195 return;
2197 poly_int64 bitsize, bitpos;
2198 tree offset;
2199 machine_mode mode;
2200 int volatilep = 0, reversep, unsignedp = 0;
2201 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
2202 &unsignedp, &reversep, &volatilep);
2204 if (!multiple_p (bitpos, BITS_PER_UNIT)
2205 || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
2206 return;
2208 bool decl_p = DECL_P (inner);
2209 tree base;
2210 if (decl_p)
2212 if ((VAR_P (inner)
2213 || TREE_CODE (inner) == PARM_DECL
2214 || TREE_CODE (inner) == RESULT_DECL)
2215 && DECL_REGISTER (inner))
2216 return;
2217 if (t == inner && !is_global_var (t))
2218 return;
2219 base = inner;
2221 else if (TREE_CODE (inner) == MEM_REF)
2222 base = TREE_OPERAND (inner, 0);
2223 else
2224 return;
2225 tree ptr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
2227 while (TREE_CODE (base) == SSA_NAME)
2229 gimple *def_stmt = SSA_NAME_DEF_STMT (base);
2230 if (gimple_assign_ssa_name_copy_p (def_stmt)
2231 || (gimple_assign_cast_p (def_stmt)
2232 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
2233 || (is_gimple_assign (def_stmt)
2234 && gimple_assign_rhs_code (def_stmt) == POINTER_PLUS_EXPR))
2236 tree rhs1 = gimple_assign_rhs1 (def_stmt);
2237 if (TREE_CODE (rhs1) == SSA_NAME
2238 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
2239 break;
2240 else
2241 base = rhs1;
2243 else
2244 break;
2247 if (!POINTER_TYPE_P (TREE_TYPE (base)) && !DECL_P (base))
2248 return;
2250 tree sizet;
2251 tree base_addr = base;
2252 gimple *bos_stmt = NULL;
2253 gimple_seq seq = NULL;
2254 if (decl_p)
2255 base_addr = build1 (ADDR_EXPR,
2256 build_pointer_type (TREE_TYPE (base)), base);
2257 if (compute_builtin_object_size (base_addr, OST_DYNAMIC, &sizet))
2259 else if (optimize)
2261 if (LOCATION_LOCUS (loc) == UNKNOWN_LOCATION)
2262 loc = input_location;
2263 /* Generate __builtin_dynamic_object_size call. */
2264 sizet = builtin_decl_explicit (BUILT_IN_DYNAMIC_OBJECT_SIZE);
2265 sizet = build_call_expr_loc (loc, sizet, 2, base_addr,
2266 integer_zero_node);
2267 sizet = force_gimple_operand (sizet, &seq, false, NULL_TREE);
2268 /* If the call above didn't end up being an integer constant, go one
2269 statement back and get the __builtin_dynamic_object_size stmt. Save it,
2270 we might need it later. */
2271 if (SSA_VAR_P (sizet))
2272 bos_stmt = gsi_stmt (gsi_last (seq));
2274 else
2275 return;
2277 /* Generate UBSAN_OBJECT_SIZE (ptr, ptr+sizeof(*ptr)-base, objsize, ckind)
2278 call. */
2279 /* ptr + sizeof (*ptr) - base */
2280 t = fold_build2 (MINUS_EXPR, sizetype,
2281 fold_convert (pointer_sized_int_node, ptr),
2282 fold_convert (pointer_sized_int_node, base_addr));
2283 t = fold_build2 (PLUS_EXPR, sizetype, t, TYPE_SIZE_UNIT (type));
2285 /* Perhaps we can omit the check. */
2286 if (TREE_CODE (t) == INTEGER_CST
2287 && TREE_CODE (sizet) == INTEGER_CST
2288 && tree_int_cst_le (t, sizet))
2289 return;
2291 if (index != NULL_TREE
2292 && TREE_CODE (index) == SSA_NAME
2293 && TREE_CODE (sizet) == INTEGER_CST)
2295 gimple *def = SSA_NAME_DEF_STMT (index);
2296 if (is_gimple_assign (def)
2297 && gimple_assign_rhs_code (def) == BIT_AND_EXPR
2298 && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
2300 tree cst = gimple_assign_rhs2 (def);
2301 tree sz = fold_build2 (EXACT_DIV_EXPR, sizetype, sizet,
2302 TYPE_SIZE_UNIT (type));
2303 if (tree_int_cst_sgn (cst) >= 0
2304 && tree_int_cst_lt (cst, sz))
2305 return;
2309 if (DECL_P (base)
2310 && decl_function_context (base) == current_function_decl
2311 && !TREE_ADDRESSABLE (base))
2312 mark_addressable (base);
2314 /* We have to emit the check. */
2315 gimple_seq this_seq;
2316 t = force_gimple_operand (t, &this_seq, true, NULL_TREE);
2317 gimple_seq_add_seq_without_update (&seq, this_seq);
2318 ptr = force_gimple_operand (ptr, &this_seq, true, NULL_TREE);
2319 gimple_seq_add_seq_without_update (&seq, this_seq);
2320 gsi_safe_insert_seq_before (gsi, seq);
2322 if (bos_stmt
2323 && gimple_call_builtin_p (bos_stmt, BUILT_IN_DYNAMIC_OBJECT_SIZE))
2324 ubsan_create_edge (bos_stmt);
2326 tree ckind = build_int_cst (unsigned_char_type_node,
2327 is_lhs ? UBSAN_STORE_OF : UBSAN_LOAD_OF);
2328 gimple *g = gimple_build_call_internal (IFN_UBSAN_OBJECT_SIZE, 4,
2329 ptr, t, sizet, ckind);
2330 gimple_set_location (g, loc);
2331 gsi_safe_insert_before (gsi, g);
2334 /* Instrument values passed to builtin functions. */
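/* (Illustrative example added by the editor, not part of the original source:
   with -fsanitize=builtin, a call such as __builtin_ctz (x) is preceded by

     if (x == 0)
       __ubsan_handle_invalid_builtin (&data);

   or __builtin_trap () under -fsanitize-trap, because the result of
   __builtin_clz and __builtin_ctz is undefined for a zero argument.)  */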
2336 static void
2337 instrument_builtin (gimple_stmt_iterator *gsi)
2339 gimple *stmt = gsi_stmt (*gsi);
2340 location_t loc = gimple_location (stmt);
2341 tree arg;
2342 enum built_in_function fcode
2343 = DECL_FUNCTION_CODE (gimple_call_fndecl (stmt));
2344 int kind = 0;
2345 switch (fcode)
2347 CASE_INT_FN (BUILT_IN_CLZ):
2348 kind = 1;
2349 gcc_fallthrough ();
2350 CASE_INT_FN (BUILT_IN_CTZ):
2351 arg = gimple_call_arg (stmt, 0);
2352 if (!integer_nonzerop (arg))
2354 gimple *g;
2355 if (!is_gimple_val (arg))
2357 g = gimple_build_assign (make_ssa_name (TREE_TYPE (arg)), arg);
2358 gimple_set_location (g, loc);
2359 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2360 arg = gimple_assign_lhs (g);
2363 basic_block then_bb, fallthru_bb;
2364 *gsi = create_cond_insert_point (gsi, true, false, true,
2365 &then_bb, &fallthru_bb);
2366 g = gimple_build_cond (EQ_EXPR, arg,
2367 build_zero_cst (TREE_TYPE (arg)),
2368 NULL_TREE, NULL_TREE);
2369 gimple_set_location (g, loc);
2370 gsi_insert_after (gsi, g, GSI_NEW_STMT);
2372 *gsi = gsi_after_labels (then_bb);
2373 if (flag_sanitize_trap & SANITIZE_BUILTIN)
2374 g = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
2375 else
2377 tree t = build_int_cst (unsigned_char_type_node, kind);
2378 tree data = ubsan_create_data ("__ubsan_builtin_data",
2379 1, &loc, NULL_TREE, t, NULL_TREE);
2380 data = build_fold_addr_expr_loc (loc, data);
2381 enum built_in_function bcode
2382 = (flag_sanitize_recover & SANITIZE_BUILTIN)
2383 ? BUILT_IN_UBSAN_HANDLE_INVALID_BUILTIN
2384 : BUILT_IN_UBSAN_HANDLE_INVALID_BUILTIN_ABORT;
2385 tree fn = builtin_decl_explicit (bcode);
2387 g = gimple_build_call (fn, 1, data);
2389 gimple_set_location (g, loc);
2390 gsi_insert_before (gsi, g, GSI_SAME_STMT);
2391 ubsan_create_edge (g);
2393 *gsi = gsi_for_stmt (stmt);
2394 break;
2395 default:
2396 break;
2400 namespace {
2402 const pass_data pass_data_ubsan =
2404 GIMPLE_PASS, /* type */
2405 "ubsan", /* name */
2406 OPTGROUP_NONE, /* optinfo_flags */
2407 TV_TREE_UBSAN, /* tv_id */
2408 ( PROP_cfg | PROP_ssa ), /* properties_required */
2409 0, /* properties_provided */
2410 0, /* properties_destroyed */
2411 0, /* todo_flags_start */
2412 TODO_update_ssa, /* todo_flags_finish */
2415 class pass_ubsan : public gimple_opt_pass
2417 public:
2418 pass_ubsan (gcc::context *ctxt)
2419 : gimple_opt_pass (pass_data_ubsan, ctxt)
2422 /* opt_pass methods: */
2423 bool gate (function *) final override
2425 return sanitize_flags_p ((SANITIZE_NULL | SANITIZE_SI_OVERFLOW
2426 | SANITIZE_BOOL | SANITIZE_ENUM
2427 | SANITIZE_ALIGNMENT
2428 | SANITIZE_NONNULL_ATTRIBUTE
2429 | SANITIZE_RETURNS_NONNULL_ATTRIBUTE
2430 | SANITIZE_OBJECT_SIZE
2431 | SANITIZE_POINTER_OVERFLOW
2432 | SANITIZE_BUILTIN));
2435 unsigned int execute (function *) final override;
2437 }; // class pass_ubsan
2439 unsigned int
2440 pass_ubsan::execute (function *fun)
2442 basic_block bb;
2443 gimple_stmt_iterator gsi;
2444 unsigned int ret = 0;
2446 initialize_sanitizer_builtins ();
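/* (Editorial note, not part of the original source: the loop below walks
   every statement and adds the checks requested by the enabled -fsanitize=
   flags.  The instrumentation helpers may split the current basic block,
   which is why BB is refreshed from the current statement after each of
   them.)  */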
2448 FOR_EACH_BB_FN (bb, fun)
2450 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2452 gimple *stmt = gsi_stmt (gsi);
2453 if (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
2455 gsi_next (&gsi);
2456 continue;
2459 if ((sanitize_flags_p (SANITIZE_SI_OVERFLOW, fun->decl))
2460 && is_gimple_assign (stmt))
2461 instrument_si_overflow (gsi);
2463 if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT, fun->decl))
2465 if (gimple_store_p (stmt))
2466 instrument_null (gsi, gimple_get_lhs (stmt), true);
2467 if (gimple_assign_single_p (stmt))
2468 instrument_null (gsi, gimple_assign_rhs1 (stmt), false);
2469 if (is_gimple_call (stmt))
2471 unsigned args_num = gimple_call_num_args (stmt);
2472 for (unsigned i = 0; i < args_num; ++i)
2474 tree arg = gimple_call_arg (stmt, i);
2475 if (is_gimple_reg (arg) || is_gimple_min_invariant (arg))
2476 continue;
2477 instrument_null (gsi, arg, false);
2482 if (sanitize_flags_p (SANITIZE_BOOL | SANITIZE_ENUM, fun->decl)
2483 && gimple_assign_load_p (stmt))
2485 instrument_bool_enum_load (&gsi);
2486 bb = gimple_bb (stmt);
2489 if (sanitize_flags_p (SANITIZE_NONNULL_ATTRIBUTE, fun->decl)
2490 && is_gimple_call (stmt)
2491 && !gimple_call_internal_p (stmt))
2493 instrument_nonnull_arg (&gsi);
2494 bb = gimple_bb (stmt);
2497 if (sanitize_flags_p (SANITIZE_BUILTIN, fun->decl)
2498 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2500 instrument_builtin (&gsi);
2501 bb = gimple_bb (stmt);
2504 if (sanitize_flags_p (SANITIZE_RETURNS_NONNULL_ATTRIBUTE, fun->decl)
2505 && gimple_code (stmt) == GIMPLE_RETURN)
2507 instrument_nonnull_return (&gsi);
2508 bb = gimple_bb (stmt);
2511 if (sanitize_flags_p (SANITIZE_OBJECT_SIZE, fun->decl))
2513 if (gimple_store_p (stmt))
2514 instrument_object_size (&gsi, gimple_get_lhs (stmt), true);
2515 if (gimple_assign_load_p (stmt))
2516 instrument_object_size (&gsi, gimple_assign_rhs1 (stmt),
2517 false);
2518 if (is_gimple_call (stmt))
2520 unsigned args_num = gimple_call_num_args (stmt);
2521 for (unsigned i = 0; i < args_num; ++i)
2523 tree arg = gimple_call_arg (stmt, i);
2524 if (is_gimple_reg (arg) || is_gimple_min_invariant (arg))
2525 continue;
2526 instrument_object_size (&gsi, arg, false);
2531 if (sanitize_flags_p (SANITIZE_POINTER_OVERFLOW, fun->decl))
2533 if (is_gimple_assign (stmt)
2534 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2535 instrument_pointer_overflow (&gsi,
2536 gimple_assign_rhs1 (stmt),
2537 gimple_assign_rhs2 (stmt));
2538 if (gimple_store_p (stmt))
2539 maybe_instrument_pointer_overflow (&gsi,
2540 gimple_get_lhs (stmt));
2541 if (gimple_assign_single_p (stmt))
2542 maybe_instrument_pointer_overflow (&gsi,
2543 gimple_assign_rhs1 (stmt));
2544 if (is_gimple_call (stmt))
2546 unsigned args_num = gimple_call_num_args (stmt);
2547 for (unsigned i = 0; i < args_num; ++i)
2549 tree arg = gimple_call_arg (stmt, i);
2550 if (is_gimple_reg (arg))
2551 continue;
2552 maybe_instrument_pointer_overflow (&gsi, arg);
2557 gsi_next (&gsi);
2559 if (gimple_purge_dead_eh_edges (bb))
2560 ret = TODO_cleanup_cfg;
2562 return ret;
2565 } // anon namespace
2567 gimple_opt_pass *
2568 make_pass_ubsan (gcc::context *ctxt)
2570 return new pass_ubsan (ctxt);
2573 #include "gt-ubsan.h"