/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2015 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hash-table.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "calls.h"
#include "varasm.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "asan.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "output.h"
#include "tm_p.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "cfgloop.h"
#include "gimple-builder.h"
#include "ubsan.h"
#include "params.h"
#include "builtins.h"
#include "fnmatch.h"

/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
	 __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
	 if ((X & 7) + N - 1 >= ShadowValue)
	   __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted into the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   and provides the __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
	   the next slot be 32 bytes aligned; this one is called Partial
	   Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
	    'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

     1/ The first 8 bytes contain a magical asan number that is always
     0x41B58AB3.

     2/ The following 8 bytes contain a pointer to a string (to be
     parsed at runtime by the runtime asan library), whose format is
     the following:

      "<function-name> <space> <num-of-variables-on-the-stack>
      (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
      <length-of-var-in-bytes> ){n} "

	where '(...){n}' means the content inside the parentheses occurs 'n'
	times, with 'n' being the number of variables on the stack.

     3/ The following 8 bytes contain the PC of the current function which
     will be used by the run-time library to print an error message.

     4/ The following 8 bytes are reserved for internal use by the run-time.

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that 24 bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to
   populate the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone between
   them.  The size of the red zones is chosen so that each variable starts
   on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to struct that contains source location, could be NULL.
       __asan_global_source_location *__location;
     };

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */

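/* A minimal, self-contained sketch (not part of GCC, never compiled)
   of the run-time check described in the comment above, assuming the
   usual 8-bytes-per-shadow-byte mapping and the x86_64 shadow offset
   0x7fff8000.  It only illustrates the test the instrumentation emits;
   the real check is built as gimple/RTL elsewhere in this file.  */
#if 0
static int
address_is_poisoned (unsigned long x, unsigned long n)
{
  /* ShadowAddr = (X >> 3) + Offset.  */
  signed char shadow_value = *(signed char *) ((x >> 3) + 0x7fff8000UL);
  if (shadow_value == 0)
    return 0;		/* All 8 bytes of the granule are addressable.  */
  /* Shadow value K (1..7) means only the first K bytes are addressable;
     negative values mark red zones.  The access touches byte offsets
     (X & 7) .. (X & 7) + N - 1 within the granule.  */
  return (long) ((x & 7) + n - 1) >= (long) shadow_value;
}
#endif
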
/* Shadow offset set via set_asan_shadow_offset, and whether it has
   been computed yet.  */
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;

/* User-defined section names to sanitize, set via
   set_sanitized_sections.  */
static vec<char *> sanitized_sections;

/* Sets shadow offset to value in string VAL.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}

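/* Illustrative only (hypothetical values, never compiled): the parser
   above accepts a value only if the whole string is consumed with no
   error, so a complete numeric literal succeeds while trailing junk or
   an empty string is rejected and the caller can diagnose the bad
   option argument.  */
#if 0
gcc_checking_assert (set_asan_shadow_offset ("0x7fff8000"));
gcc_checking_assert (!set_asan_shadow_offset ("0x7fff8000x"));
gcc_checking_assert (!set_asan_shadow_offset (""));
#endif
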
/* Set list of user-defined sections that need to be sanitized.  */

void
set_sanitized_sections (const char *sections)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    free (pat);
  sanitized_sections.truncate (0);

  for (const char *s = sections; *s; )
    {
      const char *end;
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      sanitized_sections.safe_push (xstrndup (s, len));
      s = *end ? end + 1 : end;
    }
}

/* Checks whether section SEC should be sanitized.  */

static bool
section_sanitized_p (const char *sec)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    if (fnmatch (pat, sec, FNM_PERIOD) == 0)
      return true;
  return false;
}

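/* A hedged usage sketch (never compiled; the option name is an
   assumption about the caller): the option handler passes the
   comma-separated argument to set_sanitized_sections, which splits it
   into glob patterns, and section_sanitized_p then matches a concrete
   section name against each pattern with fnmatch.  */
#if 0
set_sanitized_sections (".mysec*,.other");
gcc_checking_assert (section_sanitized_p (".mysec1"));
gcc_checking_assert (!section_sanitized_p (".text"));
#endif
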
/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}

alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Various flags for Asan builtins.  */
enum asan_check_flags
{
  ASAN_CHECK_STORE = 1 << 0,
  ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
  ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
  ASAN_CHECK_LAST = 1 << 3
};

/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((asan_mem_ref *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<asan_mem_ref> pool;
};

pool_allocator<asan_mem_ref> asan_mem_ref::pool ("asan_mem_ref", 10);

/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref from the memory
   pool and initializes it.  START is the address of (or the
   expression pointing to) the beginning of the memory reference.
   ACCESS_SIZE is the size of the access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref = new asan_mem_ref;

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}

/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}

struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref *value_type;
  typedef asan_mem_ref *compare_type;

  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}

static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}

/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  asan_mem_ref::pool.release ();
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
	 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}

/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}

/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gcall *call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref,
			      bool *intercepted_p)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      {
	dest = gimple_call_arg (call, 0);
	/* DEST represents the address of a memory location.
	   instrument_derefs wants the memory location, so let's
	   dereference the address DEST before handing it to
	   instrument_derefs.  */
	if (TREE_CODE (dest) == ADDR_EXPR)
	  dest = TREE_OPERAND (dest, 0);
	else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
	  dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
			 dest, build_int_cst (TREE_TYPE (dest), 0));
	else
	  gcc_unreachable ();

	access_size = int_size_in_bytes (TREE_TYPE (dest));
      }

    default:
      /* The other memory access builtins are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}

/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  return false;
}

/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}

/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}

/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}

/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}

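/* Worked example (never compiled; assumes a little-endian target and
   BITS_PER_UNIT == 8): for the partial-redzone pattern
   {0x00, 0x00, 0x00, 0xF4} the loop above builds 0xF4000000, i.e.
   shadow_bytes[0] lands in the least significant byte, so the SImode
   store lays the bytes out in memory in array order.  */
#if 0
unsigned char shadow_bytes[4] = { 0x00, 0x00, 0x00, 0xF4 };
unsigned int val = 0;
for (int i = 0; i < 4; i++)
  val |= (unsigned int) shadow_bytes[i] << (8 * i);
/* val == 0xF4000000; stored as SImode it reproduces the array order.  */
#endif
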
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't call a library call
   here though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}

/* Switch to the current function's section and emit the LASANPC label
   that the stack frame description string refers to.  */

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
}

/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, the epilogue sequence is returned.  BASE is the register holding
   the stack base, relative to which the offsets in the OFFSETS array are
   expressed.  The OFFSETS array contains pairs of offsets in reverse order,
   always the end offset of some gap that needs protection followed by its
   starting offset, and DECLS is an array of representative decls for each
   var partition.  LENGTH is the length of the OFFSETS array, the DECLS array
   is LENGTH / 2 - 1 elements long (OFFSETS include the gap before the first
   variable as well as the gaps after each stack variable).  PBASE is, if
   non-NULL, some pseudo register which stack vars DECL_RTLs are based on.
   Either BASE should be assigned to PBASE, when not doing use after return
   protection, or the corresponding address based on the __asan_stack_malloc*
   return value.  */

rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[30];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);

  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node),
				     addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}

/* Return true if DECL, a global var, might be overridden and needs
   therefore a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}

/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: Linux kernel is built with -fno-common, so we do instrument
	 globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section
	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ())
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}

/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
		   int *nargs)
{
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}

/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
	    int *nargs)
{
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
	    BUILT_IN_ASAN_LOAD2_NOABORT,
	    BUILT_IN_ASAN_LOAD4_NOABORT,
	    BUILT_IN_ASAN_LOAD8_NOABORT,
	    BUILT_IN_ASAN_LOAD16_NOABORT,
	    BUILT_IN_ASAN_LOADN_NOABORT },
	  { BUILT_IN_ASAN_STORE1_NOABORT,
	    BUILT_IN_ASAN_STORE2_NOABORT,
	    BUILT_IN_ASAN_STORE4_NOABORT,
	    BUILT_IN_ASAN_STORE8_NOABORT,
	    BUILT_IN_ASAN_STORE16_NOABORT,
	    BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}

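/* Index arithmetic sketch (never compiled): a known-size 4-byte store
   with recovery disabled selects check[0][1][exact_log2 (4)], i.e.
   BUILT_IN_ASAN_STORE4 with one argument, while a variable-length
   access takes the size_in_bytes == -1 path and gets the two-argument
   *_N variant.  The report_error_func table directly above is indexed
   the same way.  */
#if 0
int nargs;
tree fn = check_func (/*is_store=*/true, /*recover_p=*/false,
		      /*size_in_bytes=*/4, &nargs);
/* fn == builtin_decl_implicit (BUILT_IN_ASAN_STORE4), nargs == 1.  */
#endif
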
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */

gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}

/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gcond *cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
			      /*before_p=*/true,
			      then_more_likely_p,
			      /*create_then_fallthru_edge=*/true,
			      then_bb,
			      fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}

/* Build
   (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
			   base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
			   gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
			   gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
	      build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

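/* In C terms, the four gimple statements built above amount to the
   following sketch (illustrative only, never compiled; "shadow_offset"
   stands for the value returned by asan_shadow_offset ()), with one
   SSA name per statement and the final load performed in the shadow
   alias set through SHADOW_PTR_TYPE.  */
#if 0
uintptr_t t1 = base_addr >> ASAN_SHADOW_SHIFT;	/* RSHIFT_EXPR  */
uintptr_t t2 = t1 + shadow_offset;		/* PLUS_EXPR    */
signed char *p = (signed char *) t2;		/* NOP_EXPR     */
signed char shadow_val = *p;			/* MEM_REF load */
#endif
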
/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
				  TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

/* LEN can already have necessary size and precision;
   in that case, do not create a new variable.  */

tree
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (ptrofftype_p (len))
    return len;
  gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				  NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

1702 /* Instrument the memory access instruction BASE. Insert new
1703 statements before or after ITER.
1705 Note that the memory access represented by BASE can be either an
1706 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1707 location. IS_STORE is TRUE for a store, FALSE for a load.
1708 BEFORE_P is TRUE for inserting the instrumentation code before
1709 ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
1710 for a scalar memory access and FALSE for memory region access.
1711 NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1712 length. ALIGN tells alignment of accessed memory object.
1714 START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
1715 memory region have already been instrumented.
1717 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1718 statement it was pointing to prior to calling this function,
1719 otherwise, it points to the statement logically following it. */
1721 static void
1722 build_check_stmt (location_t loc, tree base, tree len,
1723 HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1724 bool is_non_zero_len, bool before_p, bool is_store,
1725 bool is_scalar_access, unsigned int align = 0)
1727 gimple_stmt_iterator gsi = *iter;
1728 gimple g;
1730 gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1732 gsi = *iter;
1734 base = unshare_expr (base);
1735 base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1737 if (len)
1739 len = unshare_expr (len);
1740 len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1742 else
1744 gcc_assert (size_in_bytes != -1);
1745 len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1748 if (size_in_bytes > 1)
1750 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1751 || size_in_bytes > 16)
1752 is_scalar_access = false;
1753 else if (align && align < size_in_bytes * BITS_PER_UNIT)
1755 /* On non-strict alignment targets, if
1756 16-byte access is just 8-byte aligned,
1757 this will result in misaligned shadow
1758 memory 2 byte load, but otherwise can
1759 be handled using one read. */
1760 if (size_in_bytes != 16
1761 || STRICT_ALIGNMENT
1762 || align < 8 * BITS_PER_UNIT)
1763 is_scalar_access = false;
1767 HOST_WIDE_INT flags = 0;
1768 if (is_store)
1769 flags |= ASAN_CHECK_STORE;
1770 if (is_non_zero_len)
1771 flags |= ASAN_CHECK_NON_ZERO_LEN;
1772 if (is_scalar_access)
1773 flags |= ASAN_CHECK_SCALAR_ACCESS;
1775 g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1776 build_int_cst (integer_type_node, flags),
1777 base, len,
1778 build_int_cst (integer_type_node,
1779 align / BITS_PER_UNIT));
1780 gimple_set_location (g, loc);
1781 if (before_p)
1782 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1783 else
1785 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1786 gsi_next (&gsi);
1787 *iter = gsi;
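/* As a sketch (assuming the asan_check_flags values from asan.h), an
   aligned 4-byte scalar store to _2 ends up as the internal call

     ASAN_CHECK (7, _2, 4, 4);

   where 7 == ASAN_CHECK_STORE | ASAN_CHECK_SCALAR_ACCESS
   | ASAN_CHECK_NON_ZERO_LEN, the third argument is the length and
   the fourth the byte alignment; asan_expand_check_ifn later lowers
   this call into the real shadow test.  */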
1791 /* If T represents a memory access, add instrumentation code before ITER.
1792 LOCATION is source code location.
1793 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1795 static void
1796 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1797 location_t location, bool is_store)
1799 if (is_store && !ASAN_INSTRUMENT_WRITES)
1800 return;
1801 if (!is_store && !ASAN_INSTRUMENT_READS)
1802 return;
1804 tree type, base;
1805 HOST_WIDE_INT size_in_bytes;
1807 type = TREE_TYPE (t);
1808 switch (TREE_CODE (t))
1810 case ARRAY_REF:
1811 case COMPONENT_REF:
1812 case INDIRECT_REF:
1813 case MEM_REF:
1814 case VAR_DECL:
1815 case BIT_FIELD_REF:
1816 break;
1818 default:
1819 return;
1822 size_in_bytes = int_size_in_bytes (type);
1823 if (size_in_bytes <= 0)
1824 return;
1826 HOST_WIDE_INT bitsize, bitpos;
1827 tree offset;
1828 machine_mode mode;
1829 int volatilep = 0, unsignedp = 0;
1830 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1831 &mode, &unsignedp, &volatilep, false);
1833 if (TREE_CODE (t) == COMPONENT_REF
1834 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1836 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1837 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1838 TREE_OPERAND (t, 0), repr,
1839 NULL_TREE), location, is_store);
1840 return;
1843 if (bitpos % BITS_PER_UNIT
1844 || bitsize != size_in_bytes * BITS_PER_UNIT)
1845 return;
1847 if (TREE_CODE (inner) == VAR_DECL
1848 && offset == NULL_TREE
1849 && bitpos >= 0
1850 && DECL_SIZE (inner)
1851 && tree_fits_shwi_p (DECL_SIZE (inner))
1852 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1854 if (DECL_THREAD_LOCAL_P (inner))
1855 return;
1856 if (!ASAN_GLOBALS && is_global_var (inner))
1857 return;
1858 if (!TREE_STATIC (inner))
1860 /* Automatic vars in the current function will always be
1861 accessible.  */
1862 if (decl_function_context (inner) == current_function_decl)
1863 return;
1865 /* Always instrument external vars; they might be dynamically
1866 initialized.  */
1867 else if (!DECL_EXTERNAL (inner))
1869 /* Static vars that are known not to be dynamically
1870 initialized will always be accessible.  */
1871 varpool_node *vnode = varpool_node::get (inner);
1872 if (vnode && !vnode->dynamically_initialized)
1873 return;
1877 base = build_fold_addr_expr (t);
1878 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1880 unsigned int align = get_object_alignment (t);
1881 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1882 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1883 is_store, /*is_scalar_access*/true, align);
1884 update_mem_ref_hash_table (base, size_in_bytes);
1885 update_mem_ref_hash_table (t, size_in_bytes);
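/* Two illustrative cases of the above, assuming 'struct S { int x:3; } s;':

     s.x = 1;    // COMPONENT_REF of a bit-field: re-instrumented via
		 // its DECL_BIT_FIELD_REPRESENTATIVE, so the emitted
		 // check covers the whole representative field.
     local = 1;  // VAR_DECL fully inside an automatic var of the
		 // current function: provably accessible, so no
		 // check is emitted at all.  */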
1890 /* Insert a memory reference into the hash table if the access length
1891 can be determined at compile time.  */
1893 static void
1894 maybe_update_mem_ref_hash_table (tree base, tree len)
1896 if (!POINTER_TYPE_P (TREE_TYPE (base))
1897 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1898 return;
1900 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1902 if (size_in_bytes != -1)
1903 update_mem_ref_hash_table (base, size_in_bytes);
1906 /* Instrument an access to a contiguous memory region that starts at
1907 the address pointed to by BASE, over a length of LEN units (each
1908 unit being sizeof (*BASE) bytes).  ITER points to the instruction before
1909 which the instrumentation instructions must be inserted. LOCATION
1910 is the source location that the instrumentation instructions must
1911 have. If IS_STORE is true, then the memory access is a store;
1912 otherwise, it's a load. */
1914 static void
1915 instrument_mem_region_access (tree base, tree len,
1916 gimple_stmt_iterator *iter,
1917 location_t location, bool is_store)
1919 if (!POINTER_TYPE_P (TREE_TYPE (base))
1920 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1921 || integer_zerop (len))
1922 return;
1924 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1926 if ((size_in_bytes == -1)
1927 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1929 build_check_stmt (location, base, len, size_in_bytes, iter,
1930 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1931 is_store, /*is_scalar_access*/false, /*align*/0);
1934 maybe_update_mem_ref_hash_table (base, len);
1935 *iter = gsi_for_stmt (gsi_stmt (*iter));
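/* Sketch for a written region such as the destination of
   'memset (p, 0, n)' with non-constant N: size_in_bytes is -1, so
   this emits roughly

     _1 = (unsigned long) n;
     ASAN_CHECK (1, p, _1, 0);   // 1 == ASAN_CHECK_STORE; no
				 // NON_ZERO_LEN / SCALAR bits

   which asan_expand_check_ifn later guards with 'if (len != 0)'.  */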
1938 /* Instrument the call to a built-in memory access function that is
1939 pointed to by the iterator ITER.
1941 Upon completion, return TRUE iff *ITER has been advanced to the
1942 statement following the one it was originally pointing to. */
1944 static bool
1945 instrument_builtin_call (gimple_stmt_iterator *iter)
1947 if (!ASAN_MEMINTRIN)
1948 return false;
1950 bool iter_advanced_p = false;
1951 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1953 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1955 location_t loc = gimple_location (call);
1957 asan_mem_ref src0, src1, dest;
1958 asan_mem_ref_init (&src0, NULL, 1);
1959 asan_mem_ref_init (&src1, NULL, 1);
1960 asan_mem_ref_init (&dest, NULL, 1);
1962 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1963 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1964 dest_is_deref = false, intercepted_p = true;
1966 if (get_mem_refs_of_builtin_call (call,
1967 &src0, &src0_len, &src0_is_store,
1968 &src1, &src1_len, &src1_is_store,
1969 &dest, &dest_len, &dest_is_store,
1970 &dest_is_deref, &intercepted_p))
1972 if (dest_is_deref)
1974 instrument_derefs (iter, dest.start, loc, dest_is_store);
1975 gsi_next (iter);
1976 iter_advanced_p = true;
1978 else if (!intercepted_p
1979 && (src0_len || src1_len || dest_len))
1981 if (src0.start != NULL_TREE)
1982 instrument_mem_region_access (src0.start, src0_len,
1983 iter, loc, /*is_store=*/false);
1984 if (src1.start != NULL_TREE)
1985 instrument_mem_region_access (src1.start, src1_len,
1986 iter, loc, /*is_store=*/false);
1987 if (dest.start != NULL_TREE)
1988 instrument_mem_region_access (dest.start, dest_len,
1989 iter, loc, /*is_store=*/true);
1991 *iter = gsi_for_stmt (call);
1992 gsi_next (iter);
1993 iter_advanced_p = true;
1995 else
1997 if (src0.start != NULL_TREE)
1998 maybe_update_mem_ref_hash_table (src0.start, src0_len);
1999 if (src1.start != NULL_TREE)
2000 maybe_update_mem_ref_hash_table (src1.start, src1_len);
2001 if (dest.start != NULL_TREE)
2002 maybe_update_mem_ref_hash_table (dest.start, dest_len);
2005 return iter_advanced_p;
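/* E.g. 'memcpy (d, s, n)' yields two region checks ahead of the
   call: a read of (s, n) and a write of (d, n).  Calls that the
   run-time library intercepts anyway (INTERCEPTED_P) are only
   recorded in the hash table, not re-checked here.  */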
2008 /* Instrument the assignment statement ITER if it is subject to
2009 instrumentation. Return TRUE iff instrumentation actually
2010 happened.  In that case, the iterator ITER is advanced to the
2011 statement following the one initially pointed to by ITER,
2012 and the memory reference whose access has been instrumented
2013 is added to the memory references hash table.  */
2015 static bool
2016 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2018 gimple s = gsi_stmt (*iter);
2020 gcc_assert (gimple_assign_single_p (s));
2022 tree ref_expr = NULL_TREE;
2023 bool is_store, is_instrumented = false;
2025 if (gimple_store_p (s))
2027 ref_expr = gimple_assign_lhs (s);
2028 is_store = true;
2029 instrument_derefs (iter, ref_expr,
2030 gimple_location (s),
2031 is_store);
2032 is_instrumented = true;
2035 if (gimple_assign_load_p (s))
2037 ref_expr = gimple_assign_rhs1 (s);
2038 is_store = false;
2039 instrument_derefs (iter, ref_expr,
2040 gimple_location (s),
2041 is_store);
2042 is_instrumented = true;
2045 if (is_instrumented)
2046 gsi_next (iter);
2048 return is_instrumented;
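/* Note that a single GIMPLE assignment can be both: an aggregate
   copy such as '*p = *q' is a store through P and a load through Q,
   so both legs above fire for it.  */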
2051 /* Instrument the function call pointed to by the iterator ITER, if it
2052 is subject to instrumentation. At the moment, the only function
2053 calls that are instrumented are some built-in functions that access
2054 memory. Look at instrument_builtin_call to learn more.
2056 Upon completion return TRUE iff *ITER was advanced to the statement
2057 following the one it was originally pointing to. */
2059 static bool
2060 maybe_instrument_call (gimple_stmt_iterator *iter)
2062 gimple stmt = gsi_stmt (*iter);
2063 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2065 if (is_builtin && instrument_builtin_call (iter))
2066 return true;
2068 if (gimple_call_noreturn_p (stmt))
2070 if (is_builtin)
2072 tree callee = gimple_call_fndecl (stmt);
2073 switch (DECL_FUNCTION_CODE (callee))
2075 case BUILT_IN_UNREACHABLE:
2076 case BUILT_IN_TRAP:
2077 /* Don't instrument these. */
2078 return false;
2079 default:
2080 break;
2083 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2084 gimple g = gimple_build_call (decl, 0);
2085 gimple_set_location (g, gimple_location (stmt));
2086 gsi_insert_before (iter, g, GSI_SAME_STMT);
2088 return false;
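/* Sketch of the noreturn handling above: ahead of, say,
   'longjmp (env, 1)' the pass emits

     __asan_handle_no_return ();
     longjmp (env, 1);

   so the run time can unpoison the stack frames the call abandons;
   __builtin_unreachable and __builtin_trap are exempted since
   nothing needs unpoisoning for them.  */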
2091 /* Walk each instruction of all basic blocks and instrument those that
2092 represent memory references: loads, stores, or function calls.
2093 In a given basic block, this function avoids instrumenting memory
2094 references that have already been instrumented. */
2096 static void
2097 transform_statements (void)
2099 basic_block bb, last_bb = NULL;
2100 gimple_stmt_iterator i;
2101 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2103 FOR_EACH_BB_FN (bb, cfun)
2105 basic_block prev_bb = bb;
2107 if (bb->index >= saved_last_basic_block) continue;
2109 /* Flush the mem ref hash table, if the current bb doesn't have
2110 exactly one predecessor, or if that predecessor (skipping
2111 over asan-created basic blocks) isn't the last processed
2112 basic block.  Thus we effectively flush on extended basic
2113 block boundaries.  */
2114 while (single_pred_p (prev_bb))
2116 prev_bb = single_pred (prev_bb);
2117 if (prev_bb->index < saved_last_basic_block)
2118 break;
2120 if (prev_bb != last_bb)
2121 empty_mem_ref_hash_table ();
2122 last_bb = bb;
2124 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2126 gimple s = gsi_stmt (i);
2128 if (has_stmt_been_instrumented_p (s))
2129 gsi_next (&i);
2130 else if (gimple_assign_single_p (s)
2131 && !gimple_clobber_p (s)
2132 && maybe_instrument_assignment (&i))
2133 /* Nothing to do as maybe_instrument_assignment advanced
2134 the iterator I. */;
2135 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2136 /* Nothing to do as maybe_instrument_call
2137 advanced the iterator I. */;
2138 else
2140 /* No instrumentation happened.
2142 If the current instruction is a function call that
2143 might free something, let's forget about the memory
2144 references that got instrumented. Otherwise we might
2145 miss some instrumentation opportunities. */
2146 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2147 empty_mem_ref_hash_table ();
2149 gsi_next (&i);
2153 free_mem_ref_resources ();
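/* Flushing sketch: in a diamond where bb1 conditionally branches to
   bb2 and bb4, with processing order bb1, bb2, bb4, the hash table
   survives from bb1 into bb2 (bb2's single predecessor is the last
   processed block), but is flushed before bb4, since by then bb1 is
   no longer the last processed block.  */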
2156 /* Build
2157 __asan_before_dynamic_init (module_name)
2158 or
2159 __asan_after_dynamic_init ()
2160 call.  */
2162 tree
2163 asan_dynamic_init_call (bool after_p)
2165 tree fn = builtin_decl_implicit (after_p
2166 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2167 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2168 tree module_name_cst = NULL_TREE;
2169 if (!after_p)
2171 pretty_printer module_name_pp;
2172 pp_string (&module_name_pp, main_input_filename);
2174 if (shadow_ptr_types[0] == NULL_TREE)
2175 asan_init_shadow_ptr_types ();
2176 module_name_cst = asan_pp_string (&module_name_pp);
2177 module_name_cst = fold_convert (const_ptr_type_node,
2178 module_name_cst);
2181 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
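/* These calls are meant to bracket a module's dynamic initializers,
   roughly:

     __asan_before_dynamic_init ("t.c");
     ... run the dynamic initializers ...
     __asan_after_dynamic_init ();

   which lets the run time perform init-order checking.  */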
2184 /* Build
2185 struct __asan_global
2187 const void *__beg;
2188 uptr __size;
2189 uptr __size_with_redzone;
2190 const void *__name;
2191 const void *__module_name;
2192 uptr __has_dynamic_init;
2193 __asan_global_source_location *__location;
2194 } type. */
2196 static tree
2197 asan_global_struct (void)
2199 static const char *field_names[7]
2200 = { "__beg", "__size", "__size_with_redzone",
2201 "__name", "__module_name", "__has_dynamic_init", "__location"};
2202 tree fields[7], ret;
2203 int i;
2205 ret = make_node (RECORD_TYPE);
2206 for (i = 0; i < 7; i++)
2208 fields[i]
2209 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2210 get_identifier (field_names[i]),
2211 (i == 0 || i == 3) ? const_ptr_type_node
2212 : pointer_sized_int_node);
2213 DECL_CONTEXT (fields[i]) = ret;
2214 if (i)
2215 DECL_CHAIN (fields[i - 1]) = fields[i];
2217 tree type_decl = build_decl (input_location, TYPE_DECL,
2218 get_identifier ("__asan_global"), ret);
2219 DECL_IGNORED_P (type_decl) = 1;
2220 DECL_ARTIFICIAL (type_decl) = 1;
2221 TYPE_FIELDS (ret) = fields[0];
2222 TYPE_NAME (ret) = type_decl;
2223 TYPE_STUB_DECL (ret) = type_decl;
2224 layout_type (ret);
2225 return ret;
2228 /* Append the description of a single global DECL to vector V.
2229 TYPE is the __asan_global struct type as returned by asan_global_struct.  */
2231 static void
2232 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2234 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2235 unsigned HOST_WIDE_INT size;
2236 tree str_cst, module_name_cst, refdecl = decl;
2237 vec<constructor_elt, va_gc> *vinner = NULL;
2239 pretty_printer asan_pp, module_name_pp;
2241 if (DECL_NAME (decl))
2242 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2243 else
2244 pp_string (&asan_pp, "<unknown>");
2245 str_cst = asan_pp_string (&asan_pp);
2247 pp_string (&module_name_pp, main_input_filename);
2248 module_name_cst = asan_pp_string (&module_name_pp);
2250 if (asan_needs_local_alias (decl))
2252 char buf[20];
2253 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2254 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2255 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2256 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2257 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2258 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2259 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2260 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2261 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2262 TREE_STATIC (refdecl) = 1;
2263 TREE_PUBLIC (refdecl) = 0;
2264 TREE_USED (refdecl) = 1;
2265 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2268 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2269 fold_convert (const_ptr_type_node,
2270 build_fold_addr_expr (refdecl)));
2271 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2272 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2273 size += asan_red_zone_size (size);
2274 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2275 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2276 fold_convert (const_ptr_type_node, str_cst));
2277 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2278 fold_convert (const_ptr_type_node, module_name_cst));
2279 varpool_node *vnode = varpool_node::get (decl);
2280 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2281 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2282 build_int_cst (uptr, has_dynamic_init));
2283 tree locptr = NULL_TREE;
2284 location_t loc = DECL_SOURCE_LOCATION (decl);
2285 expanded_location xloc = expand_location (loc);
2286 if (xloc.file != NULL)
2288 static int lasanloccnt = 0;
2289 char buf[25];
2290 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2291 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2292 ubsan_get_source_location_type ());
2293 TREE_STATIC (var) = 1;
2294 TREE_PUBLIC (var) = 0;
2295 DECL_ARTIFICIAL (var) = 1;
2296 DECL_IGNORED_P (var) = 1;
2297 pretty_printer filename_pp;
2298 pp_string (&filename_pp, xloc.file);
2299 tree str = asan_pp_string (&filename_pp);
2300 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2301 NULL_TREE, str, NULL_TREE,
2302 build_int_cst (unsigned_type_node,
2303 xloc.line), NULL_TREE,
2304 build_int_cst (unsigned_type_node,
2305 xloc.column));
2306 TREE_CONSTANT (ctor) = 1;
2307 TREE_STATIC (ctor) = 1;
2308 DECL_INITIAL (var) = ctor;
2309 varpool_node::finalize_decl (var);
2310 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2312 else
2313 locptr = build_int_cst (uptr, 0);
2314 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2315 init = build_constructor (type, vinner);
2316 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
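/* Schematically, each protected global thus contributes an element
   like (field order as in asan_global_struct; values illustrative):

     { (const void *) &g,          // __beg (or its .LASAN alias)
       sizeof (g),                 // __size
       sizeof (g) + redzone,       // __size_with_redzone
       "g", "t.c",                 // __name, __module_name
       has_dynamic_init,           // from the varpool node
       &.LASANLOC1 or 0 }          // __location  */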
2319 /* Initialize sanitizer.def builtins if the front end hasn't initialized them.  */
2320 void
2321 initialize_sanitizer_builtins (void)
2323 tree decl;
2325 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2326 return;
2328 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2329 tree BT_FN_VOID_PTR
2330 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2331 tree BT_FN_VOID_CONST_PTR
2332 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2333 tree BT_FN_VOID_PTR_PTR
2334 = build_function_type_list (void_type_node, ptr_type_node,
2335 ptr_type_node, NULL_TREE);
2336 tree BT_FN_VOID_PTR_PTR_PTR
2337 = build_function_type_list (void_type_node, ptr_type_node,
2338 ptr_type_node, ptr_type_node, NULL_TREE);
2339 tree BT_FN_VOID_PTR_PTRMODE
2340 = build_function_type_list (void_type_node, ptr_type_node,
2341 pointer_sized_int_node, NULL_TREE);
2342 tree BT_FN_VOID_INT
2343 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2344 tree BT_FN_SIZE_CONST_PTR_INT
2345 = build_function_type_list (size_type_node, const_ptr_type_node,
2346 integer_type_node, NULL_TREE);
2347 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2348 tree BT_FN_IX_CONST_VPTR_INT[5];
2349 tree BT_FN_IX_VPTR_IX_INT[5];
2350 tree BT_FN_VOID_VPTR_IX_INT[5];
2351 tree vptr
2352 = build_pointer_type (build_qualified_type (void_type_node,
2353 TYPE_QUAL_VOLATILE));
2354 tree cvptr
2355 = build_pointer_type (build_qualified_type (void_type_node,
2356 TYPE_QUAL_VOLATILE
2357 |TYPE_QUAL_CONST));
2358 tree boolt
2359 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2360 int i;
2361 for (i = 0; i < 5; i++)
2363 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2364 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2365 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2366 integer_type_node, integer_type_node,
2367 NULL_TREE);
2368 BT_FN_IX_CONST_VPTR_INT[i]
2369 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2370 BT_FN_IX_VPTR_IX_INT[i]
2371 = build_function_type_list (ix, vptr, ix, integer_type_node,
2372 NULL_TREE);
2373 BT_FN_VOID_VPTR_IX_INT[i]
2374 = build_function_type_list (void_type_node, vptr, ix,
2375 integer_type_node, NULL_TREE);
2377 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2378 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2379 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2380 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2381 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2382 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2383 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2384 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2385 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2386 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2387 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2388 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2389 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2390 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2391 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2392 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2393 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2394 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2395 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2396 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2397 #undef ATTR_NOTHROW_LEAF_LIST
2398 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2399 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2400 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2401 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2402 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2403 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2404 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2405 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2406 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2407 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2408 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2409 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2410 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2411 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2412 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2413 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2414 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2415 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2416 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2417 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2418 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2419 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2420 #undef DEF_SANITIZER_BUILTIN
2421 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2422 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2423 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2424 set_call_expr_flags (decl, ATTRS); \
2425 set_builtin_decl (ENUM, decl, true);
2427 #include "sanitizer.def"
2429 /* -fsanitize=object-size uses __builtin_object_size, but that might
2430 not be available for e.g. Fortran at this point. We use
2431 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2432 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2433 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2434 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2435 BT_FN_SIZE_CONST_PTR_INT,
2436 ATTR_PURE_NOTHROW_LEAF_LIST)
2438 #undef DEF_SANITIZER_BUILTIN
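/* For illustration, a sanitizer.def entry such as

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_REPORT_LOAD1,
			    "__asan_report_load1", BT_FN_VOID_PTR,
			    ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST)

   expands, via the macro above, into an add_builtin_function call
   that registers __builtin___asan_report_load1 with those call
   flags.  */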
2441 /* Called via hash_table::traverse.  Count the number of emitted
2442 STRING_CSTs in the constant hash table.  */
2444 static int
2445 count_string_csts (constant_descriptor_tree **slot,
2446 unsigned HOST_WIDE_INT *data)
2448 struct constant_descriptor_tree *desc = *slot;
2449 if (TREE_CODE (desc->value) == STRING_CST
2450 && TREE_ASM_WRITTEN (desc->value)
2451 && asan_protect_global (desc->value))
2452 ++*data;
2453 return 1;
2456 /* Helper structure to pass two parameters to
2457 add_string_csts. */
2459 struct asan_add_string_csts_data
2460 {
2461 tree type;
2462 vec<constructor_elt, va_gc> *v;
2463 };
2465 /* Called via hash_table::traverse. Call asan_add_global
2466 on emitted STRING_CSTs from the constant hash table. */
2468 static int
2469 add_string_csts (constant_descriptor_tree **slot,
2470 asan_add_string_csts_data *aascd)
2472 struct constant_descriptor_tree *desc = *slot;
2473 if (TREE_CODE (desc->value) == STRING_CST
2474 && TREE_ASM_WRITTEN (desc->value)
2475 && asan_protect_global (desc->value))
2477 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2478 aascd->type, aascd->v);
2480 return 1;
2483 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2484 invoke ggc_collect. */
2485 static GTY(()) tree asan_ctor_statements;
2487 /* Module-level instrumentation.
2488 - Insert __asan_init_vN() into the list of CTORs.
2489 - TODO: insert redzones around globals.
2490 */
2492 void
2493 asan_finish_file (void)
2495 varpool_node *vnode;
2496 unsigned HOST_WIDE_INT gcount = 0;
2498 if (shadow_ptr_types[0] == NULL_TREE)
2499 asan_init_shadow_ptr_types ();
2500 /* Avoid instrumenting code in the asan ctors/dtors.
2501 We don't need to insert padding after the description strings,
2502 nor after the .LASAN* array.  */
2503 flag_sanitize &= ~SANITIZE_ADDRESS;
2505 /* For user space we want the asan constructors to run first, so we
2506 use a high priority.  The Linux kernel supports only the default
2507 priority, and its only other user of constructors is coverage, so
2508 there we run with the default priority.  */
2509 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2510 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2512 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2514 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2515 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2517 FOR_EACH_DEFINED_VARIABLE (vnode)
2518 if (TREE_ASM_WRITTEN (vnode->decl)
2519 && asan_protect_global (vnode->decl))
2520 ++gcount;
2521 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2522 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2523 (&gcount);
2524 if (gcount)
2526 tree type = asan_global_struct (), var, ctor;
2527 tree dtor_statements = NULL_TREE;
2528 vec<constructor_elt, va_gc> *v;
2529 char buf[20];
2531 type = build_array_type_nelts (type, gcount);
2532 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2533 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2534 type);
2535 TREE_STATIC (var) = 1;
2536 TREE_PUBLIC (var) = 0;
2537 DECL_ARTIFICIAL (var) = 1;
2538 DECL_IGNORED_P (var) = 1;
2539 vec_alloc (v, gcount);
2540 FOR_EACH_DEFINED_VARIABLE (vnode)
2541 if (TREE_ASM_WRITTEN (vnode->decl)
2542 && asan_protect_global (vnode->decl))
2543 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2544 struct asan_add_string_csts_data aascd;
2545 aascd.type = TREE_TYPE (type);
2546 aascd.v = v;
2547 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2548 (&aascd);
2549 ctor = build_constructor (type, v);
2550 TREE_CONSTANT (ctor) = 1;
2551 TREE_STATIC (ctor) = 1;
2552 DECL_INITIAL (var) = ctor;
2553 varpool_node::finalize_decl (var);
2555 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2556 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2557 append_to_statement_list (build_call_expr (fn, 2,
2558 build_fold_addr_expr (var),
2559 gcount_tree),
2560 &asan_ctor_statements);
2562 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2563 append_to_statement_list (build_call_expr (fn, 2,
2564 build_fold_addr_expr (var),
2565 gcount_tree),
2566 &dtor_statements);
2567 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2569 if (asan_ctor_statements)
2570 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2571 flag_sanitize |= SANITIZE_ADDRESS;
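/* Schematically, for user space the file then ends up with (names
   illustrative, __asan_init_vN as in the header comment):

     high-priority constructor:
       __asan_init_vN ();
       __asan_register_globals (&.LASAN0, gcount);

     matching destructor:
       __asan_unregister_globals (&.LASAN0, gcount);  */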
2574 /* Expand the IFN_ASAN_CHECK internal calls built above into either
2575 run-time callbacks or inline shadow-memory checks.  */
2576 bool
2577 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2579 gimple g = gsi_stmt (*iter);
2580 location_t loc = gimple_location (g);
2582 bool recover_p
2583 = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2585 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2586 gcc_assert (flags < ASAN_CHECK_LAST);
2587 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2588 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2589 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2591 tree base = gimple_call_arg (g, 1);
2592 tree len = gimple_call_arg (g, 2);
2593 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2595 HOST_WIDE_INT size_in_bytes
2596 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2598 if (use_calls)
2600 /* Instrument using callbacks. */
2601 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2602 NOP_EXPR, base);
2603 gimple_set_location (g, loc);
2604 gsi_insert_before (iter, g, GSI_SAME_STMT);
2605 tree base_addr = gimple_assign_lhs (g);
2607 int nargs;
2608 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2609 if (nargs == 1)
2610 g = gimple_build_call (fun, 1, base_addr);
2611 else
2613 gcc_assert (nargs == 2);
2614 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2615 NOP_EXPR, len);
2616 gimple_set_location (g, loc);
2617 gsi_insert_before (iter, g, GSI_SAME_STMT);
2618 tree sz_arg = gimple_assign_lhs (g);
2619 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2621 gimple_set_location (g, loc);
2622 gsi_replace (iter, g, false);
2623 return false;
2626 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2628 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2629 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2631 gimple_stmt_iterator gsi = *iter;
2633 if (!is_non_zero_len)
2635 /* So, the length of the memory area to asan-protect is
2636 non-constant.  Let's guard the generated instrumentation code
2637 like:
2639 if (len != 0)
2640 {
2641 // asan instrumentation code goes here.
2642 }
2643 // fallthrough instructions, starting with *ITER.  */
2645 g = gimple_build_cond (NE_EXPR,
2646 len,
2647 build_int_cst (TREE_TYPE (len), 0),
2648 NULL_TREE, NULL_TREE);
2649 gimple_set_location (g, loc);
2651 basic_block then_bb, fallthrough_bb;
2652 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2653 /*then_more_likely_p=*/true,
2654 &then_bb, &fallthrough_bb);
2655 /* Note that fallthrough_bb starts with the statement that was
2656 pointed to by ITER. */
2658 /* The 'then block' of the 'if (len != 0)' condition is where
2659 we'll now generate the asan instrumentation code.  */
2660 gsi = gsi_last_bb (then_bb);
2663 /* Get an iterator on the point where we can add the condition
2664 statement for the instrumentation. */
2665 basic_block then_bb, else_bb;
2666 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2667 /*then_more_likely_p=*/false,
2668 /*create_then_fallthru_edge*/recover_p,
2669 &then_bb,
2670 &else_bb);
2672 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2673 NOP_EXPR, base);
2674 gimple_set_location (g, loc);
2675 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2676 tree base_addr = gimple_assign_lhs (g);
2678 tree t = NULL_TREE;
2679 if (real_size_in_bytes >= 8)
2681 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2682 shadow_ptr_type);
2683 t = shadow;
2685 else
2687 /* Slow path for 1, 2 and 4 byte accesses. */
2688 /* Test (shadow != 0)
2689 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
2690 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2691 shadow_ptr_type);
2692 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2693 gimple_seq seq = NULL;
2694 gimple_seq_add_stmt (&seq, shadow_test);
2695 /* Accesses aligned to at least 8 bytes can test just
2696 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2697 to be 0.  */
2698 if (align < 8)
2700 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2701 base_addr, 7));
2702 gimple_seq_add_stmt (&seq,
2703 build_type_cast (shadow_type,
2704 gimple_seq_last (seq)));
2705 if (real_size_in_bytes > 1)
2706 gimple_seq_add_stmt (&seq,
2707 build_assign (PLUS_EXPR,
2708 gimple_seq_last (seq),
2709 real_size_in_bytes - 1));
2710 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2712 else
2713 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2714 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2715 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2716 gimple_seq_last (seq)));
2717 t = gimple_assign_lhs (gimple_seq_last (seq));
2718 gimple_seq_set_location (seq, loc);
2719 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2721 /* For non-constant, misaligned or otherwise odd access sizes,
2722 check the first and the last byte.  */
2723 if (size_in_bytes == -1)
2725 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2726 MINUS_EXPR, len,
2727 build_int_cst (pointer_sized_int_node, 1));
2728 gimple_set_location (g, loc);
2729 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2730 tree last = gimple_assign_lhs (g);
2731 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2732 PLUS_EXPR, base_addr, last);
2733 gimple_set_location (g, loc);
2734 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2735 tree base_end_addr = gimple_assign_lhs (g);
2737 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2738 shadow_ptr_type);
2739 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2740 gimple_seq seq = NULL;
2741 gimple_seq_add_stmt (&seq, shadow_test);
2742 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2743 base_end_addr, 7));
2744 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2745 gimple_seq_last (seq)));
2746 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2747 gimple_seq_last (seq),
2748 shadow));
2749 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2750 gimple_seq_last (seq)));
2751 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2752 gimple_seq_last (seq)));
2753 t = gimple_assign_lhs (gimple_seq_last (seq));
2754 gimple_seq_set_location (seq, loc);
2755 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2759 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2760 NULL_TREE, NULL_TREE);
2761 gimple_set_location (g, loc);
2762 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2764 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2765 gsi = gsi_start_bb (then_bb);
2766 int nargs;
2767 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
2768 g = gimple_build_call (fun, nargs, base_addr, len);
2769 gimple_set_location (g, loc);
2770 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2772 gsi_remove (iter, true);
2773 *iter = gsi_start_bb (else_bb);
2775 return true;
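/* Putting the inline (non-callback) path together for a 4-byte load
   from p_1 with alignment below 8 bytes, the expansion is roughly:

     base_2 = (uintptr_t) p_1;
     shadow_3 = *(signed char *) ((base_2 >> 3) + shadow_offset);
     if (shadow_3 != 0
	 && (signed char) ((base_2 & 7) + 3) >= shadow_3)
       __asan_report_load4 (base_2);

   With USE_CALLS (driven by --param
   asan-instrumentation-with-call-threshold), the whole check
   collapses to '__asan_load4 (base_2);' instead.  */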
2778 /* Instrument the current function. */
2780 static unsigned int
2781 asan_instrument (void)
2783 if (shadow_ptr_types[0] == NULL_TREE)
2784 asan_init_shadow_ptr_types ();
2785 transform_statements ();
2786 return 0;
2789 static bool
2790 gate_asan (void)
2792 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2793 && !lookup_attribute ("no_sanitize_address",
2794 DECL_ATTRIBUTES (current_function_decl));
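/* So, e.g.,

     __attribute__ ((no_sanitize_address)) void f (void) { ... }

   is compiled without any of the instrumentation above, even under
   -fsanitize=address.  */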
2797 namespace {
2799 const pass_data pass_data_asan =
2801 GIMPLE_PASS, /* type */
2802 "asan", /* name */
2803 OPTGROUP_NONE, /* optinfo_flags */
2804 TV_NONE, /* tv_id */
2805 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2806 0, /* properties_provided */
2807 0, /* properties_destroyed */
2808 0, /* todo_flags_start */
2809 TODO_update_ssa, /* todo_flags_finish */
2812 class pass_asan : public gimple_opt_pass
2814 public:
2815 pass_asan (gcc::context *ctxt)
2816 : gimple_opt_pass (pass_data_asan, ctxt)
2819 /* opt_pass methods: */
2820 opt_pass * clone () { return new pass_asan (m_ctxt); }
2821 virtual bool gate (function *) { return gate_asan (); }
2822 virtual unsigned int execute (function *) { return asan_instrument (); }
2824 }; // class pass_asan
2826 } // anon namespace
2828 gimple_opt_pass *
2829 make_pass_asan (gcc::context *ctxt)
2831 return new pass_asan (ctxt);
2834 namespace {
2836 const pass_data pass_data_asan_O0 =
2838 GIMPLE_PASS, /* type */
2839 "asan0", /* name */
2840 OPTGROUP_NONE, /* optinfo_flags */
2841 TV_NONE, /* tv_id */
2842 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2843 0, /* properties_provided */
2844 0, /* properties_destroyed */
2845 0, /* todo_flags_start */
2846 TODO_update_ssa, /* todo_flags_finish */
2849 class pass_asan_O0 : public gimple_opt_pass
2851 public:
2852 pass_asan_O0 (gcc::context *ctxt)
2853 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2856 /* opt_pass methods: */
2857 virtual bool gate (function *) { return !optimize && gate_asan (); }
2858 virtual unsigned int execute (function *) { return asan_instrument (); }
2860 }; // class pass_asan_O0
2862 } // anon namespace
2864 gimple_opt_pass *
2865 make_pass_asan_O0 (gcc::context *ctxt)
2867 return new pass_asan_O0 (ctxt);
2870 #include "gt-asan.h"