/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2016 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "alloc-pool.h"
32 #include "tree-pass.h"
33 #include "tm_p.h"
34 #include "stringpool.h"
35 #include "tree-vrp.h"
36 #include "tree-ssanames.h"
37 #include "optabs.h"
38 #include "emit-rtl.h"
39 #include "cgraph.h"
40 #include "gimple-pretty-print.h"
41 #include "alias.h"
42 #include "fold-const.h"
43 #include "cfganal.h"
44 #include "gimplify.h"
45 #include "gimple-iterator.h"
46 #include "varasm.h"
47 #include "stor-layout.h"
48 #include "tree-iterator.h"
49 #include "asan.h"
50 #include "dojump.h"
51 #include "explow.h"
52 #include "expr.h"
53 #include "output.h"
54 #include "langhooks.h"
55 #include "cfgloop.h"
56 #include "gimple-builder.h"
57 #include "ubsan.h"
58 #include "params.h"
59 #include "builtins.h"
60 #include "fnmatch.h"
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
	 __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
	 if ((X & 7) + N - 1 >= ShadowValue)
	   __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted into the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that red zones are inserted
   around the allocated memory) and free (so that reuse of freed memory is
   delayed), and provides the __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free bugs in the heap, on the stack and for global variables.
   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

     Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

     Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
	      the next slot be 32 bytes aligned; this one is called Partial
	      Redzone; this 32-byte alignment is an asan constraint]

     Slot 3/ [24 bytes for variable 'a']

     Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

     Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

     Slot 6/ [8 bytes for variable 'b']

     Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
	      'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

     1/ The first 8 bytes contain a magical asan number that is always
	0x41B58AB3.

     2/ The following 8 bytes contain a pointer to a string (to be
	parsed at runtime by the asan run-time library), whose format is
	the following:

	  "<function-name> <space> <num-of-variables-on-the-stack>
	  (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
	  <length-of-var-in-bytes> ){n} "

	where '(...){n}' means the content inside the parentheses occurs 'n'
	times, with 'n' being the number of variables on the stack.

     3/ The following 8 bytes contain the PC of the current function which
	will be used by the run-time library to print an error message.

     4/ The following 8 bytes are reserved for internal use by the run-time.
   The shadow memory for that stack layout is going to look like this:

     - content of the shadow memory (4 bytes) for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of the shadow memory (4 bytes) for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of the shadow memory (4 bytes) for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of the shadow memory (4 bytes) for slots 3 and 2:
       0xF4000000.  This is the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of the shadow memory (4 bytes) for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.
   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark
   as non-accessible) the regions of the red zones and mark the regions
   of stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.
   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to
   populate the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone between
   them.  The size of the red zones is chosen so that each variable starts
   on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to struct that contains source location, could be NULL.
       __asan_global_source_location *__location;
     };

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
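/* Purely illustrative sketch, not part of GCC proper: the N-byte check
   described in the overview comment above, written out in plain C for
   a 4-byte load.  SHADOW_OFFSET is a hypothetical constant standing in
   for the value returned by targetm.asan_shadow_offset ().

     void
     checked_load4 (const void *p)
     {
       uintptr_t x = (uintptr_t) p;
       signed char shadow = *(signed char *) ((x >> 3) + SHADOW_OFFSET);
       // Shadow byte 0 means all 8 bytes are addressable; a value k in
       // 1..7 means only the first k bytes are; red zone magic bytes
       // are negative, so the signed comparison below always reports
       // them.
       if (shadow != 0 && (intptr_t) (x & 7) + 4 - 1 >= shadow)
	 __asan_report_load4 (x);
       // ... the actual 4-byte load follows here ...
     }  */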
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;
/* Sets shadow offset to value in string VAL.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}
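/* Illustrative only: with -fasan-shadow-offset=0x7fff8000 (accepted
   together with -fsanitize=kernel-address), the option machinery ends
   up doing the equivalent of

     set_asan_shadow_offset ("0x7fff8000");

   and the base-0 strtoull call above accepts hexadecimal, octal and
   decimal spellings alike.  */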
/* Set list of user-defined sections that need to be sanitized.  */

void
set_sanitized_sections (const char *sections)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    free (pat);
  sanitized_sections.truncate (0);

  for (const char *s = sections; *s; )
    {
      const char *end;
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      sanitized_sections.safe_push (xstrndup (s, len));
      s = *end ? end + 1 : end;
    }
}
/* Checks whether section SEC should be sanitized.  */

static bool
section_sanitized_p (const char *sec)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    if (fnmatch (pat, sec, FNM_PERIOD) == 0)
      return true;
  return false;
}
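/* Illustrative only: after -fsanitize-sections=.mysec,.data.* has been
   processed by set_sanitized_sections above, the stored patterns act
   as fnmatch globs:

     section_sanitized_p (".mysec")     => true
     section_sanitized_p (".data.foo")  => true
     section_sanitized_p (".rodata")    => false  */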
/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Various flags for Asan builtins.  */
enum asan_check_flags
{
  ASAN_CHECK_STORE = 1 << 0,
  ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
  ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
  ASAN_CHECK_LAST = 1 << 3
};
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}
/* Allocates memory for an instance of asan_mem_ref from the memory
   pool asan_mem_ref_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref = asan_mem_ref_pool.allocate ();

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
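/* For example, asan_mem_ref_get_end on a reference starting at P with
   LEN 16 builds the tree for P + 16, while a NULL_TREE or zero LEN
   simply returns the start address itself.  */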
struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  Two references are equal when they
   start at the same expression; their access sizes are deliberately
   ignored here.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}
static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}
/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  asan_mem_ref_pool.release ();
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
    && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gcall *call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref,
			      bool *intercepted_p)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      dest = gimple_call_arg (call, 0);
      /* DEST represents the address of a memory location.
	 instrument_derefs wants the memory location, so let's
	 dereference the address DEST before handing it to
	 instrument_derefs.  */
      if (TREE_CODE (dest) == ADDR_EXPR)
	dest = TREE_OPERAND (dest, 0);
      else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
	dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
		       dest, build_int_cst (TREE_TYPE (dest), 0));
      else
	gcc_unreachable ();

      access_size = int_size_in_bytes (TREE_TYPE (dest));
      break;

    default:
      /* The other memory access builtins are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }
  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple *stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  else if (is_gimple_call (stmt) && gimple_store_p (stmt))
    {
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      r.start = gimple_call_lhs (stmt);
      r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
      return has_mem_ref_been_instrumented (&r);
    }

  return false;
}
/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}
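/* For example, after update_mem_ref_hash_table (REF, 4) a later query
   has_mem_ref_been_instrumented (REF, 1) returns true, because a wider
   instrumented access covers the narrower ones, whereas a query for
   size 8 keeps returning false until an 8-byte check is inserted.  */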
/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}
/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}
/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}
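/* For instance, on a little-endian target the shadow bytes
   { 0x00, 0xF4, 0xF4, 0xF4 } are packed into the SImode constant
   0xF4F4F400: one shadow byte of fully addressable memory followed by
   three bytes of partial red zone, matching the slots 6 and 5 example
   in the overview comment at the top of this file.  */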
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  We can't emit a
   library call here though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
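/* In C terms, the fallback loop emitted above behaves like this
   illustrative sketch, with SHADOW standing for the address of
   SHADOW_MEM:

     for (unsigned char *p = shadow; p < shadow + len; p += 4)
       *(int *) p = 0;

   i.e. the shadow is cleared one SImode word at a time, which is why
   LEN is asserted to be a multiple of 4.  */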
/* Emit the LASANPC label at the beginning of the current function;
   asan_emit_stack_protection stores this label's address in the stack
   frame description so the run-time library can report the PC.  */

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
}
/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, the epilogue sequence is returned.  BASE is the register holding
   the stack base, relative to which the OFFSETS array offsets are computed.
   The OFFSETS array contains pairs of offsets in reverse order, always the
   end offset of some gap that needs protection followed by its starting
   offset, and DECLS is an array of representative decls for each var
   partition.  ALIGNB is the maximum alignment (in bytes) of the protected
   variables.  LENGTH is the length of the OFFSETS array, the DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS include the gap before the first
   variable as well as the gaps after each stack variable).  PBASE is, if
   non-NULL, some pseudo register which stack vars DECL_RTLs are based on.
   Either BASE should be assigned to PBASE, when not doing use-after-return
   protection, or the corresponding address based on the
   __asan_stack_malloc_* return value.  */

rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[30];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);
  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 1,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node));
      /* __asan_stack_malloc_[n] returns a pointer to fake stack if succeeded
	 and NULL otherwise.  Check RET value is NULL here and jump over the
	 BASE reassignment in this case.  Otherwise, reassign BASE to RET.  */
      int very_unlikely = REG_BR_PROB_BASE / 2000 - 1;
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_unlikely);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();
  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }
  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}
/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}
/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: Linux kernel is built with -fno-common, so we do instrument
	 globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section
	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ())
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}
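/* Illustrative only, under the checks above:

     int exported[16];         // protected: gets a trailing red zone
     static char buf[100];     // protected
     __thread int tls_var;     // rejected: TLS
     extern int external_var;  // rejected here: protected elsewhere

   where "protected" means the variable is emitted with red zone
   padding and registered with __asan_register_globals at load time.  */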
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
		   int *nargs)
{
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}
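/* Illustrative only: for a 4-byte store with recovery enabled,

     int nargs;
     tree fn = report_error_func (true, true, 4, &nargs);

   selects BUILT_IN_ASAN_REPORT_STORE4_NOABORT and sets nargs to 1,
   whereas SIZE_IN_BYTES == -1 selects the _N variant, which takes the
   address and the size, so nargs becomes 2.  */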
/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
	    int *nargs)
{
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
	    BUILT_IN_ASAN_LOAD2_NOABORT,
	    BUILT_IN_ASAN_LOAD4_NOABORT,
	    BUILT_IN_ASAN_LOAD8_NOABORT,
	    BUILT_IN_ASAN_LOAD16_NOABORT,
	    BUILT_IN_ASAN_LOADN_NOABORT },
	  { BUILT_IN_ASAN_STORE1_NOABORT,
	    BUILT_IN_ASAN_STORE2_NOABORT,
	    BUILT_IN_ASAN_STORE4_NOABORT,
	    BUILT_IN_ASAN_STORE8_NOABORT,
	    BUILT_IN_ASAN_STORE16_NOABORT,
	    BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */

gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outcoming edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gcond *cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
			      /*before_p=*/true,
			      then_more_likely_p,
			      /*create_then_fallthru_edge=*/true,
			      then_bb,
			      fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}
/* Build and insert the shadow memory load for BASE_ADDR, i.e. the
   dereference of

     (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ()

   and return the SSA name holding the loaded shadow value.  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple *g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
			   base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
			   gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
			   gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
	      build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
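/* The gimple sequence built above corresponds to this illustrative C
   fragment, where base_addr is already an unsigned integer:

     uintptr_t t0 = base_addr >> ASAN_SHADOW_SHIFT;
     uintptr_t t1 = t0 + asan_shadow_offset ();
     shadow_type *t2 = (shadow_type *) t1;
     shadow_type val = *t2;

   with VAL being what the function hands back to its caller.  */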
/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
				   TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* LEN can already have necessary size and precision;
   in that case, do not create a new variable.  */

tree
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (ptrofftype_p (len))
    return len;
  gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for memory region access.
   IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
   a non-zero length.  ALIGN tells the alignment of the accessed
   memory object.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */
static void
build_check_stmt (location_t loc, tree base, tree len,
                  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
                  bool is_non_zero_len, bool before_p, bool is_store,
                  bool is_scalar_access, unsigned int align = 0)
{
  gimple_stmt_iterator gsi = *iter;
  gimple *g;

  gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));

  gsi = *iter;

  base = unshare_expr (base);
  base = maybe_create_ssa_name (loc, base, &gsi, before_p);

  if (len)
    {
      len = unshare_expr (len);
      len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
    }
  else
    {
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }

  if (size_in_bytes > 1)
    {
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
          || size_in_bytes > 16)
        is_scalar_access = false;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
        {
          /* On non-strict alignment targets, if a 16-byte access is
             just 8-byte aligned, this results in a misaligned 2-byte
             shadow memory load, but otherwise the access can be
             handled using one shadow read.  */
          if (size_in_bytes != 16
              || STRICT_ALIGNMENT
              || align < 8 * BITS_PER_UNIT)
            is_scalar_access = false;
        }
    }

  HOST_WIDE_INT flags = 0;
  if (is_store)
    flags |= ASAN_CHECK_STORE;
  if (is_non_zero_len)
    flags |= ASAN_CHECK_NON_ZERO_LEN;
  if (is_scalar_access)
    flags |= ASAN_CHECK_SCALAR_ACCESS;

  g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
                                  build_int_cst (integer_type_node, flags),
                                  base, len,
                                  build_int_cst (integer_type_node,
                                                 align / BITS_PER_UNIT));
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  else
    {
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      gsi_next (&gsi);
      *iter = gsi;
    }
}

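/* Illustrative sketch, not part of the original source: for an
   aligned 4-byte store such as "*p_3 = 1", the function above emits a
   single internal call of the shape

     .ASAN_CHECK (ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
                  | ASAN_CHECK_SCALAR_ACCESS, p_3, 4, 4);

   which stays in the IL until asan_expand_check_ifn below turns it
   into an inline shadow-memory test or a runtime callback.  */
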
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
                   location_t location, bool is_store)
{
  if (is_store && !ASAN_INSTRUMENT_WRITES)
    return;
  if (!is_store && !ASAN_INSTRUMENT_READS)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;
  if (location == UNKNOWN_LOCATION)
    location = EXPR_LOCATION (t);

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case BIT_FIELD_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
                                    &unsignedp, &reversep, &volatilep);

  if (TREE_CODE (t) == COMPONENT_REF
      && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
    {
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
      instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
                                       TREE_OPERAND (t, 0), repr,
                                       NULL_TREE), location, is_store);
      return;
    }

  if (bitpos % BITS_PER_UNIT
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    return;

  if (TREE_CODE (inner) == VAR_DECL
      && offset == NULL_TREE
      && bitpos >= 0
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
        return;
      if (!ASAN_GLOBALS && is_global_var (inner))
        return;
      if (!TREE_STATIC (inner))
        {
          /* Automatic vars in the current function are always
             accessible.  */
          if (decl_function_context (inner) == current_function_decl)
            return;
        }
      /* Always instrument external vars, they might be dynamically
         initialized.  */
      else if (!DECL_EXTERNAL (inner))
        {
          /* Static vars that are known not to be dynamically
             initialized are always accessible.  */
          varpool_node *vnode = varpool_node::get (inner);
          if (vnode && !vnode->dynamically_initialized)
            return;
        }
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
                        /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
                        is_store, /*is_scalar_access*/true, align);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}

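/* Example, not part of the original source: given

     struct S { int x; } a;
     ...
     a.x = 1;

   the COMPONENT_REF "a.x" is an aligned 4-byte scalar access, so the
   code above calls build_check_stmt on &a.x with size 4 before the
   store, unless the VAR_DECL analysis above already proved "a" always
   accessible (e.g. an automatic variable of the current function).  */
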
/* Insert a memory reference into the hash table if the access length
   can be determined at compile time.  */

static void
maybe_update_mem_ref_hash_table (tree base, tree len)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
    return;

  HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (size_in_bytes != -1)
    update_mem_ref_hash_table (base, size_in_bytes);
}

/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   units of sizeof (*BASE)).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

static void
instrument_mem_region_access (tree base, tree len,
                              gimple_stmt_iterator *iter,
                              location_t location, bool is_store)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (size_in_bytes == -1
      || !has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      build_check_stmt (location, base, len, size_in_bytes, iter,
                        /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
                        is_store, /*is_scalar_access*/false, /*align*/0);
    }

  maybe_update_mem_ref_hash_table (base, len);
  *iter = gsi_for_stmt (gsi_stmt (*iter));
}

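/* Example, not part of the original source: for
   __builtin_memset (p_5, 0, n_7) with a non-constant n_7, the caller
   passes BASE=p_5 and LEN=n_7 and the code above emits a non-scalar

     .ASAN_CHECK (ASAN_CHECK_STORE, p_5, n_7, 0);

   since the length is not known to be non-zero, its later expansion
   is guarded by "if (n_7 != 0)" and checks the first and the last
   byte of the region.  */
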
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  if (!ASAN_MEMINTRIN)
    return false;

  bool iter_advanced_p = false;
  gcall *call = as_a <gcall *> (gsi_stmt (*iter));

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  location_t loc = gimple_location (call);

  asan_mem_ref src0, src1, dest;
  asan_mem_ref_init (&src0, NULL, 1);
  asan_mem_ref_init (&src1, NULL, 1);
  asan_mem_ref_init (&dest, NULL, 1);

  tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
  bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
    dest_is_deref = false, intercepted_p = true;

  if (get_mem_refs_of_builtin_call (call,
                                    &src0, &src0_len, &src0_is_store,
                                    &src1, &src1_len, &src1_is_store,
                                    &dest, &dest_len, &dest_is_store,
                                    &dest_is_deref, &intercepted_p))
    {
      if (dest_is_deref)
        {
          instrument_derefs (iter, dest.start, loc, dest_is_store);
          gsi_next (iter);
          iter_advanced_p = true;
        }
      else if (!intercepted_p
               && (src0_len || src1_len || dest_len))
        {
          if (src0.start != NULL_TREE)
            instrument_mem_region_access (src0.start, src0_len,
                                          iter, loc, /*is_store=*/false);
          if (src1.start != NULL_TREE)
            instrument_mem_region_access (src1.start, src1_len,
                                          iter, loc, /*is_store=*/false);
          if (dest.start != NULL_TREE)
            instrument_mem_region_access (dest.start, dest_len,
                                          iter, loc, /*is_store=*/true);

          *iter = gsi_for_stmt (call);
          gsi_next (iter);
          iter_advanced_p = true;
        }
      else
        {
          if (src0.start != NULL_TREE)
            maybe_update_mem_ref_hash_table (src0.start, src0_len);
          if (src1.start != NULL_TREE)
            maybe_update_mem_ref_hash_table (src1.start, src1_len);
          if (dest.start != NULL_TREE)
            maybe_update_mem_ref_hash_table (dest.start, dest_len);
        }
    }
  return iter_advanced_p;
}

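/* Sketch, not part of the original source: when the callee is not
   intercepted by the runtime library, region checks are emitted for
   each source and for the destination, e.g. a load check over
   [s_5, s_5 + n) and a store check over [d_3, d_3 + n).  For an
   intercepted builtin such as memcpy the runtime library does the
   checking itself, so the branch above only records the references
   in the hash table to avoid re-instrumenting them later.  */
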
/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the relevant memory reference whose access has been
   instrumented is added to the memory references hash table.  */

static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple *s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      is_store = true;
      instrument_derefs (iter, ref_expr,
                         gimple_location (s),
                         is_store);
      is_instrumented = true;
    }

  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      is_store = false;
      instrument_derefs (iter, ref_expr,
                         gimple_location (s),
                         is_store);
      is_instrumented = true;
    }

  if (is_instrumented)
    gsi_next (iter);

  return is_instrumented;
}

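/* Example, not part of the original source: an aggregate copy

     *p_3 = *q_5;

   is both a store (the LHS) and a load (the RHS), so both branches
   above fire and two checks are inserted before the statement; *ITER
   then points to the statement after the assignment.  */
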
/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple *stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
        {
          tree callee = gimple_call_fndecl (stmt);
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_UNREACHABLE:
            case BUILT_IN_TRAP:
              /* Don't instrument these.  */
              return false;
            default:
              break;
            }
        }
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple *g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }

  bool instrumented = false;
  if (gimple_store_p (stmt))
    {
      tree ref_expr = gimple_call_lhs (stmt);
      instrument_derefs (iter, ref_expr,
                         gimple_location (stmt),
                         /*is_store=*/true);

      instrumented = true;
    }

  /* Walk through gimple_call arguments and check them if needed.  */
  unsigned args_num = gimple_call_num_args (stmt);
  for (unsigned i = 0; i < args_num; ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* If ARG is not a non-aggregate register variable, the compiler
         in general creates a temporary for it and passes that
         temporary as the argument to the gimple call.  But in some
         cases, e.g. when we pass by value a small structure that fits
         in a register, the compiler can avoid the extra overhead by
         pulling out these temporaries.  In that case, we should check
         the argument.  */
      if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
        {
          instrument_derefs (iter, arg,
                             gimple_location (stmt),
                             /*is_store=*/false);
          instrumented = true;
        }
    }
  if (instrumented)
    gsi_next (iter);
  return instrumented;
}

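/* Sketch, not part of the original source: for a noreturn call the
   code above turns

     abort ();

   into

     __asan_handle_no_return ();
     abort ();

   so the runtime can unpoison the stack the call is about to abandon;
   BUILT_IN_TRAP and BUILT_IN_UNREACHABLE are deliberately left
   alone.  */
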
/* Walk each instruction of all basic blocks and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
         exactly one predecessor, or if that predecessor (skipping
         over asan created basic blocks) isn't the last processed
         basic block.  Thus we effectively flush on extended basic
         block boundaries.  */
      while (single_pred_p (prev_bb))
        {
          prev_bb = single_pred (prev_bb);
          if (prev_bb->index < saved_last_basic_block)
            break;
        }
      if (prev_bb != last_bb)
        empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
        {
          gimple *s = gsi_stmt (i);

          if (has_stmt_been_instrumented_p (s))
            gsi_next (&i);
          else if (gimple_assign_single_p (s)
                   && !gimple_clobber_p (s)
                   && maybe_instrument_assignment (&i))
            /* Nothing to do as maybe_instrument_assignment advanced
               the iterator I.  */;
          else if (is_gimple_call (s) && maybe_instrument_call (&i))
            /* Nothing to do as maybe_instrument_call
               advanced the iterator I.  */;
          else
            {
              /* No instrumentation happened.

                 If the current instruction is a function call that
                 might free something, let's forget about the memory
                 references that got instrumented.  Otherwise we might
                 miss some instrumentation opportunities.  */
              if (is_gimple_call (s) && !nonfreeing_call_p (s))
                empty_mem_ref_hash_table ();

              gsi_next (&i);
            }
        }
    }
  free_mem_ref_resources ();
}

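/* Example, not part of the original source: in straight-line code like

     tmp_1 = g;   // instrumented; (g, 4) enters the hash table
     tmp_2 = g;   // has_stmt_been_instrumented_p -> just skip

   the second load is not re-checked, because both statements sit in
   the same extended basic block; a possibly-freeing call such as
   free (p_7) between them would flush the table and force a fresh
   check.  */
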
/* Build
   __asan_before_dynamic_init (module_name)
   or
   __asan_after_dynamic_init ()
   call.  */

tree
asan_dynamic_init_call (bool after_p)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  tree fn = builtin_decl_implicit (after_p
                                   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
                                   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
  tree module_name_cst = NULL_TREE;
  if (!after_p)
    {
      pretty_printer module_name_pp;
      pp_string (&module_name_pp, main_input_filename);

      module_name_cst = asan_pp_string (&module_name_pp);
      module_name_cst = fold_convert (const_ptr_type_node,
                                      module_name_cst);
    }

  return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
}

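/* Illustration, not part of the original source: for a C++ file t.C
   with dynamically initialized globals, the generated initialization
   function is bracketed roughly as

     __asan_before_dynamic_init ("t.C");
     ... run the dynamic initializers of t.C ...
     __asan_after_dynamic_init ();

   which lets the runtime detect initialization-order bugs, i.e. reads
   of another module's global before that global's initializer has
   run.  */
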
/* Build
   struct __asan_global
   {
     const void *__beg;
     uptr __size;
     uptr __size_with_redzone;
     const void *__name;
     const void *__module_name;
     uptr __has_dynamic_init;
     __asan_global_source_location *__location;
   } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[7]
    = { "__beg", "__size", "__size_with_redzone",
        "__name", "__module_name", "__has_dynamic_init", "__location" };
  tree fields[7], ret;
  int i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < 7; i++)
    {
      fields[i]
        = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                      get_identifier (field_names[i]),
                      (i == 0 || i == 3) ? const_ptr_type_node
                      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
        DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  tree type_decl = build_decl (input_location, TYPE_DECL,
                               get_identifier ("__asan_global"), ret);
  DECL_IGNORED_P (type_decl) = 1;
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = type_decl;
  TYPE_STUB_DECL (ret) = type_decl;
  layout_type (ret);
  return ret;
}

/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
                            VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node,
                                        build_fold_addr_expr (refdecl)));
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node, module_name_cst));
  varpool_node *vnode = varpool_node::get (decl);
  int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          build_int_cst (uptr, has_dynamic_init));
  tree locptr = NULL_TREE;
  location_t loc = DECL_SOURCE_LOCATION (decl);
  expanded_location xloc = expand_location (loc);
  if (xloc.file != NULL)
    {
      static int lasanloccnt = 0;
      char buf[25];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
      tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
                             ubsan_get_source_location_type ());
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      pretty_printer filename_pp;
      pp_string (&filename_pp, xloc.file);
      tree str = asan_pp_string (&filename_pp);
      tree ctor = build_constructor_va (TREE_TYPE (var), 3,
                                        NULL_TREE, str, NULL_TREE,
                                        build_int_cst (unsigned_type_node,
                                                       xloc.line), NULL_TREE,
                                        build_int_cst (unsigned_type_node,
                                                       xloc.column));
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      locptr = fold_convert (uptr, build_fold_addr_expr (var));
    }
  else
    locptr = build_int_cst (uptr, 0);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}

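/* Sketch, not part of the original source: for "int g;" defined in
   t.c, the element appended to V corresponds roughly to

     { &g, 4, 4 + <redzone>, "g", "t.c", <has_dynamic_init>, &.LASANLOC1 }

   where <redzone> comes from asan_red_zone_size and .LASANLOC1 is the
   {"t.c", line, column} record built above.  */
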
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */

void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
                                ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
                                ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
                                pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_SIZE_CONST_PTR_INT
    = build_function_type_list (size_type_node, const_ptr_type_node,
                                integer_type_node, NULL_TREE);
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_VOLATILE
                                                | TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
        = build_function_type_list (boolt, vptr, ptr_type_node, ix,
                                    integer_type_node, integer_type_node,
                                    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
        = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
        = build_function_type_list (ix, vptr, ix, integer_type_node,
                                    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
        = build_function_type_list (void_type_node, vptr, ix,
                                    integer_type_node, NULL_TREE);
    }
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
#undef DEF_BUILTIN_STUB
#define DEF_BUILTIN_STUB(ENUM, NAME)
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
                               BUILT_IN_NORMAL, NAME, NULL_TREE); \
  set_call_expr_flags (decl, ATTRS); \
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_object_size, but that might
     not be available for e.g. Fortran at this point.  We use
     DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
  if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
      && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
    DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
                           BT_FN_SIZE_CONST_PTR_INT,
                           ATTR_PURE_NOTHROW_LEAF_LIST)

#undef DEF_SANITIZER_BUILTIN
#undef DEF_BUILTIN_STUB
}

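/* Illustration, not part of the original source: for a hypothetical
   sanitizer.def entry

     DEF_SANITIZER_BUILTIN (BUILT_IN_FOO, "foo", BT_FN_VOID_PTR,
                            ATTR_NOTHROW_LEAF_LIST)

   the macro defined above expands to

     decl = add_builtin_function ("__builtin_foo", BT_FN_VOID_PTR,
                                  BUILT_IN_FOO, BUILT_IN_NORMAL,
                                  "foo", NULL_TREE);
     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);
     set_builtin_decl (BUILT_IN_FOO, decl, true);

   once for every entry in the file.  */
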
/* Called via htab_traverse.  Count number of emitted
   STRING_CSTs in the constant hash table.  */

static int
count_string_csts (constant_descriptor_tree **slot,
                   unsigned HOST_WIDE_INT *data)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*data;
  return 1;
}

/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};

/* Called via hash_table::traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

static int
add_string_csts (constant_descriptor_tree **slot,
                 asan_add_string_csts_data *aascd)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
                       aascd->type, aascd->v);
    }
  return 1;
}

/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;

/* Module-level instrumentation.
   - Insert __asan_init_vN() into the list of CTORs.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  /* For user-space we want asan constructors to run first.
     Linux kernel does not support priorities other than default, and the only
     other user of constructors is coverage.  So we run with the default
     priority.  */
  int priority = flag_sanitize & SANITIZE_USER_ADDRESS
                 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;

  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
      fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
    }
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
        && asan_protect_global (vnode->decl))
      ++gcount;
  hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
  const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
    (&gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
                        type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
        if (TREE_ASM_WRITTEN (vnode->decl)
            && asan_protect_global (vnode->decl))
          asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
        (&aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);

      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
                                                 build_fold_addr_expr (var),
                                                 gcount_tree),
                                &asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
                                                 build_fold_addr_expr (var),
                                                 gcount_tree),
                                &dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements, priority);
    }
  if (asan_ctor_statements)
    cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
  flag_sanitize |= SANITIZE_ADDRESS;
}

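/* Sketch, not part of the original source: for a translation unit
   with instrumented globals, the net effect is a constructor and a
   destructor (names chosen by cgraph_build_static_cdtor) of roughly
   this shape:

     // 'I' ctor, runs at PRIORITY
     __asan_init ();
     __asan_version_mismatch_check_vN ();
     __asan_register_globals (&.LASAN0, <gcount>);

     // 'D' dtor
     __asan_unregister_globals (&.LASAN0, <gcount>);  */
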
/* Expand the ASAN_{LOAD,STORE} builtins.  */

bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;

  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);
  HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));

  HOST_WIDE_INT size_in_bytes
    = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (use_calls)
    {
      /* Instrument using callbacks.  */
      gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                       NOP_EXPR, base);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree base_addr = gimple_assign_lhs (g);

      int nargs;
      tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
      if (nargs == 1)
        g = gimple_build_call (fun, 1, base_addr);
      else
        {
          gcc_assert (nargs == 2);
          g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   NOP_EXPR, len);
          gimple_set_location (g, loc);
          gsi_insert_before (iter, g, GSI_SAME_STMT);
          tree sz_arg = gimple_assign_lhs (g);
          g = gimple_build_call (fun, nargs, base_addr, sz_arg);
        }
      gimple_set_location (g, loc);
      gsi_replace (iter, g, false);
      return false;
    }

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* So, the length of the memory area to asan-protect is
         non-constant.  Let's guard the generated instrumentation code
         like:

         if (len != 0)
           {
             // asan instrumentation code goes here.
           }
         // fallthrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
                             len,
                             build_int_cst (TREE_TYPE (len), 0),
                             NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
                                  /*then_more_likely_p=*/true,
                                  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
         pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
         we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, /*before_p*/false,
                                  /*then_more_likely_p=*/false,
                                  /*create_then_fallthru_edge*/recover_p,
                                  &then_bb,
                                  &else_bb);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                           NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
                                             shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1, 2 and 4 byte accesses.  */
      /* Test (shadow != 0)
         & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
                                             shadow_ptr_type);
      gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq seq = NULL;
      gimple_seq_add_stmt (&seq, shadow_test);
      /* Aligned accesses (alignment >= 8 bytes) can test just
         (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
         to be 0.  */
      if (align < 8)
        {
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
                                                   base_addr, 7));
          gimple_seq_add_stmt (&seq,
                               build_type_cast (shadow_type,
                                                gimple_seq_last (seq)));
          if (real_size_in_bytes > 1)
            gimple_seq_add_stmt (&seq,
                                 build_assign (PLUS_EXPR,
                                               gimple_seq_last (seq),
                                               real_size_in_bytes - 1));
          t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
        }
      else
        t = build_int_cst (shadow_type, real_size_in_bytes - 1);
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
                                               gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, loc);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

      /* For non-constant, misaligned or otherwise weird access sizes,
         check first and last byte.  */
      if (size_in_bytes == -1)
        {
          g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   MINUS_EXPR, len,
                                   build_int_cst (pointer_sized_int_node, 1));
          gimple_set_location (g, loc);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);
          tree last = gimple_assign_lhs (g);
          g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   PLUS_EXPR, base_addr, last);
          gimple_set_location (g, loc);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);
          tree base_end_addr = gimple_assign_lhs (g);

          tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
                                                 shadow_ptr_type);
          gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
          gimple_seq seq = NULL;
          gimple_seq_add_stmt (&seq, shadow_test);
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
                                                   base_end_addr, 7));
          gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
                                                      gimple_seq_last (seq)));
          gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
                                                   gimple_seq_last (seq),
                                                   shadow));
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
                                                   gimple_seq_last (seq)));
          gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
                                                   gimple_seq_last (seq)));
          t = gimple_assign_lhs (gimple_seq_last (seq));
          gimple_seq_set_location (seq, loc);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
        }
    }

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
                         NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}

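/* Illustration, not part of the original source: expanding a scalar
   4-byte store check inline replaces the .ASAN_CHECK call with a
   conditional of roughly this shape

     <compute shadow test into t_9>
     if (t_9 != 0)
       __asan_report_store4 (base_addr);  // then_bb; noreturn unless
                                          // -fsanitize-recover=address
     <original statements>                // else_bb

   whereas the USE_CALLS path instead replaces the internal call with
   a single __asan_store4 (base_addr) callback and leaves the control
   flow graph untouched.  */
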
/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  return 0;
}

static bool
gate_asan (void)
{
  return (flag_sanitize & SANITIZE_ADDRESS) != 0
          && !lookup_attribute ("no_sanitize_address",
                                DECL_ATTRIBUTES (current_function_decl));
}

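/* Example, not part of the original source: the gate above means that

     __attribute__ ((no_sanitize_address)) void f (void) { ... }

   is compiled without any instrumentation even under
   -fsanitize=address.  */
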
namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}

namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}

#include "gt-asan.h"