/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2015 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hash-table.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "inchash.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "calls.h"
#include "varasm.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "asan.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "output.h"
#include "tm_p.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "cfgloop.h"
#include "gimple-builder.h"
#include "ubsan.h"
#include "params.h"
#include "builtins.h"

/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
	 __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
	 if ((X & 7) + N - 1 > ShadowValue)
	   __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
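
   As a concrete (hypothetical) example, take a 4-byte load from
   X = 0x7fff0004 with Offset = 0x7fff8000 (a typical x86_64 value; the
   real offset comes from the target hook used below).  Plugging the
   numbers into the N-byte case above gives:
     ShadowAddr = (0x7fff0004 >> 3) + 0x7fff8000 = 0x8fff6000
     ShadowValue = *(char*)0x8fff6000;
     if (ShadowValue)
       if ((0x7fff0004 & 7) + 4 - 1 > ShadowValue)  // i.e. 7 > ShadowValue
	 __asan_report_load4(0x7fff0004);
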
   A call to __asan_init_vN() is inserted into the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   and provides the __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

     Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

     Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
	      the next slot be 32 bytes aligned; this one is called Partial
	      Redzone; this 32-byte alignment is an asan constraint]

     Slot 3/ [24 bytes for variable 'a']

     Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

     Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

     Slot 6/ [8 bytes for variable 'b']

     Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
	      'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

     1/ The first 8 bytes contain a magical asan number that is always
     0x41B58AB3.

     2/ The following 8 bytes contain a pointer to a string (to be
     parsed at runtime by the runtime asan library), whose format is
     the following:

      "<function-name> <space> <num-of-variables-on-the-stack>
      (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
      <length-of-var-in-bytes> ){n} "

	where '(...){n}' means the content inside the parenthesis occurs 'n'
	times, with 'n' being the number of variables on the stack.
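
	For the frame of foo above, for instance, counting offsets upward
	from the bottom of the frame (the start of the LEFT red zone),
	the string would look roughly like

	  "foo 2 32 8 96 24 "

	i.e. two variables, 'b' at the 32-byte-aligned offset 32 with
	length 8, and 'a' at offset 96 with length 24.  (The values are
	illustrative; asan_emit_stack_protection below additionally emits
	each variable's name length and name after its length field.)
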
     3/ The following 8 bytes contain the PC of the current function which
     will be used by the run-time library to print an error message.

     4/ The following 8 bytes are reserved for internal use by the run-time.

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') is addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This is the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to do
   the populating of the relevant shadow memory regions at load time.

   So the global variables are laid out as to insert a red zone between
   them.  The red zones are sized so that each variable starts on a
   32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to struct that contains source location, could be NULL.
       __asan_global_source_location *__location;
     };

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */

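/* For illustration only: for a translation unit "t.c" containing just
   "int g;", the instrumentation conceptually produces something like the
   following (the names and the red-zone-padded size are hypothetical):

     static struct __asan_global __ag[1]
       = { { &g, sizeof (g), 32, "g", "t.c", 0, NULL } };
     static void __ctor (void) { __asan_register_globals (__ag, 1); }
     static void __dtor (void) { __asan_unregister_globals (__ag, 1); }

   with __ctor and __dtor appended to the module's constructor and
   destructor lists.  */
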
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;

/* Sets shadow offset to value in string VAL.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}

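/* This is the backend of the -fasan-shadow-offset= command-line option
   (meaningful together with -fsanitize=kernel-address), e.g.

     gcc -fsanitize=kernel-address -fasan-shadow-offset=0x10000000 ...

   The 0x10000000 value here is just an example; any offset the kernel's
   shadow mapping uses can be given in decimal, octal or hex.  */
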
/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}

alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Various flags for Asan builtins.  */
enum asan_check_flags
{
  ASAN_CHECK_STORE = 1 << 0,
  ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
  ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
  ASAN_CHECK_LAST = 1 << 3
};

/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

static alloc_pool asan_mem_ref_alloc_pool;

/* This creates the alloc pool used to store the instances of
   asan_mem_ref that are stored in the hash table asan_mem_ref_ht.  */

static alloc_pool
asan_mem_ref_get_alloc_pool ()
{
  if (asan_mem_ref_alloc_pool == NULL)
    asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
						 sizeof (asan_mem_ref),
						 10);
  return asan_mem_ref_alloc_pool;
}

/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref in the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of the memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref =
    (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}

/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}

struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref value_type;
  typedef asan_mem_ref compare_type;

  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}

static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}

/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  if (asan_mem_ref_alloc_pool)
    {
      free_alloc_pool (asan_mem_ref_alloc_pool);
      asan_mem_ref_alloc_pool = NULL;
    }
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
    && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}

/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}

/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gcall *call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref,
			      bool *intercepted_p)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      dest = gimple_call_arg (call, 0);
      /* DEST represents the address of a memory location.
	 instrument_derefs wants the memory location, so let's
	 dereference the address DEST before handing it to
	 instrument_derefs.  */
      if (TREE_CODE (dest) == ADDR_EXPR)
	dest = TREE_OPERAND (dest, 0);
      else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
	dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
		       dest, build_int_cst (TREE_TYPE (dest), 0));
      else
	gcc_unreachable ();

      access_size = int_size_in_bytes (TREE_TYPE (dest));

    default:
      /* The other memory access builtins are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}

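/* For example, for memcpy (d, s, n) the code above fills SRC0 with
   {s, 1} and sets *SRC0_LEN = n and *SRC0_IS_STORE = false (a load),
   and fills DST with {d, 1}, *DST_LEN = n and *DST_IS_STORE = true
   (a store), so the caller can instrument both the source region
   [s, s + n) and the destination region [d, d + n).  */
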
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  return false;
}

/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}

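/* Together with has_mem_ref_been_instrumented, this implements the
   usual check-then-record pattern (a sketch; see instrument_derefs
   below for a real caller):

     if (!has_mem_ref_been_instrumented (base, size_in_bytes))
       {
	 ... emit the run-time check ...
	 update_mem_ref_hash_table (base, size_in_bytes);
       }

   so that a second identical access seen by the pass is not
   instrumented twice.  */
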
/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}

/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}

/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}

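/* For instance, on a little-endian target, shadow_bytes
   = { 0x00, 0xF4, 0xF4, 0xF4 } (one addressable 8-byte slot followed by
   partial red zone, as for slots 6 and 5 in the stack layout described
   at the top of this file) packs into the SImode constant 0xF4F4F400,
   ready to be stored with a single 4-byte move.  */
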
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  We cannot emit a
   library call here, though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}

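/* When clear_storage would have emitted a library call, the loop built
   above is equivalent to this C sketch (pointer names illustrative):

     uint32_t *p = (uint32_t *) shadow_addr;
     uint32_t *end = (uint32_t *) (shadow_addr + len);
     do
       *p++ = 0;
     while (p < end);

   with the backward branch annotated as taken with 80% probability.  */
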
/* Emit the LASANPC label at the start of the current function;
   asan_emit_stack_protection below stores its address into the frame
   so the run-time library can report which function faulted.  */

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
}

/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, the epilogue sequence returned.  BASE is the register holding the
   stack base, relative to which the OFFSETS array offsets are.  The OFFSETS
   array contains pairs of offsets in reverse order, always the end offset
   of some gap that needs protection followed by its starting offset,
   and DECLS is an array of representative decls for each var partition.
   LENGTH is the length of the OFFSETS array, the DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS include the gap before the first
   variable as well as the gaps after each stack variable).  PBASE is, if
   non-NULL, some pseudo register which stack vars' DECL_RTLs are based on.
   Either BASE should be assigned to PBASE, when not doing use-after-return
   protection, or the corresponding address based on the __asan_stack_malloc*
   return value.  */

rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[30];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);

  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node),
				     addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}

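/* Taken together, the sequences built above behave roughly like this C
   sketch (a simplification; FRAME_SIZE and N vary per function, names
   are the runtime's):

     // prologue
     char *frame = <real stack frame>;
     if (__asan_option_detect_stack_use_after_return)
       frame = __asan_stack_malloc_N (FRAME_SIZE, frame);   // fake frame
     ((uptr *) frame)[0] = 0x41B58AB3;           // ASAN_STACK_FRAME_MAGIC
     ((uptr *) frame)[1] = (uptr) <description string built above>;
     ((uptr *) frame)[2] = (uptr) <address of the LASANPC label>;
     // ...poison red-zone shadow bytes, unpoison variable bytes...

     // epilogue
     if (frame != <real stack frame>)
       // mark the fake frame retired (ASAN_STACK_RETIRED_MAGIC) and
       // poison it with ASAN_STACK_MAGIC_USE_AFTER_RET, possibly via
       // __asan_stack_free_N;
     else
       // clear the frame's shadow bytes back to addressable.  */
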
/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}

/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: Linux kernel is built with -fno-common, so we do instrument
	 globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section)
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ())
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}

/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
		   int *nargs)
{
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}

/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
	    int *nargs)
{
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
	    BUILT_IN_ASAN_LOAD2_NOABORT,
	    BUILT_IN_ASAN_LOAD4_NOABORT,
	    BUILT_IN_ASAN_LOAD8_NOABORT,
	    BUILT_IN_ASAN_LOAD16_NOABORT,
	    BUILT_IN_ASAN_LOADN_NOABORT },
	  { BUILT_IN_ASAN_STORE1_NOABORT,
	    BUILT_IN_ASAN_STORE2_NOABORT,
	    BUILT_IN_ASAN_STORE4_NOABORT,
	    BUILT_IN_ASAN_STORE8_NOABORT,
	    BUILT_IN_ASAN_STORE16_NOABORT,
	    BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}

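/* For instance, check_func (/*is_store=*/true, /*recover_p=*/true, 4, &n)
   yields BUILT_IN_ASAN_STORE4_NOABORT with n == 1, because
   exact_log2 (4) == 2 indexes the third column of the store row in the
   NOABORT table.  An unknown size of -1 selects the _N variant instead,
   which also takes the access length as an argument, hence n == 2.
   report_error_func above follows the same indexing scheme.  */
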
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */

gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}

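/* The resulting CFG shape, sketched:

	cond_bb  (ends at the returned iterator; caller inserts the
	 |    \   condition here)
	 |     \ EDGE_TRUE_VALUE
	 |      then_bb  (empty; caller fills it in)
	 |     / EDGE_FALLTHRU, only if CREATE_THEN_FALLTHRU_EDGE
	 |    /
	fallthru_bb  (starts with the statement *ITER points to)

   The EDGE_FALSE_VALUE edge runs from cond_bb to fallthru_bb.  */
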
/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gcond *cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
			      /*before_p=*/true,
			      then_more_likely_p,
			      /*create_then_fallthru_edge=*/true,
			      then_bb,
			      fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}

/* Build the shadow load
   *(shadow_ptr_type) ((base_addr >> ASAN_SHADOW_SHIFT)
		       + asan_shadow_offset ())
   and return the SSA_NAME holding the loaded shadow value.  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
			   base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
			   gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
			   gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
	      build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

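/* In a GIMPLE dump the emitted sequence looks roughly like this
   (SSA names and the x86_64 offset value are illustrative):

     _1 = base_addr >> 3;
     _2 = _1 + 0x7fff8000;        // asan_shadow_offset ()
     _3 = (signed char *) _2;
     shadow_4 = *_3;
*/
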
/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
				  TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

/* LEN can already have necessary size and precision;
   in that case, do not create a new variable.  */

tree
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (ptrofftype_p (len))
    return len;
  gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				  NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for memory region access.
   IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
   non-zero length.  ALIGN tells the alignment of the accessed memory
   object.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */

static void
build_check_stmt (location_t loc, tree base, tree len,
		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
		  bool is_non_zero_len, bool before_p, bool is_store,
		  bool is_scalar_access, unsigned int align = 0)
{
  gimple_stmt_iterator gsi = *iter;
  gimple g;

  gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));

  gsi = *iter;

  base = unshare_expr (base);
  base = maybe_create_ssa_name (loc, base, &gsi, before_p);

  if (len)
    {
      len = unshare_expr (len);
      len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
    }
  else
    {
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }

  if (size_in_bytes > 1)
    {
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
	  || size_in_bytes > 16)
	is_scalar_access = false;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
	{
	  /* On non-strict alignment targets, if
	     16-byte access is just 8-byte aligned,
	     this will result in misaligned shadow
	     memory 2 byte load, but otherwise can
	     be handled using one read.  */
	  if (size_in_bytes != 16
	      || STRICT_ALIGNMENT
	      || align < 8 * BITS_PER_UNIT)
	    is_scalar_access = false;
	}
    }

  HOST_WIDE_INT flags = 0;
  if (is_store)
    flags |= ASAN_CHECK_STORE;
  if (is_non_zero_len)
    flags |= ASAN_CHECK_NON_ZERO_LEN;
  if (is_scalar_access)
    flags |= ASAN_CHECK_SCALAR_ACCESS;

  g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
				  build_int_cst (integer_type_node, flags),
				  base, len,
				  build_int_cst (integer_type_node,
						 align / BITS_PER_UNIT));
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  else
    {
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      gsi_next (&gsi);
      *iter = gsi;
    }
}

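/* The check is emitted as a call to the internal function ASAN_CHECK,
   which for a 4-byte-aligned 4-byte scalar store shows up in GIMPLE
   dumps roughly as (SSA name illustrative):

     ASAN_CHECK (7, base_5, 4, 4);

   where 7 == ASAN_CHECK_STORE | ASAN_CHECK_SCALAR_ACCESS
   | ASAN_CHECK_NON_ZERO_LEN.  A later pass (sanopt) expands the call
   into the actual shadow-memory comparison and report-function call.  */
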
1748 /* If T represents a memory access, add instrumentation code before ITER.
1749 LOCATION is source code location.
1750 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1752 static void
1753 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1754 location_t location, bool is_store)
1756 if (is_store && !ASAN_INSTRUMENT_WRITES)
1757 return;
1758 if (!is_store && !ASAN_INSTRUMENT_READS)
1759 return;
1761 tree type, base;
1762 HOST_WIDE_INT size_in_bytes;
1764 type = TREE_TYPE (t);
1765 switch (TREE_CODE (t))
1767 case ARRAY_REF:
1768 case COMPONENT_REF:
1769 case INDIRECT_REF:
1770 case MEM_REF:
1771 case VAR_DECL:
1772 case BIT_FIELD_REF:
1773 break;
1775 default:
1776 return;
1779 size_in_bytes = int_size_in_bytes (type);
1780 if (size_in_bytes <= 0)
1781 return;
1783 HOST_WIDE_INT bitsize, bitpos;
1784 tree offset;
1785 machine_mode mode;
1786 int volatilep = 0, unsignedp = 0;
1787 tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1788 &mode, &unsignedp, &volatilep, false);
1790 if (TREE_CODE (t) == COMPONENT_REF
1791 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1793 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1794 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1795 TREE_OPERAND (t, 0), repr,
1796 NULL_TREE), location, is_store);
1797 return;
1800 if (bitpos % BITS_PER_UNIT
1801 || bitsize != size_in_bytes * BITS_PER_UNIT)
1802 return;
1804 if (TREE_CODE (inner) == VAR_DECL
1805 && offset == NULL_TREE
1806 && bitpos >= 0
1807 && DECL_SIZE (inner)
1808 && tree_fits_shwi_p (DECL_SIZE (inner))
1809 && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1811 if (DECL_THREAD_LOCAL_P (inner))
1812 return;
1813 if (!TREE_STATIC (inner))
1815 /* Automatic vars in the current function will always be
1816 accessible. */
1817 if (decl_function_context (inner) == current_function_decl)
1818 return;
1820 /* Always instrument external vars, they might be dynamically
1821 initialized. */
1822 else if (!DECL_EXTERNAL (inner))
1824 /* Static vars that are known not to be dynamically
1825 initialized will always be accessible. */
1826 varpool_node *vnode = varpool_node::get (inner);
1827 if (vnode && !vnode->dynamically_initialized)
1828 return;
1832 base = build_fold_addr_expr (t);
1833 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1835 unsigned int align = get_object_alignment (t);
1836 build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1837 /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1838 is_store, /*is_scalar_access*/true, align);
1839 update_mem_ref_hash_table (base, size_in_bytes);
1840 update_mem_ref_hash_table (t, size_in_bytes);
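/* A hedged example of what instrument_derefs handles: given

     struct s { int f; } *p;
     p->f = 1;

   the store is a COMPONENT_REF, get_inner_reference reports a
   byte-aligned four-byte access, and a scalar ASAN_CHECK is emitted
   before the statement (sketch, not verbatim compiler output).  */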
1845 /* Insert a memory reference into the hash table if its access length
1846 can be determined at compile time. */
1848 static void
1849 maybe_update_mem_ref_hash_table (tree base, tree len)
1851 if (!POINTER_TYPE_P (TREE_TYPE (base))
1852 || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1853 return;
1855 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1857 if (size_in_bytes != -1)
1858 update_mem_ref_hash_table (base, size_in_bytes);
1861 /* Instrument an access to a contiguous memory region that starts at
1862 the address pointed to by BASE, over a length of LEN (expressed in
1863 units of sizeof (*BASE)). ITER points to the instruction before
1864 which the instrumentation instructions must be inserted. LOCATION
1865 is the source location that the instrumentation instructions must
1866 have. If IS_STORE is true, then the memory access is a store;
1867 otherwise, it's a load. */
1869 static void
1870 instrument_mem_region_access (tree base, tree len,
1871 gimple_stmt_iterator *iter,
1872 location_t location, bool is_store)
1874 if (!POINTER_TYPE_P (TREE_TYPE (base))
1875 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1876 || integer_zerop (len))
1877 return;
1879 HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1881 if ((size_in_bytes == -1)
1882 || !has_mem_ref_been_instrumented (base, size_in_bytes))
1884 build_check_stmt (location, base, len, size_in_bytes, iter,
1885 /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1886 is_store, /*is_scalar_access*/false, /*align*/0);
1889 maybe_update_mem_ref_hash_table (base, len);
1890 *iter = gsi_for_stmt (gsi_stmt (*iter));
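/* Sketch: for a call writing a region (DEST, LEN), e.g. a
   memset-style builtin that is not intercepted by the run-time
   library, the code above emits one non-scalar check

     .ASAN_CHECK (<flags>, dest_1, len_2, 0);

   which asan_expand_check_ifn later turns into checks of the region's
   first and last byte when LEN is not a compile-time constant
   (illustrative).  */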
1893 /* Instrument the call to a built-in memory access function that is
1894 pointed to by the iterator ITER.
1896 Upon completion, return TRUE iff *ITER has been advanced to the
1897 statement following the one it was originally pointing to. */
1899 static bool
1900 instrument_builtin_call (gimple_stmt_iterator *iter)
1902 if (!ASAN_MEMINTRIN)
1903 return false;
1905 bool iter_advanced_p = false;
1906 gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1908 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1910 location_t loc = gimple_location (call);
1912 asan_mem_ref src0, src1, dest;
1913 asan_mem_ref_init (&src0, NULL, 1);
1914 asan_mem_ref_init (&src1, NULL, 1);
1915 asan_mem_ref_init (&dest, NULL, 1);
1917 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1918 bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1919 dest_is_deref = false, intercepted_p = true;
1921 if (get_mem_refs_of_builtin_call (call,
1922 &src0, &src0_len, &src0_is_store,
1923 &src1, &src1_len, &src1_is_store,
1924 &dest, &dest_len, &dest_is_store,
1925 &dest_is_deref, &intercepted_p))
1927 if (dest_is_deref)
1929 instrument_derefs (iter, dest.start, loc, dest_is_store);
1930 gsi_next (iter);
1931 iter_advanced_p = true;
1933 else if (!intercepted_p
1934 && (src0_len || src1_len || dest_len))
1936 if (src0.start != NULL_TREE)
1937 instrument_mem_region_access (src0.start, src0_len,
1938 iter, loc, /*is_store=*/false);
1939 if (src1.start != NULL_TREE)
1940 instrument_mem_region_access (src1.start, src1_len,
1941 iter, loc, /*is_store=*/false);
1942 if (dest.start != NULL_TREE)
1943 instrument_mem_region_access (dest.start, dest_len,
1944 iter, loc, /*is_store=*/true);
1946 *iter = gsi_for_stmt (call);
1947 gsi_next (iter);
1948 iter_advanced_p = true;
1950 else
1952 if (src0.start != NULL_TREE)
1953 maybe_update_mem_ref_hash_table (src0.start, src0_len);
1954 if (src1.start != NULL_TREE)
1955 maybe_update_mem_ref_hash_table (src1.start, src1_len);
1956 if (dest.start != NULL_TREE)
1957 maybe_update_mem_ref_hash_table (dest.start, dest_len);
1960 return iter_advanced_p;
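/* Hypothetical walk-through for a memcpy-style builtin:
   get_mem_refs_of_builtin_call reports SRC as a read region and DEST
   as a write region.  If the callee is intercepted by the run-time
   library (INTERCEPTED_P), the references are merely recorded in the
   mem-ref hash table; otherwise a read check over (src, n) and a
   write check over (dest, n) are emitted before the call.  */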
1963 /* Instrument the assignment statement ITER if it is subject to
1964 instrumentation. Return TRUE iff instrumentation actually
1965 happened. In that case, the iterator ITER is advanced to the
1966 statement logically following the one initially pointed to by ITER,
1967 and the relevant memory reference whose access has been
1968 instrumented is added to the memory references hash table. */
1970 static bool
1971 maybe_instrument_assignment (gimple_stmt_iterator *iter)
1973 gimple s = gsi_stmt (*iter);
1975 gcc_assert (gimple_assign_single_p (s));
1977 tree ref_expr = NULL_TREE;
1978 bool is_store, is_instrumented = false;
1980 if (gimple_store_p (s))
1982 ref_expr = gimple_assign_lhs (s);
1983 is_store = true;
1984 instrument_derefs (iter, ref_expr,
1985 gimple_location (s),
1986 is_store);
1987 is_instrumented = true;
1990 if (gimple_assign_load_p (s))
1992 ref_expr = gimple_assign_rhs1 (s);
1993 is_store = false;
1994 instrument_derefs (iter, ref_expr,
1995 gimple_location (s),
1996 is_store);
1997 is_instrumented = true;
2000 if (is_instrumented)
2001 gsi_next (iter);
2003 return is_instrumented;
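/* For example (sketch), an aggregate copy such as

     *d = *s;   // D and S point to the same struct type

   is a single GIMPLE assignment that is both a store and a load, so
   both branches above fire: *D is instrumented as a store, *S as a
   load, and ITER is advanced once.  */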
2006 /* Instrument the function call pointed to by the iterator ITER, if it
2007 is subject to instrumentation. At the moment, the only function
2008 calls that are instrumented are some built-in functions that access
2009 memory. Look at instrument_builtin_call to learn more.
2011 Upon completion return TRUE iff *ITER was advanced to the statement
2012 following the one it was originally pointing to. */
2014 static bool
2015 maybe_instrument_call (gimple_stmt_iterator *iter)
2017 gimple stmt = gsi_stmt (*iter);
2018 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2020 if (is_builtin && instrument_builtin_call (iter))
2021 return true;
2023 if (gimple_call_noreturn_p (stmt))
2025 if (is_builtin)
2027 tree callee = gimple_call_fndecl (stmt);
2028 switch (DECL_FUNCTION_CODE (callee))
2030 case BUILT_IN_UNREACHABLE:
2031 case BUILT_IN_TRAP:
2032 /* Don't instrument these. */
2033 return false;
2034 default:
2035 break;
2038 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2039 gimple g = gimple_build_call (decl, 0);
2040 gimple_set_location (g, gimple_location (stmt));
2041 gsi_insert_before (iter, g, GSI_SAME_STMT);
2043 return false;
2046 /* Walk each instruction of all basic blocks and instrument those that
2047 represent memory references: loads, stores, or function calls.
2048 In a given basic block, this function avoids instrumenting memory
2049 references that have already been instrumented. */
2051 static void
2052 transform_statements (void)
2054 basic_block bb, last_bb = NULL;
2055 gimple_stmt_iterator i;
2056 int saved_last_basic_block = last_basic_block_for_fn (cfun);
2058 FOR_EACH_BB_FN (bb, cfun)
2060 basic_block prev_bb = bb;
2062 if (bb->index >= saved_last_basic_block) continue;
2064 /* Flush the mem ref hash table, if current bb doesn't have
2065 exactly one predecessor, or if that predecessor (skipping
2066 over asan created basic blocks) isn't the last processed
2067 basic block. Thus we effectively flush on extended basic
2068 block boundaries. */
2069 while (single_pred_p (prev_bb))
2071 prev_bb = single_pred (prev_bb);
2072 if (prev_bb->index < saved_last_basic_block)
2073 break;
2075 if (prev_bb != last_bb)
2076 empty_mem_ref_hash_table ();
2077 last_bb = bb;
2079 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2081 gimple s = gsi_stmt (i);
2083 if (has_stmt_been_instrumented_p (s))
2084 gsi_next (&i);
2085 else if (gimple_assign_single_p (s)
2086 && !gimple_clobber_p (s)
2087 && maybe_instrument_assignment (&i))
2088 /* Nothing to do as maybe_instrument_assignment advanced
2089 the iterator I. */;
2090 else if (is_gimple_call (s) && maybe_instrument_call (&i))
2091 /* Nothing to do as maybe_instrument_call
2092 advanced the iterator I. */;
2093 else
2095 /* No instrumentation happened.
2097 If the current instruction is a function call that
2098 might free something, let's forget about the memory
2099 references that got instrumented. Otherwise we might
2100 miss some instrumentation opportunities. */
2101 if (is_gimple_call (s) && !nonfreeing_call_p (s))
2102 empty_mem_ref_hash_table ();
2104 gsi_next (&i);
2108 free_mem_ref_resources ();
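/* Illustrative effect of the mem-ref hash table: in

     void f (int *p) { *p = 1; *p = 2; }

   both stores lie in one extended basic block and reference the same
   four bytes, so only the first receives an ASAN_CHECK; the second is
   found by has_stmt_been_instrumented_p and skipped (assuming no
   intervening call that might free memory).  */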
2111 /* Build
2112 __asan_before_dynamic_init (module_name)
2113 or
2114 __asan_after_dynamic_init ()
2115 call. */
2117 tree
2118 asan_dynamic_init_call (bool after_p)
2120 tree fn = builtin_decl_implicit (after_p
2121 ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2122 : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2123 tree module_name_cst = NULL_TREE;
2124 if (!after_p)
2126 pretty_printer module_name_pp;
2127 pp_string (&module_name_pp, main_input_filename);
2129 if (shadow_ptr_types[0] == NULL_TREE)
2130 asan_init_shadow_ptr_types ();
2131 module_name_cst = asan_pp_string (&module_name_pp);
2132 module_name_cst = fold_convert (const_ptr_type_node,
2133 module_name_cst);
2136 return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
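/* Intended use (sketch): for a dynamically initialized global such as

     int x = foo ();

   the front end brackets the per-module dynamic initialization
   approximately as

     __asan_before_dynamic_init ("<module name>");
     x = foo ();
     __asan_after_dynamic_init ();

   letting the run-time detect initialization-order bugs
   (illustrative; the exact emission is up to the front end).  */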
2139 /* Build
2140 struct __asan_global
2142 const void *__beg;
2143 uptr __size;
2144 uptr __size_with_redzone;
2145 const void *__name;
2146 const void *__module_name;
2147 uptr __has_dynamic_init;
2148 __asan_global_source_location *__location;
2149 } type. */
2151 static tree
2152 asan_global_struct (void)
2154 static const char *field_names[7]
2155 = { "__beg", "__size", "__size_with_redzone",
2156 "__name", "__module_name", "__has_dynamic_init", "__location"};
2157 tree fields[7], ret;
2158 int i;
2160 ret = make_node (RECORD_TYPE);
2161 for (i = 0; i < 7; i++)
2163 fields[i]
2164 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2165 get_identifier (field_names[i]),
2166 (i == 0 || i == 3) ? const_ptr_type_node
2167 : pointer_sized_int_node);
2168 DECL_CONTEXT (fields[i]) = ret;
2169 if (i)
2170 DECL_CHAIN (fields[i - 1]) = fields[i];
2172 tree type_decl = build_decl (input_location, TYPE_DECL,
2173 get_identifier ("__asan_global"), ret);
2174 DECL_IGNORED_P (type_decl) = 1;
2175 DECL_ARTIFICIAL (type_decl) = 1;
2176 TYPE_FIELDS (ret) = fields[0];
2177 TYPE_NAME (ret) = type_decl;
2178 TYPE_STUB_DECL (ret) = type_decl;
2179 layout_type (ret);
2180 return ret;
2183 /* Append the description of a single global DECL to vector V.
2184 TYPE is the __asan_global struct type as returned by asan_global_struct. */
2186 static void
2187 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2189 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2190 unsigned HOST_WIDE_INT size;
2191 tree str_cst, module_name_cst, refdecl = decl;
2192 vec<constructor_elt, va_gc> *vinner = NULL;
2194 pretty_printer asan_pp, module_name_pp;
2196 if (DECL_NAME (decl))
2197 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2198 else
2199 pp_string (&asan_pp, "<unknown>");
2200 str_cst = asan_pp_string (&asan_pp);
2202 pp_string (&module_name_pp, main_input_filename);
2203 module_name_cst = asan_pp_string (&module_name_pp);
2205 if (asan_needs_local_alias (decl))
2207 char buf[20];
2208 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2209 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2210 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2211 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2212 TREE_READONLY (refdecl) = TREE_READONLY (decl);
2213 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2214 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2215 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2216 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2217 TREE_STATIC (refdecl) = 1;
2218 TREE_PUBLIC (refdecl) = 0;
2219 TREE_USED (refdecl) = 1;
2220 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2223 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2224 fold_convert (const_ptr_type_node,
2225 build_fold_addr_expr (refdecl)));
2226 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2227 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2228 size += asan_red_zone_size (size);
2229 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2230 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2231 fold_convert (const_ptr_type_node, str_cst));
2232 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2233 fold_convert (const_ptr_type_node, module_name_cst));
2234 varpool_node *vnode = varpool_node::get (decl);
2235 int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2236 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2237 build_int_cst (uptr, has_dynamic_init));
2238 tree locptr = NULL_TREE;
2239 location_t loc = DECL_SOURCE_LOCATION (decl);
2240 expanded_location xloc = expand_location (loc);
2241 if (xloc.file != NULL)
2243 static int lasanloccnt = 0;
2244 char buf[25];
2245 ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2246 tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2247 ubsan_get_source_location_type ());
2248 TREE_STATIC (var) = 1;
2249 TREE_PUBLIC (var) = 0;
2250 DECL_ARTIFICIAL (var) = 1;
2251 DECL_IGNORED_P (var) = 1;
2252 pretty_printer filename_pp;
2253 pp_string (&filename_pp, xloc.file);
2254 tree str = asan_pp_string (&filename_pp);
2255 tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2256 NULL_TREE, str, NULL_TREE,
2257 build_int_cst (unsigned_type_node,
2258 xloc.line), NULL_TREE,
2259 build_int_cst (unsigned_type_node,
2260 xloc.column));
2261 TREE_CONSTANT (ctor) = 1;
2262 TREE_STATIC (ctor) = 1;
2263 DECL_INITIAL (var) = ctor;
2264 varpool_node::finalize_decl (var);
2265 locptr = fold_convert (uptr, build_fold_addr_expr (var));
2267 else
2268 locptr = build_int_cst (uptr, 0);
2269 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2270 init = build_constructor (type, vinner);
2271 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
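/* Illustrative descriptor: for "int g;" defined in file.c, the
   constructor element built above corresponds roughly to

     { &g,                          // or its local .LASAN alias
       4,                           // __size
       4 + asan_red_zone_size (4),  // __size_with_redzone
       "g", "file.c",
       0,                           // __has_dynamic_init
       &.LASANLOC<n> }              // source-location record

   with hypothetical values for a four-byte int.  */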
2274 /* Initialize sanitizer.def builtins if the front end hasn't initialized them. */
2275 void
2276 initialize_sanitizer_builtins (void)
2278 tree decl;
2280 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2281 return;
2283 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2284 tree BT_FN_VOID_PTR
2285 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2286 tree BT_FN_VOID_CONST_PTR
2287 = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2288 tree BT_FN_VOID_PTR_PTR
2289 = build_function_type_list (void_type_node, ptr_type_node,
2290 ptr_type_node, NULL_TREE);
2291 tree BT_FN_VOID_PTR_PTR_PTR
2292 = build_function_type_list (void_type_node, ptr_type_node,
2293 ptr_type_node, ptr_type_node, NULL_TREE);
2294 tree BT_FN_VOID_PTR_PTRMODE
2295 = build_function_type_list (void_type_node, ptr_type_node,
2296 pointer_sized_int_node, NULL_TREE);
2297 tree BT_FN_VOID_INT
2298 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2299 tree BT_FN_SIZE_CONST_PTR_INT
2300 = build_function_type_list (size_type_node, const_ptr_type_node,
2301 integer_type_node, NULL_TREE);
2302 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2303 tree BT_FN_IX_CONST_VPTR_INT[5];
2304 tree BT_FN_IX_VPTR_IX_INT[5];
2305 tree BT_FN_VOID_VPTR_IX_INT[5];
2306 tree vptr
2307 = build_pointer_type (build_qualified_type (void_type_node,
2308 TYPE_QUAL_VOLATILE));
2309 tree cvptr
2310 = build_pointer_type (build_qualified_type (void_type_node,
2311 TYPE_QUAL_VOLATILE
2312 |TYPE_QUAL_CONST));
2313 tree boolt
2314 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2315 int i;
2316 for (i = 0; i < 5; i++)
2318 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2319 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2320 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2321 integer_type_node, integer_type_node,
2322 NULL_TREE);
2323 BT_FN_IX_CONST_VPTR_INT[i]
2324 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2325 BT_FN_IX_VPTR_IX_INT[i]
2326 = build_function_type_list (ix, vptr, ix, integer_type_node,
2327 NULL_TREE);
2328 BT_FN_VOID_VPTR_IX_INT[i]
2329 = build_function_type_list (void_type_node, vptr, ix,
2330 integer_type_node, NULL_TREE);
2332 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2333 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2334 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2335 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2336 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2337 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2338 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2339 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2340 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2341 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2342 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2343 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2344 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2345 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2346 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2347 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2348 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2349 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2350 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2351 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2352 #undef ATTR_NOTHROW_LEAF_LIST
2353 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2354 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2355 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2356 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2357 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2358 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2359 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2360 ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2361 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2362 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2363 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2364 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2365 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2366 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2367 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2368 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2369 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2370 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2371 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2372 /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2373 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2374 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2375 #undef DEF_SANITIZER_BUILTIN
2376 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2377 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2378 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2379 set_call_expr_flags (decl, ATTRS); \
2380 set_builtin_decl (ENUM, decl, true);
2382 #include "sanitizer.def"
2384 /* -fsanitize=object-size uses __builtin_object_size, but that might
2385 not be available for e.g. Fortran at this point. We use
2386 DEF_SANITIZER_BUILTIN here only as a convenience macro. */
2387 if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2388 && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2389 DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2390 BT_FN_SIZE_CONST_PTR_INT,
2391 ATTR_PURE_NOTHROW_LEAF_LIST)
2393 #undef DEF_SANITIZER_BUILTIN
2396 /* Called via hash_table::traverse. Count the number of emitted
2397 STRING_CSTs in the constant hash table. */
2399 static int
2400 count_string_csts (constant_descriptor_tree **slot,
2401 unsigned HOST_WIDE_INT *data)
2403 struct constant_descriptor_tree *desc = *slot;
2404 if (TREE_CODE (desc->value) == STRING_CST
2405 && TREE_ASM_WRITTEN (desc->value)
2406 && asan_protect_global (desc->value))
2407 ++*data;
2408 return 1;
2411 /* Helper structure to pass two parameters to
2412 add_string_csts. */
2414 struct asan_add_string_csts_data
2416 tree type;
2417 vec<constructor_elt, va_gc> *v;
2420 /* Called via hash_table::traverse. Call asan_add_global
2421 on emitted STRING_CSTs from the constant hash table. */
2423 static int
2424 add_string_csts (constant_descriptor_tree **slot,
2425 asan_add_string_csts_data *aascd)
2427 struct constant_descriptor_tree *desc = *slot;
2428 if (TREE_CODE (desc->value) == STRING_CST
2429 && TREE_ASM_WRITTEN (desc->value)
2430 && asan_protect_global (desc->value))
2432 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2433 aascd->type, aascd->v);
2435 return 1;
2438 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2439 invoke ggc_collect. */
2440 static GTY(()) tree asan_ctor_statements;
2442 /* Module-level instrumentation.
2443 - Insert __asan_init_vN() into the list of CTORs.
2444 - TODO: insert redzones around globals.
2447 void
2448 asan_finish_file (void)
2450 varpool_node *vnode;
2451 unsigned HOST_WIDE_INT gcount = 0;
2453 if (shadow_ptr_types[0] == NULL_TREE)
2454 asan_init_shadow_ptr_types ();
2455 /* Avoid instrumenting code in the asan ctors/dtors.
2456 We don't need to insert padding after the description strings,
2457 nor after .LASAN* array. */
2458 flag_sanitize &= ~SANITIZE_ADDRESS;
2460 /* For user-space we want asan constructors to run first.
2461 The Linux kernel does not support priorities other than the default,
2462 and there the only other user of constructors is coverage, so we run
2463 with the default priority. */
2464 int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2465 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2467 if (flag_sanitize & SANITIZE_USER_ADDRESS)
2469 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2470 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2472 FOR_EACH_DEFINED_VARIABLE (vnode)
2473 if (TREE_ASM_WRITTEN (vnode->decl)
2474 && asan_protect_global (vnode->decl))
2475 ++gcount;
2476 hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2477 const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2478 (&gcount);
2479 if (gcount)
2481 tree type = asan_global_struct (), var, ctor;
2482 tree dtor_statements = NULL_TREE;
2483 vec<constructor_elt, va_gc> *v;
2484 char buf[20];
2486 type = build_array_type_nelts (type, gcount);
2487 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2488 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2489 type);
2490 TREE_STATIC (var) = 1;
2491 TREE_PUBLIC (var) = 0;
2492 DECL_ARTIFICIAL (var) = 1;
2493 DECL_IGNORED_P (var) = 1;
2494 vec_alloc (v, gcount);
2495 FOR_EACH_DEFINED_VARIABLE (vnode)
2496 if (TREE_ASM_WRITTEN (vnode->decl)
2497 && asan_protect_global (vnode->decl))
2498 asan_add_global (vnode->decl, TREE_TYPE (type), v);
2499 struct asan_add_string_csts_data aascd;
2500 aascd.type = TREE_TYPE (type);
2501 aascd.v = v;
2502 const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2503 (&aascd);
2504 ctor = build_constructor (type, v);
2505 TREE_CONSTANT (ctor) = 1;
2506 TREE_STATIC (ctor) = 1;
2507 DECL_INITIAL (var) = ctor;
2508 varpool_node::finalize_decl (var);
2510 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2511 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2512 append_to_statement_list (build_call_expr (fn, 2,
2513 build_fold_addr_expr (var),
2514 gcount_tree),
2515 &asan_ctor_statements);
2517 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2518 append_to_statement_list (build_call_expr (fn, 2,
2519 build_fold_addr_expr (var),
2520 gcount_tree),
2521 &dtor_statements);
2522 cgraph_build_static_cdtor ('D', dtor_statements, priority);
2524 if (asan_ctor_statements)
2525 cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2526 flag_sanitize |= SANITIZE_ADDRESS;
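/* Net effect for user-space ASan with at least one protected global
   (sketch, function names invented; the real bodies are built via
   cgraph_build_static_cdtor):

     static void ctor (void)  // 'I', high priority
     {
       __asan_init_vN ();
       __asan_register_globals (&.LASAN0, gcount);
     }
     static void dtor (void)  // 'D'
     {
       __asan_unregister_globals (&.LASAN0, gcount);
     }
   */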
2529 /* Expand the IFN_ASAN_CHECK internal function call. */
2531 bool
2532 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2534 gimple g = gsi_stmt (*iter);
2535 location_t loc = gimple_location (g);
2537 bool recover_p
2538 = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2540 HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2541 gcc_assert (flags < ASAN_CHECK_LAST);
2542 bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2543 bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2544 bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2546 tree base = gimple_call_arg (g, 1);
2547 tree len = gimple_call_arg (g, 2);
2548 HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2550 HOST_WIDE_INT size_in_bytes
2551 = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2553 if (use_calls)
2555 /* Instrument using callbacks. */
2556 gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2557 NOP_EXPR, base);
2558 gimple_set_location (g, loc);
2559 gsi_insert_before (iter, g, GSI_SAME_STMT);
2560 tree base_addr = gimple_assign_lhs (g);
2562 int nargs;
2563 tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2564 if (nargs == 1)
2565 g = gimple_build_call (fun, 1, base_addr);
2566 else
2568 gcc_assert (nargs == 2);
2569 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2570 NOP_EXPR, len);
2571 gimple_set_location (g, loc);
2572 gsi_insert_before (iter, g, GSI_SAME_STMT);
2573 tree sz_arg = gimple_assign_lhs (g);
2574 g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2576 gimple_set_location (g, loc);
2577 gsi_replace (iter, g, false);
2578 return false;
2581 HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2583 tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2584 tree shadow_type = TREE_TYPE (shadow_ptr_type);
2586 gimple_stmt_iterator gsi = *iter;
2588 if (!is_non_zero_len)
2590 /* So, the length of the memory area to asan-protect is
2591 non-constant. Let's guard the generated instrumentation code
2592 like:
2594 if (len != 0)
2595 {
2596 //asan instrumentation code goes here.
2597 }
2598 // fallthrough instructions, starting with *ITER. */
2600 g = gimple_build_cond (NE_EXPR,
2601 len,
2602 build_int_cst (TREE_TYPE (len), 0),
2603 NULL_TREE, NULL_TREE);
2604 gimple_set_location (g, loc);
2606 basic_block then_bb, fallthrough_bb;
2607 insert_if_then_before_iter (as_a <gcond *> (g), iter,
2608 /*then_more_likely_p=*/true,
2609 &then_bb, &fallthrough_bb);
2610 /* Note that fallthrough_bb starts with the statement that was
2611 pointed to by ITER. */
2613 /* The 'then block' of the 'if (len != 0)' condition is where
2614 we'll generate the asan instrumentation code now. */
2615 gsi = gsi_last_bb (then_bb);
2618 /* Get an iterator on the point where we can add the condition
2619 statement for the instrumentation. */
2620 basic_block then_bb, else_bb;
2621 gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2622 /*then_more_likely_p=*/false,
2623 /*create_then_fallthru_edge*/recover_p,
2624 &then_bb,
2625 &else_bb);
2627 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2628 NOP_EXPR, base);
2629 gimple_set_location (g, loc);
2630 gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2631 tree base_addr = gimple_assign_lhs (g);
2633 tree t = NULL_TREE;
2634 if (real_size_in_bytes >= 8)
2636 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2637 shadow_ptr_type);
2638 t = shadow;
2640 else
2642 /* Slow path for 1, 2 and 4 byte accesses. */
2643 /* Test (shadow != 0)
2644 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow). */
2645 tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2646 shadow_ptr_type);
2647 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2648 gimple_seq seq = NULL;
2649 gimple_seq_add_stmt (&seq, shadow_test);
2650 /* Accesses aligned to at least 8 bytes can test just
2651 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2652 to be 0. */
2653 if (align < 8)
2655 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2656 base_addr, 7));
2657 gimple_seq_add_stmt (&seq,
2658 build_type_cast (shadow_type,
2659 gimple_seq_last (seq)));
2660 if (real_size_in_bytes > 1)
2661 gimple_seq_add_stmt (&seq,
2662 build_assign (PLUS_EXPR,
2663 gimple_seq_last (seq),
2664 real_size_in_bytes - 1));
2665 t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2667 else
2668 t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2669 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2670 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2671 gimple_seq_last (seq)));
2672 t = gimple_assign_lhs (gimple_seq_last (seq));
2673 gimple_seq_set_location (seq, loc);
2674 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2676 /* For non-constant, misaligned or otherwise weird access sizes,
2677 check the first and the last byte. */
2678 if (size_in_bytes == -1)
2680 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2681 MINUS_EXPR, len,
2682 build_int_cst (pointer_sized_int_node, 1));
2683 gimple_set_location (g, loc);
2684 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2685 tree last = gimple_assign_lhs (g);
2686 g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2687 PLUS_EXPR, base_addr, last);
2688 gimple_set_location (g, loc);
2689 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2690 tree base_end_addr = gimple_assign_lhs (g);
2692 tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2693 shadow_ptr_type);
2694 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2695 gimple_seq seq = NULL;
2696 gimple_seq_add_stmt (&seq, shadow_test);
2697 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2698 base_end_addr, 7));
2699 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2700 gimple_seq_last (seq)));
2701 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2702 gimple_seq_last (seq),
2703 shadow));
2704 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2705 gimple_seq_last (seq)));
2706 gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2707 gimple_seq_last (seq)));
2708 t = gimple_assign_lhs (gimple_seq_last (seq));
2709 gimple_seq_set_location (seq, loc);
2710 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2714 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2715 NULL_TREE, NULL_TREE);
2716 gimple_set_location (g, loc);
2717 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2719 /* Generate call to the run-time library (e.g. __asan_report_load8). */
2720 gsi = gsi_start_bb (then_bb);
2721 int nargs;
2722 tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
2723 g = gimple_build_call (fun, nargs, base_addr, len);
2724 gimple_set_location (g, loc);
2725 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2727 gsi_remove (iter, true);
2728 *iter = gsi_start_bb (else_bb);
2730 return true;
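/* A minimal, self-contained C sketch of the inline check this
   function open-codes for 1-, 2- and 4-byte accesses, assuming the
   typical x86_64 user-space shadow offset 0x7fff8000 (the real offset
   and shadow granularity come from the target configuration):

     #include <stdint.h>

     static int
     asan_would_report (uintptr_t addr, unsigned n)  // n = 1, 2 or 4
     {
       int8_t shadow = *(int8_t *) ((addr >> 3) + 0x7fff8000UL);
       if (shadow == 0)
         return 0;  // the whole 8-byte granule is addressable
       // Slow path: does the access reach beyond the addressable
       // prefix of the granule?
       return (int8_t) ((addr & 7) + n - 1) >= shadow;
     }

   For 8- and 16-byte accesses the shadow byte (or 16-bit word) is
   simply tested for being non-zero.  */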
2733 /* Instrument the current function. */
2735 static unsigned int
2736 asan_instrument (void)
2738 if (shadow_ptr_types[0] == NULL_TREE)
2739 asan_init_shadow_ptr_types ();
2740 transform_statements ();
2741 return 0;
2744 static bool
2745 gate_asan (void)
2747 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2748 && !lookup_attribute ("no_sanitize_address",
2749 DECL_ATTRIBUTES (current_function_decl));
2752 namespace {
2754 const pass_data pass_data_asan =
2756 GIMPLE_PASS, /* type */
2757 "asan", /* name */
2758 OPTGROUP_NONE, /* optinfo_flags */
2759 TV_NONE, /* tv_id */
2760 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2761 0, /* properties_provided */
2762 0, /* properties_destroyed */
2763 0, /* todo_flags_start */
2764 TODO_update_ssa, /* todo_flags_finish */
2767 class pass_asan : public gimple_opt_pass
2769 public:
2770 pass_asan (gcc::context *ctxt)
2771 : gimple_opt_pass (pass_data_asan, ctxt)
2774 /* opt_pass methods: */
2775 opt_pass * clone () { return new pass_asan (m_ctxt); }
2776 virtual bool gate (function *) { return gate_asan (); }
2777 virtual unsigned int execute (function *) { return asan_instrument (); }
2779 }; // class pass_asan
2781 } // anon namespace
2783 gimple_opt_pass *
2784 make_pass_asan (gcc::context *ctxt)
2786 return new pass_asan (ctxt);
2789 namespace {
2791 const pass_data pass_data_asan_O0 =
2793 GIMPLE_PASS, /* type */
2794 "asan0", /* name */
2795 OPTGROUP_NONE, /* optinfo_flags */
2796 TV_NONE, /* tv_id */
2797 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2798 0, /* properties_provided */
2799 0, /* properties_destroyed */
2800 0, /* todo_flags_start */
2801 TODO_update_ssa, /* todo_flags_finish */
2804 class pass_asan_O0 : public gimple_opt_pass
2806 public:
2807 pass_asan_O0 (gcc::context *ctxt)
2808 : gimple_opt_pass (pass_data_asan_O0, ctxt)
2811 /* opt_pass methods: */
2812 virtual bool gate (function *) { return !optimize && gate_asan (); }
2813 virtual unsigned int execute (function *) { return asan_instrument (); }
2815 }; // class pass_asan_O0
2817 } // anon namespace
2819 gimple_opt_pass *
2820 make_pass_asan_O0 (gcc::context *ctxt)
2822 return new pass_asan_O0 (ctxt);
2825 #include "gt-asan.h"