gcc/asan.c
/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2013 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "asan.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "expr.h"
#include "optabs.h"
#include "output.h"
#include "tm_p.h"
#include "langhooks.h"
#include "hash-table.h"
#include "alloc-pool.h"
#include "cfgloop.h"
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   an instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
	 __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
	 if ((X & 7) + N - 1 >= ShadowValue)
	   __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init() is inserted into the list of module CTORs.

   The run-time library redefines malloc (so that red zones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   and provides the __asan_report* and __asan_init functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
	    the next slot be 32 bytes aligned; this one is called Partial
	    Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
	    'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

   1/ The first 8 bytes contain a magical asan number that is always
   0x41B58AB3.

   2/ The following 8 bytes contain a pointer to a string (to be
   parsed at runtime by the runtime asan library), whose format is
   the following:

     "<function-name> <space> <num-of-variables-on-the-stack>
     (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
     <length-of-var-in-bytes> ){n} "

   where '(...){n}' means the content inside the parentheses occurs 'n'
   times, with 'n' being the number of variables on the stack.

   3/ The following 16 bytes of the red zone have no particular
   format.

   The shadow memory for that stack layout is going to look like this:

     - content of the shadow memory (4 bytes) for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of the shadow memory (4 bytes) for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of the shadow memory (4 bytes) for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of the shadow memory (4 bytes) for slots 3 and 2:
       0xF4000000.  This is the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of the shadow memory (4 bytes) for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to do
   the populating of the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone between
   them.  The size of the red zones is chosen so that each variable starts
   on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // This is always set to NULL for now.
       uptr __has_dynamic_init;
     };

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
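/* Illustrative sketch (not part of the GCC sources): the run-time check
   described above, written as plain C.  The shift of 3 matches
   ASAN_SHADOW_SHIFT; the SHADOW_OFFSET parameter is a placeholder for
   the target-specific value that GCC really obtains from
   targetm.asan_shadow_offset ().  Kept under #if 0 so it is never
   compiled.  */
#if 0
static int
example_access_is_poisoned (unsigned long addr, unsigned long shadow_offset,
			    int n /* Access size: 1, 2 or 4.  */)
{
  unsigned char shadow = *(unsigned char *) ((addr >> 3) + shadow_offset);
  if (shadow == 0)
    /* The whole 8-byte granule is addressable.  */
    return 0;
  /* Only the first SHADOW bytes of the granule are addressable; the
     access is bad if its last byte falls at or beyond that mark.  */
  return ((addr & 7) + n - 1) >= shadow;
}
#endif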
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access (can be 1, 2, 4, 8, 16 for now).  */
  char access_size;
};
static alloc_pool asan_mem_ref_alloc_pool;

/* This creates the alloc pool used to store the instances of
   asan_mem_ref that are stored in the hash table asan_mem_ref_ht.  */

static alloc_pool
asan_mem_ref_get_alloc_pool ()
{
  if (asan_mem_ref_alloc_pool == NULL)
    asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
						 sizeof (asan_mem_ref),
						 10);
  return asan_mem_ref_alloc_pool;
}

/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, char access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref in the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of the memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, char access_size)
{
  asan_mem_ref *ref
    = (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref value_type;
  typedef asan_mem_ref compare_type;

  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  hashval_t h = iterative_hash_expr (mem_ref->start, 0);
  h = iterative_hash_hashval_t (h, mem_ref->access_size);
  return h;
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return (m1->access_size == m2->access_size
	  && operand_equal_p (m1->start, m2->start, 0));
}
static hash_table <asan_mem_ref_hasher> asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table <asan_mem_ref_hasher> &
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht.is_created ())
    asan_mem_ref_ht.create (10);

  return asan_mem_ref_ht;
}

/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht.is_created ())
    asan_mem_ref_ht.empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  if (asan_mem_ref_ht.is_created ())
    asan_mem_ref_ht.dispose ();

  if (asan_mem_ref_alloc_pool)
    {
      free_alloc_pool (asan_mem_ref_alloc_pool);
      asan_mem_ref_alloc_pool = NULL;
    }
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, char access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  return (get_mem_ref_hash_table ().find (&r) != NULL);
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  /* First let's see if the address of the beginning of REF has been
     instrumented.  */
  if (!has_mem_ref_been_instrumented (ref))
    return false;

  if (len != 0)
    {
      /* Let's see if the end of the region has been instrumented.  */
      if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
					  ref->access_size))
	return false;
    }
  return true;
}
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gimple assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gimple call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  char access_size = 1;

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:

      dest = gimple_call_arg (call, 0);
      /* DEST represents the address of a memory location.
	 instrument_derefs wants the memory location, so let's
	 dereference the address DEST before handing it to
	 instrument_derefs.  */
      if (TREE_CODE (dest) == ADDR_EXPR)
	dest = TREE_OPERAND (dest, 0);
      else if (TREE_CODE (dest) == SSA_NAME)
	dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
		       dest, build_int_cst (TREE_TYPE (dest), 0));
      else
	gcc_unreachable ();

      access_size = int_size_in_bytes (TREE_TYPE (dest));
      break;

    default:
      /* The other memory access builtins are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
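/* For example, for memcpy (d, s, n) the function above fills SRC0 with
   {s, 1} and DST with {d, 1}, sets *SRC0_LEN and *DST_LEN to n,
   *SRC0_IS_STORE to false and *DST_IS_STORE to true, and returns true
   (illustrative summary of the code above).  */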
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false;
      if (get_mem_refs_of_builtin_call (stmt,
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  return false;
}
/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, char access_size)
{
  hash_table <asan_mem_ref_hasher> ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht.find_slot (&r, INSERT);
  if (*slot == NULL)
    *slot = asan_mem_ref_new (ref, access_size);
}
/* Initialize the shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}

/* Asan pretty-printer, used for building the description STRING_CSTs.  */
static pretty_printer asan_pp;
static bool asan_pp_initialized;

/* Initialize asan_pp.  */

static void
asan_pp_initialize (void)
{
  pp_construct (&asan_pp, /* prefix */ NULL, /* line-width */ 0);
  asan_pp_initialized = true;
}

/* Create an ADDR_EXPR of a STRING_CST with the asan_pp text.  */

static tree
asan_pp_string (void)
{
  const char *buf = pp_base_formatted_text (&asan_pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}
/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return GEN_INT (trunc_int_for_mode (val, SImode));
}
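/* For example, the shadow bytes { 0x00, 0xF4, 0xF4, 0xF4 } covering
   variable 'b' and its partial red zone from the stack-layout comment
   above combine, on a little-endian target, into the SImode constant
   0xF4F4F400; the index flip in the loop above produces the same value
   on big-endian targets.  */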
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  We can't emit a library
   call here, though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx insn, insns, top_label, end, addr, tmp, jump;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = force_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, GEN_INT (4), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_reg_note (jump, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE * 80 / 100));
}
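/* When clear_storage above would have emitted a library call, the loop
   emitted instead is equivalent to this C sketch (illustrative only):

     for (; addr < end; addr += 4)
       *(int *) addr = 0;   // one SImode store per 4 shadow bytes

   which is why LEN is asserted to be a multiple of 4.  */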
/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly; the epilogue sequence is returned.  BASE is the register holding
   the stack base, relative to which the offsets in the OFFSETS array are
   expressed.  OFFSETS contains pairs of offsets in reverse order: always the
   end offset of some gap that needs protection followed by its starting
   offset.  DECLS is an array of representative decls for each var partition.
   LENGTH is the length of the OFFSETS array; the DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS includes the gap before the first
   variable as well as gaps after each stack variable).  */

rtx
asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
			    int length)
{
  rtx shadow_base, shadow_mem, ret, mem;
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  if (!asan_pp_initialized)
    asan_pp_initialize ();

  pp_clear_output_area (&asan_pp);
  if (DECL_NAME (current_function_decl))
    pp_base_tree_identifier (&asan_pp, DECL_NAME (current_function_decl));
  else
    pp_string (&asan_pp, "<unknown>");
  pp_space (&asan_pp);
  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string ();

  /* Emit the prologue sequence.  */
  base = expand_binop (Pmode, add_optab, base, GEN_INT (base_offset),
		       NULL_RTX, 1, OPTAB_DIRECT);
  mem = gen_rtx_MEM (ptr_mode, base);
  emit_move_insn (mem, GEN_INT (ASAN_STACK_FRAME_MAGIC));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base = expand_binop (Pmode, add_optab, shadow_base,
			      GEN_INT (targetm.asan_shadow_offset ()),
			      NULL_RTX, 1, OPTAB_DIRECT);
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct the epilogue sequence.  */
  start_sequence ();

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();

  ret = get_insns ();
  end_sequence ();
  return ret;
}
/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}

/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using a user section; vars placed
	 into a user section from multiple TUs are often assumed
	 to be an array of such vars, and putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL_TREE
	  && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
   IS_STORE is either 1 (for a store) or 0 (for a load).
   SIZE_IN_BYTES is one of 1, 2, 4, 8, 16.  */

static tree
report_error_func (bool is_store, int size_in_bytes)
{
  static enum built_in_function report[2][5]
    = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	  BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	  BUILT_IN_ASAN_REPORT_LOAD16 },
	{ BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	  BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	  BUILT_IN_ASAN_REPORT_STORE16 } };
  return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
}
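/* For instance, report_error_func (/*is_store=*/true, 8) yields the decl
   for __asan_report_store8: exact_log2 maps the access size 8 to column 3
   of the table above.  */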
#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 2000 - 1)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function if BEFORE_P is
   true; otherwise, it is its following statement.  */

static gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gimple cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point
    = create_cond_insert_point (iter,
				/*before_p=*/true,
				then_more_likely_p,
				/*create_then_fallthru_edge=*/true,
				then_bb,
				fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  SIZE_IN_BYTES is one of
   1, 2, 4, 8, 16.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function;
   otherwise, it points to the statement logically following it.  */

static void
build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
		  bool before_p, bool is_store, int size_in_bytes)
{
  gimple_stmt_iterator gsi;
  basic_block then_bb, else_bb;
  tree t, base_addr, shadow;
  gimple g;
  tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  tree uintptr_type
    = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
  tree base_ssa = base;

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  gsi = create_cond_insert_point (iter, before_p,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge=*/false,
				  &then_bb,
				  &else_bb);

  base = unshare_expr (base);

  /* BASE can already be an SSA_NAME; in that case, do not create a
     new SSA_NAME for it.  */
  if (TREE_CODE (base) != SSA_NAME)
    {
      g = gimple_build_assign_with_ops (TREE_CODE (base),
					make_ssa_name (TREE_TYPE (base), NULL),
					base, NULL_TREE);
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      base_ssa = gimple_assign_lhs (g);
    }

  g = gimple_build_assign_with_ops (NOP_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    base_ssa, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  base_addr = gimple_assign_lhs (g);

  /* Build
     (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset ().  */

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign_with_ops (RSHIFT_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
  g = gimple_build_assign_with_ops (PLUS_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign_with_ops (NOP_EXPR,
				    make_ssa_name (shadow_ptr_type, NULL),
				    gimple_assign_lhs (g), NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
	      build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign_with_ops (MEM_REF,
				    make_ssa_name (shadow_type, NULL),
				    t, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  shadow = gimple_assign_lhs (g);

  if (size_in_bytes < 8)
    {
      /* Slow path for 1, 2 and 4 byte accesses.
	 Test (shadow != 0)
	      & ((base_addr & 7) + (size_in_bytes - 1) >= shadow).  */
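      /* For example, for a 2-byte access at a granule-aligned address the
	 test becomes (0 + 1) >= shadow: it reports when shadow is 1 (only
	 the first byte of the 8-byte granule is addressable) and passes
	 when shadow is 0 (the shadow != 0 test already failed) or >= 2.  */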
      gimple_seq seq = NULL;
      gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq_add_stmt (&seq, shadow_test);
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, base_addr, 7));
      gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						  gimple_seq_last (seq)));
      if (size_in_bytes > 1)
	gimple_seq_add_stmt (&seq,
			     build_assign (PLUS_EXPR, gimple_seq_last (seq),
					   size_in_bytes - 1));
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, gimple_seq_last (seq),
					       shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
					       gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, location);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
    }
  else
    t = shadow;

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate a call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  g = gimple_build_call (report_error_func (is_store, size_in_bytes),
			 1, base_addr);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  *iter = gsi_start_bb (else_bb);
}
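/* Illustrative shape of the code build_check_stmt emits for an 8 or 16
   byte access (the fast path, no partial-granule test), written as
   C-like pseudo-gimple:

     base_addr = (uintptr_type) base;
     shadow = *(char *) ((base_addr >> ASAN_SHADOW_SHIFT)
			 + targetm.asan_shadow_offset ());
     if (shadow != 0)
       __asan_report_load8 (base_addr);  // or the matching store variant  */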
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is the source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  tree type, base;
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if ((size_in_bytes & (size_in_bytes - 1)) != 0
      || (unsigned HOST_WIDE_INT) size_in_bytes - 1 >= 16)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  get_inner_reference (t, &bitsize, &bitpos, &offset,
		       &mode, &unsignedp, &volatilep, false);
  if (bitpos % (size_in_bytes * BITS_PER_UNIT)
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    {
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
	{
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
	  instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
					   TREE_OPERAND (t, 0), repr,
					   NULL_TREE), location, is_store);
	}
      return;
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      build_check_stmt (location, base, iter, /*before_p=*/true,
			is_store, size_in_bytes);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}
/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   sizeof (*BASE) bytes).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

static void
instrument_mem_region_access (tree base, tree len,
			      gimple_stmt_iterator *iter,
			      location_t location, bool is_store)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  gimple_stmt_iterator gsi = *iter;

  basic_block fallthrough_bb = NULL, then_bb = NULL;

  /* If the beginning of the memory region has already been
     instrumented, do not instrument it.  */
  bool start_instrumented = has_mem_ref_been_instrumented (base, 1);

  /* If the end of the memory region has already been instrumented, do
     not instrument it.  */
  tree end = asan_mem_ref_get_end (base, len);
  bool end_instrumented = has_mem_ref_been_instrumented (end, 1);

  if (start_instrumented && end_instrumented)
    return;

  if (!is_gimple_constant (len))
    {
      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     // asan instrumentation code goes here.
	   }
	 // fallthrough instructions, starting with *ITER.  */

      gimple g = gimple_build_cond (NE_EXPR,
				    len,
				    build_int_cst (TREE_TYPE (len), 0),
				    NULL_TREE, NULL_TREE);
      gimple_set_location (g, location);
      insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  if (!start_instrumented)
    {
      /* Instrument the beginning of the memory region to be accessed,
	 and arrange for the rest of the instrumentation code to be
	 inserted in the then block *after* the current gsi.  */
      build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);

      if (then_bb)
	/* We are in the case where the length of the region is not
	   constant; so instrumentation code is being generated in the
	   'then block' of the 'if (len != 0)' condition.  Let's arrange
	   for the subsequent instrumentation statements to go in the
	   'then block'.  */
	gsi = gsi_last_bb (then_bb);
      else
	{
	  *iter = gsi;
	  /* Don't remember this access as instrumented if the length
	     is unknown.  It might be zero and not actually
	     instrumented, so we can't rely on it being instrumented.  */
	  update_mem_ref_hash_table (base, 1);
	}
    }

  if (end_instrumented)
    return;

  /* We want to instrument the access at the end of the memory region,
     which is at (base + len - 1).  */

  /* offset = len - 1;  */
  len = unshare_expr (len);
  tree offset;
  gimple_seq seq = NULL;
  if (TREE_CODE (len) == INTEGER_CST)
    offset = fold_build2 (MINUS_EXPR, size_type_node,
			  fold_convert (size_type_node, len),
			  build_int_cst (size_type_node, 1));
  else
    {
      gimple g;
      tree t;

      if (TREE_CODE (len) != SSA_NAME)
	{
	  t = make_ssa_name (TREE_TYPE (len), NULL);
	  g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
	  gimple_set_location (g, location);
	  gimple_seq_add_stmt_without_update (&seq, g);
	  len = t;
	}
      if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
	{
	  t = make_ssa_name (size_type_node, NULL);
	  g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
	  gimple_set_location (g, location);
	  gimple_seq_add_stmt_without_update (&seq, g);
	  len = t;
	}

      t = make_ssa_name (size_type_node, NULL);
      g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
					build_int_cst (size_type_node, 1));
      gimple_set_location (g, location);
      gimple_seq_add_stmt_without_update (&seq, g);
      offset = gimple_assign_lhs (g);
    }

  /* _1 = base;  */
  base = unshare_expr (base);
  gimple region_end
    = gimple_build_assign_with_ops (TREE_CODE (base),
				    make_ssa_name (TREE_TYPE (base), NULL),
				    base, NULL);
  gimple_set_location (region_end, location);
  gimple_seq_add_stmt_without_update (&seq, region_end);

  /* _2 = _1 + offset;  */
  region_end
    = gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
				    make_ssa_name (TREE_TYPE (base), NULL),
				    gimple_assign_lhs (region_end),
				    offset);
  gimple_set_location (region_end, location);
  gimple_seq_add_stmt_without_update (&seq, region_end);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  /* instrument access at _2;  */
  gsi = gsi_for_stmt (region_end);
  build_check_stmt (location, gimple_assign_lhs (region_end),
		    &gsi, /*before_p=*/false, is_store, 1);

  if (then_bb == NULL)
    update_mem_ref_hash_table (end, 1);

  *iter = gsi_for_stmt (gsi_stmt (*iter));
}
/* Instrument the call (to the builtin strlen function) pointed to by
   ITER.

   This function instruments the access to the first byte of the
   argument, right before the call.  After the call it instruments the
   access to the last byte of the argument; it uses the result of the
   call to deduce the offset of that last byte.

   Upon completion, iff the call has actually been instrumented, this
   function returns TRUE and *ITER points to the statement logically
   following the built-in strlen function call *ITER was initially
   pointing to.  Otherwise, the function returns FALSE and *ITER
   remains unchanged.  */

static bool
instrument_strlen_call (gimple_stmt_iterator *iter)
{
  gimple call = gsi_stmt (*iter);
  gcc_assert (is_gimple_call (call));

  tree callee = gimple_call_fndecl (call);
  gcc_assert (is_builtin_fn (callee)
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);

  tree len = gimple_call_lhs (call);
  if (len == NULL)
    /* Some passes might clear the return value of the strlen call;
       bail out in that case.  Return FALSE as we are not advancing
       *ITER.  */
    return false;
  gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));

  location_t loc = gimple_location (call);
  tree str_arg = gimple_call_arg (call, 0);

  /* Instrument the access to the first byte of str_arg, i.e.:

       _1 = str_arg; instrument (_1);  */
  gimple str_arg_ssa
    = gimple_build_assign_with_ops (NOP_EXPR,
				    make_ssa_name (build_pointer_type
						   (char_type_node), NULL),
				    str_arg, NULL);
  gimple_set_location (str_arg_ssa, loc);
  gimple_stmt_iterator gsi = *iter;
  gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
  build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
		    /*before_p=*/false, /*is_store=*/false, 1);

  /* If we initially had an instruction like:

       int n = strlen (str)

     we now want to instrument the access to str[n], after the
     instruction above.  */

  /* So let's build the access to str[n], that is, the access through the
     pointer_plus expr: (_1 + len).  */
  gimple stmt
    = gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
				    make_ssa_name (TREE_TYPE (str_arg),
						   NULL),
				    gimple_assign_lhs (str_arg_ssa),
				    len);
  gimple_set_location (stmt, loc);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);

  build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
		    /*before_p=*/false, /*is_store=*/false, 1);

  /* Ensure that iter points to the statement logically following the
     one it was initially pointing to.  */
  *iter = gsi;
  /* As *ITER has been advanced to point to the next statement, let's
     return true to inform transform_statements that it shouldn't
     advance *ITER anymore; otherwise it will skip that next
     statement, which wouldn't be instrumented.  */
  return true;
}

/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  bool iter_advanced_p = false;
  gimple call = gsi_stmt (*iter);

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  location_t loc = gimple_location (call);

  if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
    iter_advanced_p = instrument_strlen_call (iter);
  else
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
        dest_is_store = false, dest_is_deref = false;

      if (get_mem_refs_of_builtin_call (call,
                                        &src0, &src0_len, &src0_is_store,
                                        &src1, &src1_len, &src1_is_store,
                                        &dest, &dest_len, &dest_is_store,
                                        &dest_is_deref))
        {
          if (dest_is_deref)
            {
              instrument_derefs (iter, dest.start, loc, dest_is_store);
              gsi_next (iter);
              iter_advanced_p = true;
            }
          else if (src0_len || src1_len || dest_len)
            {
              if (src0.start != NULL_TREE)
                instrument_mem_region_access (src0.start, src0_len,
                                              iter, loc, /*is_store=*/false);
              if (src1.start != NULL_TREE)
                instrument_mem_region_access (src1.start, src1_len,
                                              iter, loc, /*is_store=*/false);
              if (dest.start != NULL_TREE)
                instrument_mem_region_access (dest.start, dest_len,
                                              iter, loc, /*is_store=*/true);
              *iter = gsi_for_stmt (call);
              gsi_next (iter);
              iter_advanced_p = true;
            }
        }
    }
  return iter_advanced_p;
}
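
/* For illustration: for a statement like

     memcpy (d_1, s_2, n_3);

   get_mem_refs_of_builtin_call reports SRC0 = [s_2, s_2 + n_3) as
   read and DEST = [d_1, d_1 + n_3) as written, so both regions are
   checked by instrument_mem_region_access above.  For an atomic
   builtin such as __atomic_load_n, only a plain dereference of the
   address is reported (DEST_IS_DEREF) and instrument_derefs is used
   instead.  (A sketch; the authoritative per-builtin mapping lives
   in get_mem_refs_of_builtin_call.)  */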

/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the memory reference whose access has been instrumented is
   added to the memory references hash table.  */

static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      is_store = true;
      instrument_derefs (iter, ref_expr,
                         gimple_location (s),
                         is_store);
      is_instrumented = true;
    }

  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      is_store = false;
      instrument_derefs (iter, ref_expr,
                         gimple_location (s),
                         is_store);
      is_instrumented = true;
    }

  if (is_instrumented)
    gsi_next (iter);

  return is_instrumented;
}
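
/* For illustration, an aggregate copy such as

     *p_1 = *q_2;

   is a single assignment that is both a load and a store:
   gimple_store_p and gimple_assign_load_p are both true, so the
   function above instruments the read of *q_2 as well as the write
   to *p_1 (a sketch of the common case).  */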

/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
        {
          tree callee = gimple_call_fndecl (stmt);
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_UNREACHABLE:
            case BUILT_IN_TRAP:
              /* Don't instrument these.  */
              return false;
            }
        }
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }
  return false;
}
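
/* For illustration, a noreturn call such as

     longjmp (env, 1);

   is conceptually rewritten (sketch) into

     __asan_handle_no_return ();
     longjmp (env, 1);

   so the run-time can unpoison the stack first: the frames being
   jumped over will never execute their epilogues, which would
   otherwise leave their redzones poisoned.  */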

/* Walk each instruction of all basic blocks and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;

  FOR_EACH_BB (bb)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
         exactly one predecessor, or if that predecessor (skipping
         over asan created basic blocks) isn't the last processed
         basic block.  Thus we effectively flush on extended basic
         block boundaries.  */
      while (single_pred_p (prev_bb))
        {
          prev_bb = single_pred (prev_bb);
          if (prev_bb->index < saved_last_basic_block)
            break;
        }
      if (prev_bb != last_bb)
        empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
        {
          gimple s = gsi_stmt (i);

          if (has_stmt_been_instrumented_p (s))
            gsi_next (&i);
          else if (gimple_assign_single_p (s)
                   && maybe_instrument_assignment (&i))
            /* Nothing to do as maybe_instrument_assignment advanced
               the iterator I.  */;
          else if (is_gimple_call (s) && maybe_instrument_call (&i))
            /* Nothing to do as maybe_instrument_call
               advanced the iterator I.  */;
          else
            {
              /* No instrumentation happened.

                 If the current instruction is a function call that
                 might free something, let's forget about the memory
                 references that got instrumented.  Otherwise we might
                 miss some instrumentation opportunities.  */
              if (is_gimple_call (s) && !nonfreeing_call_p (s))
                empty_mem_ref_hash_table ();

              gsi_next (&i);
            }
        }
    }
  free_mem_ref_resources ();
}
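
/* For illustration of the caching above, consider this run of
   statements inside one extended basic block (a sketch):

     p_1->x = 0;      // instrumented; ref recorded in the hash table
     t_2 = p_1->x;    // same ref: has_stmt_been_instrumented_p skips it
     foo ();          // may free memory: the table is flushed
     p_1->x = 1;      // must be checked again

   nonfreeing_call_p decides whether a call can invalidate the cached
   references.  */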

/* Build
     struct __asan_global
     {
       const void *__beg;
       uptr __size;
       uptr __size_with_redzone;
       const void *__name;
       uptr __has_dynamic_init;
     } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[5]
    = { "__beg", "__size", "__size_with_redzone",
        "__name", "__has_dynamic_init" };
  tree fields[5], ret;
  int i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < 5; i++)
    {
      fields[i]
        = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                      get_identifier (field_names[i]),
                      (i == 0 || i == 3) ? const_ptr_type_node
                      : build_nonstandard_integer_type (POINTER_SIZE, 1));
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
        DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = get_identifier ("__asan_global");
  layout_type (ret);
  return ret;
}
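
/* The type built above corresponds to this C declaration (a sketch,
   shown for an LP64 target where uptr becomes unsigned long):

     struct __asan_global
     {
       const void *__beg;                  // address of the global
       unsigned long __size;               // size in bytes
       unsigned long __size_with_redzone;  // size + trailing redzone
       const void *__name;                 // "name (source-file)"
       unsigned long __has_dynamic_init;   // 0 in this implementation
     };

   Its layout must stay in sync with the descriptor the libasan
   run-time reads in __asan_register_globals.  */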

/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  if (!asan_pp_initialized)
    asan_pp_initialize ();

  pp_clear_output_area (&asan_pp);
  if (DECL_NAME (decl))
    pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  pp_space (&asan_pp);
  pp_left_paren (&asan_pp);
  pp_string (&asan_pp, main_input_filename);
  pp_right_paren (&asan_pp);
  str_cst = asan_pp_string ();

  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
                            VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node,
                                        build_fold_addr_expr (refdecl)));
  size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, 0));
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
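
/* For illustration, for a 4-byte global

     int g;   // defined in foo.c

   the descriptor built above would read roughly

     { &g, 4, 64, "g (foo.c)", 0 }

   assuming a 32-byte shadow granularity, where 64 = 4 bytes of data
   plus the trailing redzone computed by asan_red_zone_size.  When
   asan_needs_local_alias is true, a local .LASANn alias stands in
   for &g so the descriptor is immune to symbol interposition.  */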

/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */

void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
                                build_nonstandard_integer_type (POINTER_SIZE,
                                                                1), NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_VOLATILE
                                                | TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
        = build_function_type_list (boolt, vptr, ptr_type_node, ix,
                                    integer_type_node, integer_type_node,
                                    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
        = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
        = build_function_type_list (ix, vptr, ix, integer_type_node,
                                    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
        = build_function_type_list (void_type_node, vptr, ix,
                                    integer_type_node, NULL_TREE);
    }
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
                               BUILT_IN_NORMAL, NAME, NULL_TREE); \
  set_call_expr_flags (decl, ATTRS); \
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

#undef DEF_SANITIZER_BUILTIN
}
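
/* For illustration, an entry in sanitizer.def of the form (sketch)

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init",
                            BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   expands through the macro above into roughly

     decl = add_builtin_function ("__builtin___asan_init", BT_FN_VOID,
                                  BUILT_IN_ASAN_INIT, BUILT_IN_NORMAL,
                                  "__asan_init", NULL_TREE);
     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);
     set_builtin_decl (BUILT_IN_ASAN_INIT, decl, true);

   so the run-time entry point becomes usable both as __asan_init and
   as __builtin___asan_init.  */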

/* Called via htab_traverse.  Count number of emitted
   STRING_CSTs in the constant hash table.  */

static int
count_string_csts (void **slot, void *data)
{
  struct constant_descriptor_tree *desc
    = (struct constant_descriptor_tree *) *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*((unsigned HOST_WIDE_INT *) data);
  return 1;
}

/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};

/* Called via htab_traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

static int
add_string_csts (void **slot, void *data)
{
  struct constant_descriptor_tree *desc
    = (struct constant_descriptor_tree *) *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      struct asan_add_string_csts_data *aascd
        = (struct asan_add_string_csts_data *) data;
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
                       aascd->type, aascd->v);
    }
  return 1;
}

/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;

/* Module-level instrumentation.
   - Insert __asan_init () into the list of CTORs.
   - TODO: insert redzones around globals.  */

void
asan_finish_file (void)
{
  struct varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_asan = 0;

  tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
  append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->symbol.decl)
        && asan_protect_global (vnode->symbol.decl))
      ++gcount;
  htab_t const_desc_htab = constant_pool_htab ();
  htab_traverse (const_desc_htab, count_string_csts, &gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree uptr = build_nonstandard_integer_type (POINTER_SIZE, 1);
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
                        type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
        if (TREE_ASM_WRITTEN (vnode->symbol.decl)
            && asan_protect_global (vnode->symbol.decl))
          asan_add_global (vnode->symbol.decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      htab_traverse (const_desc_htab, add_string_csts, &aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_assemble_decl (varpool_node_for_decl (var));

      fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
                                                 build_fold_addr_expr (var),
                                                 build_int_cst (uptr, gcount)),
                                &asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
                                                 build_fold_addr_expr (var),
                                                 build_int_cst (uptr, gcount)),
                                &dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements,
                                 MAX_RESERVED_INIT_PRIORITY - 1);
    }
  cgraph_build_static_cdtor ('I', asan_ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
  flag_asan = 1;
}
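
/* For illustration, the net effect on a translation unit with N
   protected globals is roughly this hand-written equivalent (a
   sketch; the ctor/dtor names here are hypothetical, the real
   symbols are chosen by cgraph_build_static_cdtor, both registered
   at a reserved init priority):

     static struct __asan_global lasan0[N] = { ... };

     static void ctor (void)   // built via the 'I' cdtor above
     {
       __asan_init ();
       __asan_register_globals (&lasan0[0], N);
     }

     static void dtor (void)   // built via the 'D' cdtor above
     {
       __asan_unregister_globals (&lasan0[0], N);
     }
*/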

/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  return 0;
}

static bool
gate_asan (void)
{
  return flag_asan != 0
          && !lookup_attribute ("no_sanitize_address",
                                DECL_ATTRIBUTES (current_function_decl));
}
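
/* For illustration, the gate above lets a user exempt one function
   while the rest of the file stays instrumented (sketch):

     __attribute__ ((no_sanitize_address))
     void skip_me (void)   // hypothetical example function
     {
       ...                 // gate_asan returns false: no checks here
     }
*/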

struct gimple_opt_pass pass_asan =
{
 {
  GIMPLE_PASS,
  "asan",                               /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_asan,                            /* gate */
  asan_instrument,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_update_ssa                     /* todo_flags_finish */
 }
};

static bool
gate_asan_O0 (void)
{
  return !optimize && gate_asan ();
}

struct gimple_opt_pass pass_asan_O0 =
{
 {
  GIMPLE_PASS,
  "asan0",                              /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_asan_O0,                         /* gate */
  asan_instrument,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_update_ssa                     /* todo_flags_finish */
 }
};

#include "gt-asan.h"