gcc/asan.c
1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2013 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "gimple.h"
26 #include "tree-iterator.h"
27 #include "tree-flow.h"
28 #include "tree-pass.h"
29 #include "asan.h"
30 #include "gimple-pretty-print.h"
31 #include "target.h"
32 #include "expr.h"
33 #include "optabs.h"
34 #include "output.h"
35 #include "tm_p.h"
36 #include "langhooks.h"
37 #include "hash-table.h"
38 #include "alloc-pool.h"
39 #include "cfgloop.h"
41 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
42 with <2x slowdown on average.
44 The tool consists of two parts:
45 instrumentation module (this file) and a run-time library.
46 The instrumentation module adds a run-time check before every memory insn.
47 For an 8- or 16-byte load accessing address X:
48 ShadowAddr = (X >> 3) + Offset
49 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
50 if (ShadowValue)
51 __asan_report_load8(X);
52 For a load of N bytes (N=1, 2 or 4) from address X:
53 ShadowAddr = (X >> 3) + Offset
54 ShadowValue = *(char*)ShadowAddr;
55 if (ShadowValue)
56 if ((X & 7) + N - 1 > ShadowValue)
57 __asan_report_loadN(X);
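As a worked example (purely illustrative): for a 4-byte load from an
address X with (X & 7) == 5, a shadow value of 0 means the whole
8-byte granule is addressable and the access passes, whereas a shadow
value of 6 means only the first 6 bytes of the granule are
addressable, so (5 + 4 - 1 > 6) holds and __asan_report_load4(X) is
called.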
58 Stores are instrumented similarly, but using __asan_report_storeN functions.
59 A call to __asan_init() is inserted into the list of module CTORs.
61 The run-time library redefines malloc (so that red zones are inserted around
62 the allocated memory) and free (so that reuse of freed memory is delayed),
63 and provides the __asan_report* and __asan_init functions.
65 Read more:
66 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
68 The current implementation supports detection of out-of-bounds and
69 use-after-free in the heap, on the stack and for global variables.
71 [Protection of stack variables]
73 To understand how detection of out-of-bounds and use-after-free works
74 for stack variables, let's look at this example on x86_64 where the
75 stack grows downward:
77 int
78 foo ()
80 char a[23] = {0};
81 int b[2] = {0};
83 a[5] = 1;
84 b[1] = 2;
86 return a[5] + b[1];
89 For this function, the stack protected by asan will be organized as
90 follows, from the top of the stack to the bottom:
92 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
94 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
95 the next slot be 32-byte aligned; this one is called Partial
96 Redzone; this 32-byte alignment is an asan constraint]
98 Slot 3/ [24 bytes for variable 'a']
100 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
102 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
104 Slot 6/ [8 bytes for variable 'b']
106 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
107 'LEFT RedZone']
109 The 32 bytes of LEFT red zone at the bottom of the stack can be
110 decomposed as follows:
112 1/ The first 8 bytes contain a magical asan number that is always
113 0x41B58AB3.
115 2/ The following 8 bytes contain a pointer to a string (to be
116 parsed at run time by the asan runtime library), whose format is
117 the following:
119 "<function-name> <space> <num-of-variables-on-the-stack>
120 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
121 <length-of-var-in-bytes> ){n} "
123 where '(...){n}' means the content inside the parentheses occurs 'n'
124 times, with 'n' being the number of variables on the stack.
126 3/ The following 16 bytes of the red zone have no particular
127 format.
129 The shadow memory for that stack layout is going to look like this:
131 - content of shadow memory 4 bytes for slot 7: 0xF1F1F1F1.
132 The F1 byte pattern is a magic number called
133 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
134 the memory for that shadow byte is part of the LEFT red zone
135 intended to sit at the bottom of the variables on the stack.
137 - content of shadow memory 4 bytes for slots 6 and 5:
138 0xF4F4F400. The F4 byte pattern is a magic number
139 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
140 memory region for this shadow byte is a PARTIAL red zone
141 intended to pad a variable A, so that the slot following
142 {A,padding} is 32-byte aligned.
144 Note that the least significant byte of this shadow memory
145 content being 00 means that the 8 bytes of memory it covers
146 (the memory of variable 'b') are addressable.
149 - content of shadow memory 4 bytes for slot 4: 0xF2F2F2F2.
150 The F2 byte pattern is a magic number called
151 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
152 region for this shadow byte is a MIDDLE red zone intended to
153 sit between two 32-byte aligned slots of {variable,padding}.
155 - content of shadow memory 4 bytes for slots 3 and 2:
156 0xF4000000. This represents the concatenation of
157 variable 'a' and the partial red zone following it, like what we
158 had for variable 'b'. The least significant 3 bytes being 00
159 means that 3 * 8 = 24 bytes of variable 'a' are addressable.
161 - content of shadow memory 4 bytes for slot 1: 0xF3F3F3F3.
162 The F3 byte pattern is a magic number called
163 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
164 region for this shadow byte is a RIGHT red zone intended to sit
165 at the top of the variables on the stack.
167 Note that the real variable layout is done in expand_used_vars in
168 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
169 stack variables as well as the different red zones, emits some
170 prologue code to populate the shadow memory so as to poison (mark as
171 non-accessible) the regions of the red zones and mark the regions of
172 stack variables as accessible, and emits some epilogue code to
173 un-poison (mark as accessible) the regions of red zones right before
174 the function exits.
176 [Protection of global variables]
178 The basic idea is to insert a red zone between two global variables
179 and install a constructor function that calls the asan runtime to
180 populate the relevant shadow memory regions at load time.
182 So the global variables are laid out so as to insert a red zone
183 between them. The size of the red zones is chosen so that each
184 variable starts on a 32-byte boundary.
186 Then a constructor function is installed so that, for each global
187 variable, it calls the runtime asan library function
188 __asan_register_globals with an instance of this type:
190 struct __asan_global
192 // Address of the beginning of the global variable.
193 const void *__beg;
195 // Initial size of the global variable.
196 uptr __size;
198 // Size of the global variable + size of the red zone. This
199 // size is 32 bytes aligned.
200 uptr __size_with_redzone;
202 // Name of the global variable.
203 const void *__name;
205 // This is always set to NULL for now.
206 uptr __has_dynamic_init;
209 A destructor function that calls the runtime asan library function
210 __asan_unregister_globals is also installed. */
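/* As an illustrative sketch only (the real records are emitted by the
global registration code and the exact padding is target dependent),
a hypothetical global "int g;" protected this way could be described
to the runtime roughly as:

__beg = &g, __size = 4, __size_with_redzone = 64, __name = "g",
__has_dynamic_init = 0

i.e. the 4 bytes of "g" padded up to the next 32-byte boundary plus a
32-byte trailing red zone. */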
212 alias_set_type asan_shadow_set = -1;
214 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
215 alias set is used for all shadow memory accesses. */
216 static GTY(()) tree shadow_ptr_types[2];
218 /* Hashtable support for memory references used by gimple
219 statements. */
221 /* This type represents a reference to a memory region. */
222 struct asan_mem_ref
224 /* The expression of the beginning of the memory region. */
225 tree start;
227 /* The size of the access (can be 1, 2, 4, 8, 16 for now). */
228 char access_size;
231 static alloc_pool asan_mem_ref_alloc_pool;
233 /* This creates the alloc pool used to store the instances of
234 asan_mem_ref that are stored in the hash table asan_mem_ref_ht. */
236 static alloc_pool
237 asan_mem_ref_get_alloc_pool ()
239 if (asan_mem_ref_alloc_pool == NULL)
240 asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
241 sizeof (asan_mem_ref),
242 10);
243 return asan_mem_ref_alloc_pool;
247 /* Initializes an instance of asan_mem_ref. */
249 static void
250 asan_mem_ref_init (asan_mem_ref *ref, tree start, char access_size)
252 ref->start = start;
253 ref->access_size = access_size;
256 /* Allocates memory for an instance of asan_mem_ref into the memory
257 pool returned by asan_mem_ref_get_alloc_pool and initializes it.
258 START is the address of (or the expression pointing to) the
259 beginning of memory reference. ACCESS_SIZE is the size of the
260 access to the referenced memory. */
262 static asan_mem_ref*
263 asan_mem_ref_new (tree start, char access_size)
265 asan_mem_ref *ref =
266 (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());
268 asan_mem_ref_init (ref, start, access_size);
269 return ref;
272 /* This builds and returns a pointer to the end of the memory region
273 that starts at START and has length LEN. */
275 tree
276 asan_mem_ref_get_end (tree start, tree len)
278 if (len == NULL_TREE || integer_zerop (len))
279 return start;
281 return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
284 /* Return a tree expression that represents the end of the referenced
285 memory region. Beware that this function can actually build a new
286 tree expression. */
288 tree
289 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
291 return asan_mem_ref_get_end (ref->start, len);
294 struct asan_mem_ref_hasher
295 : typed_noop_remove <asan_mem_ref>
297 typedef asan_mem_ref value_type;
298 typedef asan_mem_ref compare_type;
300 static inline hashval_t hash (const value_type *);
301 static inline bool equal (const value_type *, const compare_type *);
304 /* Hash a memory reference. */
306 inline hashval_t
307 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
309 hashval_t h = iterative_hash_expr (mem_ref->start, 0);
310 h = iterative_hash_hashval_t (h, mem_ref->access_size);
311 return h;
314 /* Compare two memory references. We accept the length of either
315 memory reference to be NULL_TREE. */
317 inline bool
318 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
319 const asan_mem_ref *m2)
321 return (m1->access_size == m2->access_size
322 && operand_equal_p (m1->start, m2->start, 0));
325 static hash_table <asan_mem_ref_hasher> asan_mem_ref_ht;
327 /* Returns a reference to the hash table containing memory references.
328 This function ensures that the hash table is created. Note that
329 this hash table is updated by the function
330 update_mem_ref_hash_table. */
332 static hash_table <asan_mem_ref_hasher> &
333 get_mem_ref_hash_table ()
335 if (!asan_mem_ref_ht.is_created ())
336 asan_mem_ref_ht.create (10);
338 return asan_mem_ref_ht;
341 /* Clear all entries from the memory references hash table. */
343 static void
344 empty_mem_ref_hash_table ()
346 if (asan_mem_ref_ht.is_created ())
347 asan_mem_ref_ht.empty ();
350 /* Free the memory references hash table. */
352 static void
353 free_mem_ref_resources ()
355 if (asan_mem_ref_ht.is_created ())
356 asan_mem_ref_ht.dispose ();
358 if (asan_mem_ref_alloc_pool)
360 free_alloc_pool (asan_mem_ref_alloc_pool);
361 asan_mem_ref_alloc_pool = NULL;
365 /* Return true iff the memory reference REF has been instrumented. */
367 static bool
368 has_mem_ref_been_instrumented (tree ref, char access_size)
370 asan_mem_ref r;
371 asan_mem_ref_init (&r, ref, access_size);
373 return (get_mem_ref_hash_table ().find (&r) != NULL);
376 /* Return true iff the memory reference REF has been instrumented. */
378 static bool
379 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
381 return has_mem_ref_been_instrumented (ref->start, ref->access_size);
384 /* Return true iff access to memory region starting at REF and of
385 length LEN has been instrumented. */
387 static bool
388 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
390 /* First let's see if the address of the beginning of REF has been
391 instrumented. */
392 if (!has_mem_ref_been_instrumented (ref))
393 return false;
395 if (len != 0)
397 /* Let's see if the end of the region has been instrumented. */
398 if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
399 ref->access_size))
400 return false;
402 return true;
405 /* Set REF to the memory reference present in a gimple assignment
406 ASSIGNMENT. Return true upon successful completion, false
407 otherwise. */
409 static bool
410 get_mem_ref_of_assignment (const gimple assignment,
411 asan_mem_ref *ref,
412 bool *ref_is_store)
414 gcc_assert (gimple_assign_single_p (assignment));
416 if (gimple_store_p (assignment)
417 && !gimple_clobber_p (assignment))
419 ref->start = gimple_assign_lhs (assignment);
420 *ref_is_store = true;
422 else if (gimple_assign_load_p (assignment))
424 ref->start = gimple_assign_rhs1 (assignment);
425 *ref_is_store = false;
427 else
428 return false;
430 ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
431 return true;
434 /* Return the memory references contained in a gimple statement
435 representing a builtin call that has to do with memory access. */
437 static bool
438 get_mem_refs_of_builtin_call (const gimple call,
439 asan_mem_ref *src0,
440 tree *src0_len,
441 bool *src0_is_store,
442 asan_mem_ref *src1,
443 tree *src1_len,
444 bool *src1_is_store,
445 asan_mem_ref *dst,
446 tree *dst_len,
447 bool *dst_is_store,
448 bool *dest_is_deref)
450 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
452 tree callee = gimple_call_fndecl (call);
453 tree source0 = NULL_TREE, source1 = NULL_TREE,
454 dest = NULL_TREE, len = NULL_TREE;
455 bool is_store = true, got_reference_p = false;
456 char access_size = 1;
458 switch (DECL_FUNCTION_CODE (callee))
460 /* (s, s, n) style memops. */
461 case BUILT_IN_BCMP:
462 case BUILT_IN_MEMCMP:
463 source0 = gimple_call_arg (call, 0);
464 source1 = gimple_call_arg (call, 1);
465 len = gimple_call_arg (call, 2);
466 break;
468 /* (src, dest, n) style memops. */
469 case BUILT_IN_BCOPY:
470 source0 = gimple_call_arg (call, 0);
471 dest = gimple_call_arg (call, 1);
472 len = gimple_call_arg (call, 2);
473 break;
475 /* (dest, src, n) style memops. */
476 case BUILT_IN_MEMCPY:
477 case BUILT_IN_MEMCPY_CHK:
478 case BUILT_IN_MEMMOVE:
479 case BUILT_IN_MEMMOVE_CHK:
480 case BUILT_IN_MEMPCPY:
481 case BUILT_IN_MEMPCPY_CHK:
482 dest = gimple_call_arg (call, 0);
483 source0 = gimple_call_arg (call, 1);
484 len = gimple_call_arg (call, 2);
485 break;
487 /* (dest, n) style memops. */
488 case BUILT_IN_BZERO:
489 dest = gimple_call_arg (call, 0);
490 len = gimple_call_arg (call, 1);
491 break;
493 /* (dest, x, n) style memops. */
494 case BUILT_IN_MEMSET:
495 case BUILT_IN_MEMSET_CHK:
496 dest = gimple_call_arg (call, 0);
497 len = gimple_call_arg (call, 2);
498 break;
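/* For strlen the accessed region runs from the argument up to its
terminating NUL, so the call's return value is used as the length
below. */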
500 case BUILT_IN_STRLEN:
501 source0 = gimple_call_arg (call, 0);
502 len = gimple_call_lhs (call);
503 break;
505 /* And now the __atomic* and __sync builtins.
506 These are handled differently from the classical memory
507 access builtins above. */
509 case BUILT_IN_ATOMIC_LOAD_1:
510 case BUILT_IN_ATOMIC_LOAD_2:
511 case BUILT_IN_ATOMIC_LOAD_4:
512 case BUILT_IN_ATOMIC_LOAD_8:
513 case BUILT_IN_ATOMIC_LOAD_16:
514 is_store = false;
515 /* fall through. */
517 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
518 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
519 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
520 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
521 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
523 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
524 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
525 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
526 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
527 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
529 case BUILT_IN_SYNC_FETCH_AND_OR_1:
530 case BUILT_IN_SYNC_FETCH_AND_OR_2:
531 case BUILT_IN_SYNC_FETCH_AND_OR_4:
532 case BUILT_IN_SYNC_FETCH_AND_OR_8:
533 case BUILT_IN_SYNC_FETCH_AND_OR_16:
535 case BUILT_IN_SYNC_FETCH_AND_AND_1:
536 case BUILT_IN_SYNC_FETCH_AND_AND_2:
537 case BUILT_IN_SYNC_FETCH_AND_AND_4:
538 case BUILT_IN_SYNC_FETCH_AND_AND_8:
539 case BUILT_IN_SYNC_FETCH_AND_AND_16:
541 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
542 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
543 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
544 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
545 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
547 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
548 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
549 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
550 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
552 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
553 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
554 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
555 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
556 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
558 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
559 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
560 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
561 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
562 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
564 case BUILT_IN_SYNC_OR_AND_FETCH_1:
565 case BUILT_IN_SYNC_OR_AND_FETCH_2:
566 case BUILT_IN_SYNC_OR_AND_FETCH_4:
567 case BUILT_IN_SYNC_OR_AND_FETCH_8:
568 case BUILT_IN_SYNC_OR_AND_FETCH_16:
570 case BUILT_IN_SYNC_AND_AND_FETCH_1:
571 case BUILT_IN_SYNC_AND_AND_FETCH_2:
572 case BUILT_IN_SYNC_AND_AND_FETCH_4:
573 case BUILT_IN_SYNC_AND_AND_FETCH_8:
574 case BUILT_IN_SYNC_AND_AND_FETCH_16:
576 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
577 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
578 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
579 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
580 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
582 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
583 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
584 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
585 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
587 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
588 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
589 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
590 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
591 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
593 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
594 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
595 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
596 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
597 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
599 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
600 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
601 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
602 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
603 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
605 case BUILT_IN_SYNC_LOCK_RELEASE_1:
606 case BUILT_IN_SYNC_LOCK_RELEASE_2:
607 case BUILT_IN_SYNC_LOCK_RELEASE_4:
608 case BUILT_IN_SYNC_LOCK_RELEASE_8:
609 case BUILT_IN_SYNC_LOCK_RELEASE_16:
611 case BUILT_IN_ATOMIC_EXCHANGE_1:
612 case BUILT_IN_ATOMIC_EXCHANGE_2:
613 case BUILT_IN_ATOMIC_EXCHANGE_4:
614 case BUILT_IN_ATOMIC_EXCHANGE_8:
615 case BUILT_IN_ATOMIC_EXCHANGE_16:
617 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
618 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
619 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
620 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
621 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
623 case BUILT_IN_ATOMIC_STORE_1:
624 case BUILT_IN_ATOMIC_STORE_2:
625 case BUILT_IN_ATOMIC_STORE_4:
626 case BUILT_IN_ATOMIC_STORE_8:
627 case BUILT_IN_ATOMIC_STORE_16:
629 case BUILT_IN_ATOMIC_ADD_FETCH_1:
630 case BUILT_IN_ATOMIC_ADD_FETCH_2:
631 case BUILT_IN_ATOMIC_ADD_FETCH_4:
632 case BUILT_IN_ATOMIC_ADD_FETCH_8:
633 case BUILT_IN_ATOMIC_ADD_FETCH_16:
635 case BUILT_IN_ATOMIC_SUB_FETCH_1:
636 case BUILT_IN_ATOMIC_SUB_FETCH_2:
637 case BUILT_IN_ATOMIC_SUB_FETCH_4:
638 case BUILT_IN_ATOMIC_SUB_FETCH_8:
639 case BUILT_IN_ATOMIC_SUB_FETCH_16:
641 case BUILT_IN_ATOMIC_AND_FETCH_1:
642 case BUILT_IN_ATOMIC_AND_FETCH_2:
643 case BUILT_IN_ATOMIC_AND_FETCH_4:
644 case BUILT_IN_ATOMIC_AND_FETCH_8:
645 case BUILT_IN_ATOMIC_AND_FETCH_16:
647 case BUILT_IN_ATOMIC_NAND_FETCH_1:
648 case BUILT_IN_ATOMIC_NAND_FETCH_2:
649 case BUILT_IN_ATOMIC_NAND_FETCH_4:
650 case BUILT_IN_ATOMIC_NAND_FETCH_8:
651 case BUILT_IN_ATOMIC_NAND_FETCH_16:
653 case BUILT_IN_ATOMIC_XOR_FETCH_1:
654 case BUILT_IN_ATOMIC_XOR_FETCH_2:
655 case BUILT_IN_ATOMIC_XOR_FETCH_4:
656 case BUILT_IN_ATOMIC_XOR_FETCH_8:
657 case BUILT_IN_ATOMIC_XOR_FETCH_16:
659 case BUILT_IN_ATOMIC_OR_FETCH_1:
660 case BUILT_IN_ATOMIC_OR_FETCH_2:
661 case BUILT_IN_ATOMIC_OR_FETCH_4:
662 case BUILT_IN_ATOMIC_OR_FETCH_8:
663 case BUILT_IN_ATOMIC_OR_FETCH_16:
665 case BUILT_IN_ATOMIC_FETCH_ADD_1:
666 case BUILT_IN_ATOMIC_FETCH_ADD_2:
667 case BUILT_IN_ATOMIC_FETCH_ADD_4:
668 case BUILT_IN_ATOMIC_FETCH_ADD_8:
669 case BUILT_IN_ATOMIC_FETCH_ADD_16:
671 case BUILT_IN_ATOMIC_FETCH_SUB_1:
672 case BUILT_IN_ATOMIC_FETCH_SUB_2:
673 case BUILT_IN_ATOMIC_FETCH_SUB_4:
674 case BUILT_IN_ATOMIC_FETCH_SUB_8:
675 case BUILT_IN_ATOMIC_FETCH_SUB_16:
677 case BUILT_IN_ATOMIC_FETCH_AND_1:
678 case BUILT_IN_ATOMIC_FETCH_AND_2:
679 case BUILT_IN_ATOMIC_FETCH_AND_4:
680 case BUILT_IN_ATOMIC_FETCH_AND_8:
681 case BUILT_IN_ATOMIC_FETCH_AND_16:
683 case BUILT_IN_ATOMIC_FETCH_NAND_1:
684 case BUILT_IN_ATOMIC_FETCH_NAND_2:
685 case BUILT_IN_ATOMIC_FETCH_NAND_4:
686 case BUILT_IN_ATOMIC_FETCH_NAND_8:
687 case BUILT_IN_ATOMIC_FETCH_NAND_16:
689 case BUILT_IN_ATOMIC_FETCH_XOR_1:
690 case BUILT_IN_ATOMIC_FETCH_XOR_2:
691 case BUILT_IN_ATOMIC_FETCH_XOR_4:
692 case BUILT_IN_ATOMIC_FETCH_XOR_8:
693 case BUILT_IN_ATOMIC_FETCH_XOR_16:
695 case BUILT_IN_ATOMIC_FETCH_OR_1:
696 case BUILT_IN_ATOMIC_FETCH_OR_2:
697 case BUILT_IN_ATOMIC_FETCH_OR_4:
698 case BUILT_IN_ATOMIC_FETCH_OR_8:
699 case BUILT_IN_ATOMIC_FETCH_OR_16:
701 dest = gimple_call_arg (call, 0);
702 /* DEST represents the address of a memory location.
703 instrument_derefs wants the memory location, so let's
704 dereference the address DEST before handing it to
705 instrument_derefs. */
706 if (TREE_CODE (dest) == ADDR_EXPR)
707 dest = TREE_OPERAND (dest, 0);
708 else if (TREE_CODE (dest) == SSA_NAME)
709 dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
710 dest, build_int_cst (TREE_TYPE (dest), 0));
711 else
712 gcc_unreachable ();
714 access_size = int_size_in_bytes (TREE_TYPE (dest));
717 default:
718 /* The other builtin memory accesses are not instrumented in this
719 function because they either don't have any length parameter,
720 or their length parameter is just a limit. */
721 break;
724 if (len != NULL_TREE)
726 if (source0 != NULL_TREE)
728 src0->start = source0;
729 src0->access_size = access_size;
730 *src0_len = len;
731 *src0_is_store = false;
734 if (source1 != NULL_TREE)
736 src1->start = source1;
737 src1->access_size = access_size;
738 *src1_len = len;
739 *src1_is_store = false;
742 if (dest != NULL_TREE)
744 dst->start = dest;
745 dst->access_size = access_size;
746 *dst_len = len;
747 *dst_is_store = true;
750 got_reference_p = true;
752 else if (dest)
754 dst->start = dest;
755 dst->access_size = access_size;
756 *dst_len = NULL_TREE;
757 *dst_is_store = is_store;
758 *dest_is_deref = true;
759 got_reference_p = true;
762 return got_reference_p;
765 /* Return true iff a given gimple statement has been instrumented.
766 Note that the statement is "defined" by the memory references it
767 contains. */
769 static bool
770 has_stmt_been_instrumented_p (gimple stmt)
772 if (gimple_assign_single_p (stmt))
774 bool r_is_store;
775 asan_mem_ref r;
776 asan_mem_ref_init (&r, NULL, 1);
778 if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
779 return has_mem_ref_been_instrumented (&r);
781 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
783 asan_mem_ref src0, src1, dest;
784 asan_mem_ref_init (&src0, NULL, 1);
785 asan_mem_ref_init (&src1, NULL, 1);
786 asan_mem_ref_init (&dest, NULL, 1);
788 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
789 bool src0_is_store = false, src1_is_store = false,
790 dest_is_store = false, dest_is_deref = false;
791 if (get_mem_refs_of_builtin_call (stmt,
792 &src0, &src0_len, &src0_is_store,
793 &src1, &src1_len, &src1_is_store,
794 &dest, &dest_len, &dest_is_store,
795 &dest_is_deref))
797 if (src0.start != NULL_TREE
798 && !has_mem_ref_been_instrumented (&src0, src0_len))
799 return false;
801 if (src1.start != NULL_TREE
802 && !has_mem_ref_been_instrumented (&src1, src1_len))
803 return false;
805 if (dest.start != NULL_TREE
806 && !has_mem_ref_been_instrumented (&dest, dest_len))
807 return false;
809 return true;
812 return false;
815 /* Insert a memory reference into the hash table. */
817 static void
818 update_mem_ref_hash_table (tree ref, char access_size)
820 hash_table <asan_mem_ref_hasher> ht = get_mem_ref_hash_table ();
822 asan_mem_ref r;
823 asan_mem_ref_init (&r, ref, access_size);
825 asan_mem_ref **slot = ht.find_slot (&r, INSERT);
826 if (*slot == NULL)
827 *slot = asan_mem_ref_new (ref, access_size);
830 /* Initialize shadow_ptr_types array. */
832 static void
833 asan_init_shadow_ptr_types (void)
835 asan_shadow_set = new_alias_set ();
836 shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
837 TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
838 shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
839 shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
840 TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
841 shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
842 initialize_sanitizer_builtins ();
845 /* Create an ADDR_EXPR of a STRING_CST holding the text of the pretty printer PP. */
847 static tree
848 asan_pp_string (pretty_printer *pp)
850 const char *buf = pp_formatted_text (pp);
851 size_t len = strlen (buf);
852 tree ret = build_string (len + 1, buf);
853 TREE_TYPE (ret)
854 = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
855 build_index_type (size_int (len)));
856 TREE_READONLY (ret) = 1;
857 TREE_STATIC (ret) = 1;
858 return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
861 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
863 static rtx
864 asan_shadow_cst (unsigned char shadow_bytes[4])
866 int i;
867 unsigned HOST_WIDE_INT val = 0;
868 gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
869 for (i = 0; i < 4; i++)
870 val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
871 << (BITS_PER_UNIT * i);
872 return GEN_INT (trunc_int_for_mode (val, SImode));
875 /* Clear shadow memory at SHADOW_MEM, LEN bytes. We can't emit a library
876 call here though. */
878 static void
879 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
881 rtx insn, insns, top_label, end, addr, tmp, jump;
883 start_sequence ();
884 clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
885 insns = get_insns ();
886 end_sequence ();
887 for (insn = insns; insn; insn = NEXT_INSN (insn))
888 if (CALL_P (insn))
889 break;
890 if (insn == NULL_RTX)
892 emit_insn (insns);
893 return;
896 gcc_assert ((len & 3) == 0);
897 top_label = gen_label_rtx ();
898 addr = force_reg (Pmode, XEXP (shadow_mem, 0));
899 shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
900 end = force_reg (Pmode, plus_constant (Pmode, addr, len));
901 emit_label (top_label);
903 emit_move_insn (shadow_mem, const0_rtx);
904 tmp = expand_simple_binop (Pmode, PLUS, addr, GEN_INT (4), addr,
905 true, OPTAB_LIB_WIDEN);
906 if (tmp != addr)
907 emit_move_insn (addr, tmp);
908 emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
909 jump = get_last_insn ();
910 gcc_assert (JUMP_P (jump));
911 add_reg_note (jump, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE * 80 / 100));
914 /* Insert code to protect stack vars. The prologue sequence should be emitted
915 directly, epilogue sequence returned. BASE is the register holding the
916 stack base, to which the OFFSETS array offsets are relative. OFFSETS
917 array contains pairs of offsets in reverse order, always the end offset
918 of some gap that needs protection followed by starting offset,
919 and DECLS is an array of representative decls for each var partition.
920 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
921 elements long (OFFSETS include gap before the first variable as well
922 as gaps after each stack variable). */
925 asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
926 int length)
928 rtx shadow_base, shadow_mem, ret, mem;
929 unsigned char shadow_bytes[4];
930 HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
931 HOST_WIDE_INT last_offset, last_size;
932 int l;
933 unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
934 tree str_cst;
936 if (shadow_ptr_types[0] == NULL_TREE)
937 asan_init_shadow_ptr_types ();
939 /* First of all, prepare the description string. */
940 pretty_printer asan_pp;
942 if (DECL_NAME (current_function_decl))
943 pp_tree_identifier (&asan_pp, DECL_NAME (current_function_decl));
944 else
945 pp_string (&asan_pp, "<unknown>");
946 pp_space (&asan_pp);
947 pp_decimal_int (&asan_pp, length / 2 - 1);
948 pp_space (&asan_pp);
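/* Then, for each variable partition: its offset from the frame base,
its size in bytes, and the length of its name followed by the name
itself (or "9 <unknown>" when there is no usable DECL_NAME), all
space-separated. */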
949 for (l = length - 2; l; l -= 2)
951 tree decl = decls[l / 2 - 1];
952 pp_wide_integer (&asan_pp, offsets[l] - base_offset);
953 pp_space (&asan_pp);
954 pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
955 pp_space (&asan_pp);
956 if (DECL_P (decl) && DECL_NAME (decl))
958 pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
959 pp_space (&asan_pp);
960 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
962 else
963 pp_string (&asan_pp, "9 <unknown>");
964 pp_space (&asan_pp);
966 str_cst = asan_pp_string (&asan_pp);
968 /* Emit the prologue sequence. */
969 base = expand_binop (Pmode, add_optab, base, GEN_INT (base_offset),
970 NULL_RTX, 1, OPTAB_DIRECT);
971 mem = gen_rtx_MEM (ptr_mode, base);
972 emit_move_insn (mem, GEN_INT (ASAN_STACK_FRAME_MAGIC));
973 mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
974 emit_move_insn (mem, expand_normal (str_cst));
975 shadow_base = expand_binop (Pmode, lshr_optab, base,
976 GEN_INT (ASAN_SHADOW_SHIFT),
977 NULL_RTX, 1, OPTAB_DIRECT);
978 shadow_base = expand_binop (Pmode, add_optab, shadow_base,
979 GEN_INT (targetm.asan_shadow_offset ()),
980 NULL_RTX, 1, OPTAB_DIRECT);
981 gcc_assert (asan_shadow_set != -1
982 && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
983 shadow_mem = gen_rtx_MEM (SImode, shadow_base);
984 set_mem_alias_set (shadow_mem, asan_shadow_set);
985 prev_offset = base_offset;
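/* Poison the red zones. Shadow memory is written 4 bytes (covering
ASAN_RED_ZONE_SIZE bytes of stack) at a time: granules that are only
partially addressable get their count of addressable bytes, fully
addressable granules get 0, and red zone granules get the current
stack magic value. */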
986 for (l = length; l; l -= 2)
988 if (l == 2)
989 cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
990 offset = offsets[l - 1];
991 if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
993 int i;
994 HOST_WIDE_INT aoff
995 = base_offset + ((offset - base_offset)
996 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
997 shadow_mem = adjust_address (shadow_mem, VOIDmode,
998 (aoff - prev_offset)
999 >> ASAN_SHADOW_SHIFT);
1000 prev_offset = aoff;
1001 for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1002 if (aoff < offset)
1004 if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1005 shadow_bytes[i] = 0;
1006 else
1007 shadow_bytes[i] = offset - aoff;
1009 else
1010 shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1011 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1012 offset = aoff;
1014 while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1016 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1017 (offset - prev_offset)
1018 >> ASAN_SHADOW_SHIFT);
1019 prev_offset = offset;
1020 memset (shadow_bytes, cur_shadow_byte, 4);
1021 emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1022 offset += ASAN_RED_ZONE_SIZE;
1024 cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1026 do_pending_stack_adjust ();
1028 /* Construct epilogue sequence. */
1029 start_sequence ();
1031 shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1032 set_mem_alias_set (shadow_mem, asan_shadow_set);
1033 prev_offset = base_offset;
1034 last_offset = base_offset;
1035 last_size = 0;
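/* Un-poison the shadow of the whole frame; adjacent variable regions
are merged so that asan_clear_shadow is called on as few, as large as
possible, chunks of shadow memory. */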
1036 for (l = length; l; l -= 2)
1038 offset = base_offset + ((offsets[l - 1] - base_offset)
1039 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1040 if (last_offset + last_size != offset)
1042 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1043 (last_offset - prev_offset)
1044 >> ASAN_SHADOW_SHIFT);
1045 prev_offset = last_offset;
1046 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1047 last_offset = offset;
1048 last_size = 0;
1050 last_size += base_offset + ((offsets[l - 2] - base_offset)
1051 & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1052 - offset;
1054 if (last_size)
1056 shadow_mem = adjust_address (shadow_mem, VOIDmode,
1057 (last_offset - prev_offset)
1058 >> ASAN_SHADOW_SHIFT);
1059 asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1062 do_pending_stack_adjust ();
1064 ret = get_insns ();
1065 end_sequence ();
1066 return ret;
1069 /* Return true if DECL, a global var, might be overridden and
1070 therefore needs a local alias. */
1072 static bool
1073 asan_needs_local_alias (tree decl)
1075 return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1078 /* Return true if DECL is a VAR_DECL that should be protected
1079 by Address Sanitizer, by appending a red zone with protected
1080 shadow memory after it and aligning it to at least
1081 ASAN_RED_ZONE_SIZE bytes. */
1083 bool
1084 asan_protect_global (tree decl)
1086 rtx rtl, symbol;
1088 if (TREE_CODE (decl) == STRING_CST)
1090 /* Instrument all STRING_CSTs except those created
1091 by asan_pp_string here. */
1092 if (shadow_ptr_types[0] != NULL_TREE
1093 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1094 && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1095 return false;
1096 return true;
1098 if (TREE_CODE (decl) != VAR_DECL
1099 /* TLS vars aren't statically protectable. */
1100 || DECL_THREAD_LOCAL_P (decl)
1101 /* Externs will be protected elsewhere. */
1102 || DECL_EXTERNAL (decl)
1103 || !DECL_RTL_SET_P (decl)
1104 /* Comdat vars pose an ABI problem, we can't know if
1105 the var that is selected by the linker will have
1106 padding or not. */
1107 || DECL_ONE_ONLY (decl)
1108 /* Similarly for common vars. People can use -fno-common. */
1109 || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1110 /* Don't protect if using user section, often vars placed
1111 into user section from multiple TUs are then assumed
1112 to be an array of such vars, putting padding in there
1113 breaks this assumption. */
1114 || (DECL_SECTION_NAME (decl) != NULL_TREE
1115 && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
1116 || DECL_SIZE (decl) == 0
1117 || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1118 || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1119 || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
1120 return false;
1122 rtl = DECL_RTL (decl);
1123 if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1124 return false;
1125 symbol = XEXP (rtl, 0);
1127 if (CONSTANT_POOL_ADDRESS_P (symbol)
1128 || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1129 return false;
1131 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1132 return false;
1134 #ifndef ASM_OUTPUT_DEF
1135 if (asan_needs_local_alias (decl))
1136 return false;
1137 #endif
1139 return true;
1142 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
1143 IS_STORE is either 1 (for a store) or 0 (for a load).
1144 SIZE_IN_BYTES is one of 1, 2, 4, 8, 16. */
1146 static tree
1147 report_error_func (bool is_store, int size_in_bytes)
1149 static enum built_in_function report[2][5]
1150 = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1151 BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1152 BUILT_IN_ASAN_REPORT_LOAD16 },
1153 { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1154 BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1155 BUILT_IN_ASAN_REPORT_STORE16 } };
1156 return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
1159 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 2000 - 1)
1160 #define PROB_ALWAYS (REG_BR_PROB_BASE)
1162 /* Split the current basic block and create a condition statement
1163 insertion point right before or after the statement pointed to by
1164 ITER. Return an iterator to the point at which the caller might
1165 safely insert the condition statement.
1167 THEN_BLOCK must be set to the address of an uninitialized instance
1168 of basic_block. The function will then set *THEN_BLOCK to the
1169 'then block' of the condition statement to be inserted by the
1170 caller.
1172 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1173 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1175 Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1176 block' of the condition statement to be inserted by the caller.
1178 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1179 statements starting from *ITER, and *THEN_BLOCK is a new empty
1180 block.
1182 *ITER is adjusted to always point to the first statement
1183 of the basic block *FALLTHROUGH_BLOCK. That statement is the
1184 same as what ITER was pointing to prior to calling this function,
1185 if BEFORE_P is true; otherwise, it is its following statement. */
1187 static gimple_stmt_iterator
1188 create_cond_insert_point (gimple_stmt_iterator *iter,
1189 bool before_p,
1190 bool then_more_likely_p,
1191 bool create_then_fallthru_edge,
1192 basic_block *then_block,
1193 basic_block *fallthrough_block)
1195 gimple_stmt_iterator gsi = *iter;
1197 if (!gsi_end_p (gsi) && before_p)
1198 gsi_prev (&gsi);
1200 basic_block cur_bb = gsi_bb (*iter);
1202 edge e = split_block (cur_bb, gsi_stmt (gsi));
1204 /* Get a hold on the 'condition block', the 'then block' and the
1205 'else block'. */
1206 basic_block cond_bb = e->src;
1207 basic_block fallthru_bb = e->dest;
1208 basic_block then_bb = create_empty_bb (cond_bb);
1209 if (current_loops)
1211 add_bb_to_loop (then_bb, cond_bb->loop_father);
1212 loops_state_set (LOOPS_NEED_FIXUP);
1215 /* Set up the newly created 'then block'. */
1216 e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1217 int fallthrough_probability
1218 = then_more_likely_p
1219 ? PROB_VERY_UNLIKELY
1220 : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1221 e->probability = PROB_ALWAYS - fallthrough_probability;
1222 if (create_then_fallthru_edge)
1223 make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1225 /* Set up the fallthrough basic block. */
1226 e = find_edge (cond_bb, fallthru_bb);
1227 e->flags = EDGE_FALSE_VALUE;
1228 e->count = cond_bb->count;
1229 e->probability = fallthrough_probability;
1231 /* Update dominance info for the newly created then_bb; note that
1232 fallthru_bb's dominance info has already been updated by
1233 split_block. */
1234 if (dom_info_available_p (CDI_DOMINATORS))
1235 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1237 *then_block = then_bb;
1238 *fallthrough_block = fallthru_bb;
1239 *iter = gsi_start_bb (fallthru_bb);
1241 return gsi_last_bb (cond_bb);
1244 /* Insert an if condition followed by a 'then block' right before the
1245 statement pointed to by ITER. The fallthrough block -- which is the
1246 else block of the condition as well as the destination of the
1247 outgoing edge of the 'then block' -- starts with the statement
1248 pointed to by ITER.
1250 COND is the condition of the if.
1252 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1253 'then block' is higher than the probability of the edge to the
1254 fallthrough block.
1256 Upon completion of the function, *THEN_BB is set to the newly
1257 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1258 fallthrough block.
1260 *ITER is adjusted to still point to the same statement it was
1261 pointing to initially. */
1263 static void
1264 insert_if_then_before_iter (gimple cond,
1265 gimple_stmt_iterator *iter,
1266 bool then_more_likely_p,
1267 basic_block *then_bb,
1268 basic_block *fallthrough_bb)
1270 gimple_stmt_iterator cond_insert_point =
1271 create_cond_insert_point (iter,
1272 /*before_p=*/true,
1273 then_more_likely_p,
1274 /*create_then_fallthru_edge=*/true,
1275 then_bb,
1276 fallthrough_bb);
1277 gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1280 /* Instrument the memory access instruction BASE. Insert new
1281 statements before or after ITER.
1283 Note that the memory access represented by BASE can be either an
1284 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1285 location. IS_STORE is TRUE for a store, FALSE for a load.
1286 BEFORE_P is TRUE for inserting the instrumentation code before
1287 ITER, FALSE for inserting it after ITER. SIZE_IN_BYTES is one of
1288 1, 2, 4, 8, 16.
1290 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1291 statement it was pointing to prior to calling this function,
1292 otherwise, it points to the statement logically following it. */
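/* As a rough sketch (the SSA names below are purely illustrative), for
an 8-byte load the code emitted by this function looks like:

_1 = (uintptr_type) base;
_2 = _1 >> ASAN_SHADOW_SHIFT;
_3 = _2 + targetm.asan_shadow_offset ();
_4 = (signed char *) _3;
_5 = *_4;
if (_5 != 0)
__asan_report_load8 (_1);

with the 1, 2 and 4 byte variants adding the slow path test on the low
bits of the address, as described in the body below. */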
1294 static void
1295 build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
1296 bool before_p, bool is_store, int size_in_bytes)
1298 gimple_stmt_iterator gsi;
1299 basic_block then_bb, else_bb;
1300 tree t, base_addr, shadow;
1301 gimple g;
1302 tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
1303 tree shadow_type = TREE_TYPE (shadow_ptr_type);
1304 tree uintptr_type
1305 = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
1306 tree base_ssa = base;
1308 /* Get an iterator on the point where we can add the condition
1309 statement for the instrumentation. */
1310 gsi = create_cond_insert_point (iter, before_p,
1311 /*then_more_likely_p=*/false,
1312 /*create_then_fallthru_edge=*/false,
1313 &then_bb,
1314 &else_bb);
1316 base = unshare_expr (base);
1318 /* BASE can already be an SSA_NAME; in that case, do not create a
1319 new SSA_NAME for it. */
1320 if (TREE_CODE (base) != SSA_NAME)
1322 g = gimple_build_assign_with_ops (TREE_CODE (base),
1323 make_ssa_name (TREE_TYPE (base), NULL),
1324 base, NULL_TREE);
1325 gimple_set_location (g, location);
1326 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1327 base_ssa = gimple_assign_lhs (g);
1330 g = gimple_build_assign_with_ops (NOP_EXPR,
1331 make_ssa_name (uintptr_type, NULL),
1332 base_ssa, NULL_TREE);
1333 gimple_set_location (g, location);
1334 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1335 base_addr = gimple_assign_lhs (g);
1337 /* Build
1338 (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
1340 t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1341 g = gimple_build_assign_with_ops (RSHIFT_EXPR,
1342 make_ssa_name (uintptr_type, NULL),
1343 base_addr, t);
1344 gimple_set_location (g, location);
1345 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1347 t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
1348 g = gimple_build_assign_with_ops (PLUS_EXPR,
1349 make_ssa_name (uintptr_type, NULL),
1350 gimple_assign_lhs (g), t);
1351 gimple_set_location (g, location);
1352 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1354 g = gimple_build_assign_with_ops (NOP_EXPR,
1355 make_ssa_name (shadow_ptr_type, NULL),
1356 gimple_assign_lhs (g), NULL_TREE);
1357 gimple_set_location (g, location);
1358 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1360 t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1361 build_int_cst (shadow_ptr_type, 0));
1362 g = gimple_build_assign_with_ops (MEM_REF,
1363 make_ssa_name (shadow_type, NULL),
1364 t, NULL_TREE);
1365 gimple_set_location (g, location);
1366 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1367 shadow = gimple_assign_lhs (g);
1369 if (size_in_bytes < 8)
1371 /* Slow path for 1, 2 and 4 byte accesses.
1372 Test (shadow != 0)
1373 & ((base_addr & 7) + (size_in_bytes - 1) >= shadow). */
1374 gimple_seq seq = NULL;
1375 gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
1376 gimple_seq_add_stmt (&seq, shadow_test);
1377 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, base_addr, 7));
1378 gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
1379 gimple_seq_last (seq)));
1380 if (size_in_bytes > 1)
1381 gimple_seq_add_stmt (&seq,
1382 build_assign (PLUS_EXPR, gimple_seq_last (seq),
1383 size_in_bytes - 1));
1384 gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, gimple_seq_last (seq),
1385 shadow));
1386 gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
1387 gimple_seq_last (seq)));
1388 t = gimple_assign_lhs (gimple_seq_last (seq));
1389 gimple_seq_set_location (seq, location);
1390 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
1392 else
1393 t = shadow;
1395 g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
1396 NULL_TREE, NULL_TREE);
1397 gimple_set_location (g, location);
1398 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1400 /* Generate call to the run-time library (e.g. __asan_report_load8). */
1401 gsi = gsi_start_bb (then_bb);
1402 g = gimple_build_call (report_error_func (is_store, size_in_bytes),
1403 1, base_addr);
1404 gimple_set_location (g, location);
1405 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1407 *iter = gsi_start_bb (else_bb);
1410 /* If T represents a memory access, add instrumentation code before ITER.
1411 LOCATION is source code location.
1412 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1414 static void
1415 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1416 location_t location, bool is_store)
1418 tree type, base;
1419 HOST_WIDE_INT size_in_bytes;
1421 type = TREE_TYPE (t);
1422 switch (TREE_CODE (t))
1424 case ARRAY_REF:
1425 case COMPONENT_REF:
1426 case INDIRECT_REF:
1427 case MEM_REF:
1428 break;
1429 default:
1430 return;
1433 size_in_bytes = int_size_in_bytes (type);
1434 if ((size_in_bytes & (size_in_bytes - 1)) != 0
1435 || (unsigned HOST_WIDE_INT) size_in_bytes - 1 >= 16)
1436 return;
1438 HOST_WIDE_INT bitsize, bitpos;
1439 tree offset;
1440 enum machine_mode mode;
1441 int volatilep = 0, unsignedp = 0;
1442 get_inner_reference (t, &bitsize, &bitpos, &offset,
1443 &mode, &unsignedp, &volatilep, false);
1444 if (bitpos % (size_in_bytes * BITS_PER_UNIT)
1445 || bitsize != size_in_bytes * BITS_PER_UNIT)
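/* The access does not cover a whole, naturally aligned object. For a
bit-field reference, instrument the access through its underlying
representative field instead; otherwise give up. */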
1447 if (TREE_CODE (t) == COMPONENT_REF
1448 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1450 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1451 instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1452 TREE_OPERAND (t, 0), repr,
1453 NULL_TREE), location, is_store);
1455 return;
1458 base = build_fold_addr_expr (t);
1459 if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1461 build_check_stmt (location, base, iter, /*before_p=*/true,
1462 is_store, size_in_bytes);
1463 update_mem_ref_hash_table (base, size_in_bytes);
1464 update_mem_ref_hash_table (t, size_in_bytes);
1469 /* Instrument an access to a contiguous memory region that starts at
1470 the address pointed to by BASE, over a length of LEN (expressed in
1471 the sizeof (*BASE) bytes). ITER points to the instruction before
1472 which the instrumentation instructions must be inserted. LOCATION
1473 is the source location that the instrumentation instructions must
1474 have. If IS_STORE is true, then the memory access is a store;
1475 otherwise, it's a load. */
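/* For instance, for a call like memcpy (d, s, n) this results in checks
of the first and last bytes of both regions (s[0], s[n-1], d[0] and
d[n-1]), guarded by an "n != 0" test when N is not a compile-time
constant. */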
1477 static void
1478 instrument_mem_region_access (tree base, tree len,
1479 gimple_stmt_iterator *iter,
1480 location_t location, bool is_store)
1482 if (!POINTER_TYPE_P (TREE_TYPE (base))
1483 || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1484 || integer_zerop (len))
1485 return;
1487 gimple_stmt_iterator gsi = *iter;
1489 basic_block fallthrough_bb = NULL, then_bb = NULL;
1491 /* If the beginning of the memory region has already been
1492 instrumented, do not instrument it. */
1493 bool start_instrumented = has_mem_ref_been_instrumented (base, 1);
1495 /* If the end of the memory region has already been instrumented, do
1496 not instrument it. */
1497 tree end = asan_mem_ref_get_end (base, len);
1498 bool end_instrumented = has_mem_ref_been_instrumented (end, 1);
1500 if (start_instrumented && end_instrumented)
1501 return;
1503 if (!is_gimple_constant (len))
1505 /* So, the length of the memory area to asan-protect is
1506 non-constant. Let's guard the generated instrumentation code
1507 like:
1509 if (len != 0)
1511 //asan instrumentation code goes here.
1513 // fallthrough instructions, starting with *ITER. */
1515 gimple g = gimple_build_cond (NE_EXPR,
1516 len,
1517 build_int_cst (TREE_TYPE (len), 0),
1518 NULL_TREE, NULL_TREE);
1519 gimple_set_location (g, location);
1520 insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
1521 &then_bb, &fallthrough_bb);
1522 /* Note that fallthrough_bb starts with the statement that was
1523 pointed to by ITER. */
1525 /* The 'then block' of the 'if (len != 0)' condition is where
1526 we'll generate the asan instrumentation code now. */
1527 gsi = gsi_last_bb (then_bb);
1530 if (!start_instrumented)
1532 /* Instrument the beginning of the memory region to be accessed,
1533 and arrange for the rest of the instrumentation code to be
1534 inserted in the then block *after* the current gsi. */
1535 build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);
1537 if (then_bb)
1538 /* We are in the case where the length of the region is not
1539 constant; so instrumentation code is being generated in the
1540 'then block' of the 'if (len != 0)' condition. Let's arrange
1541 for the subsequent instrumentation statements to go in the
1542 'then block'. */
1543 gsi = gsi_last_bb (then_bb);
1544 else
1546 *iter = gsi;
1547 /* Don't remember this access as instrumented, if length
1548 is unknown. It might be zero and not actually
1549 instrumented, so we can't rely on it being instrumented. */
1550 update_mem_ref_hash_table (base, 1);
1554 if (end_instrumented)
1555 return;
1557 /* We want to instrument the access at the end of the memory region,
1558 which is at (base + len - 1). */
1560 /* offset = len - 1; */
1561 len = unshare_expr (len);
1562 tree offset;
1563 gimple_seq seq = NULL;
1564 if (TREE_CODE (len) == INTEGER_CST)
1565 offset = fold_build2 (MINUS_EXPR, size_type_node,
1566 fold_convert (size_type_node, len),
1567 build_int_cst (size_type_node, 1));
1568 else
1570 gimple g;
1571 tree t;
1573 if (TREE_CODE (len) != SSA_NAME)
1575 t = make_ssa_name (TREE_TYPE (len), NULL);
1576 g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
1577 gimple_set_location (g, location);
1578 gimple_seq_add_stmt_without_update (&seq, g);
1579 len = t;
1581 if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
1583 t = make_ssa_name (size_type_node, NULL);
1584 g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
1585 gimple_set_location (g, location);
1586 gimple_seq_add_stmt_without_update (&seq, g);
1587 len = t;
1590 t = make_ssa_name (size_type_node, NULL);
1591 g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
1592 build_int_cst (size_type_node, 1));
1593 gimple_set_location (g, location);
1594 gimple_seq_add_stmt_without_update (&seq, g);
1595 offset = gimple_assign_lhs (g);
1598 /* _1 = base; */
1599 base = unshare_expr (base);
1600 gimple region_end =
1601 gimple_build_assign_with_ops (TREE_CODE (base),
1602 make_ssa_name (TREE_TYPE (base), NULL),
1603 base, NULL);
1604 gimple_set_location (region_end, location);
1605 gimple_seq_add_stmt_without_update (&seq, region_end);
1607 /* _2 = _1 + offset; */
1608 region_end =
1609 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1610 make_ssa_name (TREE_TYPE (base), NULL),
1611 gimple_assign_lhs (region_end),
1612 offset);
1613 gimple_set_location (region_end, location);
1614 gimple_seq_add_stmt_without_update (&seq, region_end);
1615 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
1617 /* instrument access at _2; */
1618 gsi = gsi_for_stmt (region_end);
1619 build_check_stmt (location, gimple_assign_lhs (region_end),
1620 &gsi, /*before_p=*/false, is_store, 1);
1622 if (then_bb == NULL)
1623 update_mem_ref_hash_table (end, 1);
1625 *iter = gsi_for_stmt (gsi_stmt (*iter));
1628 /* Instrument the call (to the builtin strlen function) pointed to by
1629 ITER.
1631 This function instruments the access to the first byte of the
1632 argument, right before the call. After the call it instruments the
1633 access to the last byte of the argument; it uses the result of the
1634 call to deduce the offset of that last byte.
1636 Upon completion, iff the call has actually been instrumented, this
1637 function returns TRUE and *ITER points to the statement logically
1638 following the built-in strlen function call *ITER was initially
1639 pointing to. Otherwise, the function returns FALSE and *ITER
1640 remains unchanged. */
1642 static bool
1643 instrument_strlen_call (gimple_stmt_iterator *iter)
1645 gimple call = gsi_stmt (*iter);
1646 gcc_assert (is_gimple_call (call));
1648 tree callee = gimple_call_fndecl (call);
1649 gcc_assert (is_builtin_fn (callee)
1650 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
1651 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);
1653 tree len = gimple_call_lhs (call);
1654 if (len == NULL)
1655 /* Some passes might clear the return value of the strlen call;
1656 bail out in that case. Return FALSE as we are not advancing
1657 *ITER. */
1658 return false;
1659 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));
1661 location_t loc = gimple_location (call);
1662 tree str_arg = gimple_call_arg (call, 0);
1664 /* Instrument the access to the first byte of str_arg, i.e.:
1666 _1 = str_arg; instrument (_1); */
1667 tree cptr_type = build_pointer_type (char_type_node);
1668 gimple str_arg_ssa =
1669 gimple_build_assign_with_ops (NOP_EXPR,
1670 make_ssa_name (cptr_type, NULL),
1671 str_arg, NULL);
1672 gimple_set_location (str_arg_ssa, loc);
1673 gimple_stmt_iterator gsi = *iter;
1674 gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
1675 build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
1676 /*before_p=*/false, /*is_store=*/false, 1);
1678 /* If we initially had an instruction like:
1680 int n = strlen (str)
1682 we now want to instrument the access to str[n], after the
1683 instruction above. */
1685 /* So let's build the access to str[n], that is, access through the
1686 pointer_plus expr: (_1 + len). */
1687 gimple stmt =
1688 gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1689 make_ssa_name (cptr_type, NULL),
1690 gimple_assign_lhs (str_arg_ssa),
1691 len);
1692 gimple_set_location (stmt, loc);
1693 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
1695 build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
1696 /*before_p=*/false, /*is_store=*/false, 1);
1698 /* Ensure that iter points to the statement logically following the
1699 one it was initially pointing to. */
1700 *iter = gsi;
1701 /* As *ITER has been advanced to point to the next statement, let's
1702 return true to inform transform_statements that it shouldn't
1703 advance *ITER anymore; otherwise it will skip that next
1704 statement, which wouldn't be instrumented. */
1705 return true;
1708 /* Instrument the call to a built-in memory access function that is
1709 pointed to by the iterator ITER.
1711 Upon completion, return TRUE iff *ITER has been advanced to the
1712 statement following the one it was originally pointing to. */
1714 static bool
1715 instrument_builtin_call (gimple_stmt_iterator *iter)
1717 bool iter_advanced_p = false;
1718 gimple call = gsi_stmt (*iter);
1720 gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1722 tree callee = gimple_call_fndecl (call);
1723 location_t loc = gimple_location (call);
1725 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
1726 iter_advanced_p = instrument_strlen_call (iter);
1727 else
1729 asan_mem_ref src0, src1, dest;
1730 asan_mem_ref_init (&src0, NULL, 1);
1731 asan_mem_ref_init (&src1, NULL, 1);
1732 asan_mem_ref_init (&dest, NULL, 1);
1734 tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1735 bool src0_is_store = false, src1_is_store = false,
1736 dest_is_store = false, dest_is_deref = false;
1738 if (get_mem_refs_of_builtin_call (call,
1739 &src0, &src0_len, &src0_is_store,
1740 &src1, &src1_len, &src1_is_store,
1741 &dest, &dest_len, &dest_is_store,
1742 &dest_is_deref))
1744 if (dest_is_deref)
1746 instrument_derefs (iter, dest.start, loc, dest_is_store);
1747 gsi_next (iter);
1748 iter_advanced_p = true;
1750 else if (src0_len || src1_len || dest_len)
1752 if (src0.start != NULL_TREE)
1753 instrument_mem_region_access (src0.start, src0_len,
1754 iter, loc, /*is_store=*/false);
1755 if (src1.start != NULL_TREE)
1756 instrument_mem_region_access (src1.start, src1_len,
1757 iter, loc, /*is_store=*/false);
1758 if (dest.start != NULL_TREE)
1759 instrument_mem_region_access (dest.start, dest_len,
1760 iter, loc, /*is_store=*/true);
1761 *iter = gsi_for_stmt (call);
1762 gsi_next (iter);
1763 iter_advanced_p = true;
1767 return iter_advanced_p;
1770 /* Instrument the assignment statement pointed to by the iterator
1771 ITER if it is subject to instrumentation. Return TRUE iff
1772 instrumentation actually happened. In that case, the iterator ITER
1773 is advanced to the next logical statement following the one initially
1774 pointed to by ITER, and the memory reference whose access has been
1775 instrumented is added to the memory references hash table. */
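/* For example (illustrative GIMPLE), the aggregate copy "a = b" gets
   both a store check on A and a load check on B, while "x_1 = b.f"
   only gets a load check.  */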
1777 static bool
1778 maybe_instrument_assignment (gimple_stmt_iterator *iter)
1780 gimple s = gsi_stmt (*iter);
1782 gcc_assert (gimple_assign_single_p (s));
1784 tree ref_expr = NULL_TREE;
1785 bool is_store, is_instrumented = false;
1787 if (gimple_store_p (s))
1789 ref_expr = gimple_assign_lhs (s);
1790 is_store = true;
1791 instrument_derefs (iter, ref_expr,
1792 gimple_location (s),
1793 is_store);
1794 is_instrumented = true;
1797 if (gimple_assign_load_p (s))
1799 ref_expr = gimple_assign_rhs1 (s);
1800 is_store = false;
1801 instrument_derefs (iter, ref_expr,
1802 gimple_location (s),
1803 is_store);
1804 is_instrumented = true;
1807 if (is_instrumented)
1808 gsi_next (iter);
1810 return is_instrumented;
1813 /* Instrument the function call pointed to by the iterator ITER, if it
1814 is subject to instrumentation. At the moment, the only function
1815 calls that are instrumented are some built-in functions that access
1816 memory. Look at instrument_builtin_call to learn more.
1818 Upon completion return TRUE iff *ITER was advanced to the statement
1819 following the one it was originally pointing to. */
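/* In addition, a call to __asan_handle_no_return is inserted before
   calls that cannot return (other than __builtin_unreachable and
   __builtin_trap), so that the run time can unpoison the stack; the
   frames skipped by such a call would otherwise keep stale red zones.  */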
1821 static bool
1822 maybe_instrument_call (gimple_stmt_iterator *iter)
1824 gimple stmt = gsi_stmt (*iter);
1825 bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
1827 if (is_builtin && instrument_builtin_call (iter))
1828 return true;
1830 if (gimple_call_noreturn_p (stmt))
1832 if (is_builtin)
1834 tree callee = gimple_call_fndecl (stmt);
1835 switch (DECL_FUNCTION_CODE (callee))
1837 case BUILT_IN_UNREACHABLE:
1838 case BUILT_IN_TRAP:
1839 /* Don't instrument these. */
1840 return false;
1843 tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
1844 gimple g = gimple_build_call (decl, 0);
1845 gimple_set_location (g, gimple_location (stmt));
1846 gsi_insert_before (iter, g, GSI_SAME_STMT);
1848 return false;
1851 /* Walk each instruction of every basic block and instrument those
1852 that represent memory references: loads, stores, or function calls.
1853 Within an extended basic block, this function avoids re-instrumenting
1854 memory references that have already been instrumented. */
1856 static void
1857 transform_statements (void)
1859 basic_block bb, last_bb = NULL;
1860 gimple_stmt_iterator i;
1861 int saved_last_basic_block = last_basic_block;
1863 FOR_EACH_BB (bb)
1865 basic_block prev_bb = bb;
1867 if (bb->index >= saved_last_basic_block) continue;
1869 /* Flush the mem ref hash table, if the current bb doesn't have
1870 exactly one predecessor, or if that predecessor (skipping
1871 over asan-created basic blocks) isn't the last processed
1872 basic block. Thus we effectively flush on extended basic
1873 block boundaries. */
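/* For instance, if the only predecessor of the current bb, once the
   basic blocks created by asan itself are skipped, is the bb processed
   just before it, the table is kept; on any other CFG shape it is
   flushed.  */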
1874 while (single_pred_p (prev_bb))
1876 prev_bb = single_pred (prev_bb);
1877 if (prev_bb->index < saved_last_basic_block)
1878 break;
1880 if (prev_bb != last_bb)
1881 empty_mem_ref_hash_table ();
1882 last_bb = bb;
1884 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
1886 gimple s = gsi_stmt (i);
1888 if (has_stmt_been_instrumented_p (s))
1889 gsi_next (&i);
1890 else if (gimple_assign_single_p (s)
1891 && maybe_instrument_assignment (&i))
1892 /* Nothing to do as maybe_instrument_assignment advanced
1893 the iterator I. */;
1894 else if (is_gimple_call (s) && maybe_instrument_call (&i))
1895 /* Nothing to do as maybe_instrument_call
1896 advanced the iterator I. */;
1897 else
1899 /* No instrumentation happened.
1901 If the current instruction is a function call that
1902 might free something, let's forget about the memory
1903 references that got instrumented. Otherwise we might
1904 miss some instrumentation opportunities. */
1905 if (is_gimple_call (s) && !nonfreeing_call_p (s))
1906 empty_mem_ref_hash_table ();
1908 gsi_next (&i);
1912 free_mem_ref_resources ();
1915 /* Build
1916 struct __asan_global
1918 const void *__beg;
1919 uptr __size;
1920 uptr __size_with_redzone;
1921 const void *__name;
1922 uptr __has_dynamic_init;
1923 } type. */
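/* Roughly: __beg is the address of the protected global (or of its
   local .LASAN* alias), __size its original size, __size_with_redzone
   that size plus the trailing red zone, __name points to the
   "name (file)" string built in asan_add_global, and __has_dynamic_init
   is currently always emitted as 0.  This is meant to mirror the
   descriptor layout that the run-time library's __asan_register_globals
   expects.  */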
1925 static tree
1926 asan_global_struct (void)
1928 static const char *field_names[5]
1929 = { "__beg", "__size", "__size_with_redzone",
1930 "__name", "__has_dynamic_init" };
1931 tree fields[5], ret;
1932 int i;
1934 ret = make_node (RECORD_TYPE);
1935 for (i = 0; i < 5; i++)
1937 fields[i]
1938 = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
1939 get_identifier (field_names[i]),
1940 (i == 0 || i == 3) ? const_ptr_type_node
1941 : pointer_sized_int_node);
1942 DECL_CONTEXT (fields[i]) = ret;
1943 if (i)
1944 DECL_CHAIN (fields[i - 1]) = fields[i];
1946 TYPE_FIELDS (ret) = fields[0];
1947 TYPE_NAME (ret) = get_identifier ("__asan_global");
1948 layout_type (ret);
1949 return ret;
1952 /* Append the description of a single global DECL to the vector V.
1953 TYPE is the __asan_global struct type as returned by asan_global_struct. */
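/* For example, for a global "int g;" defined in foo.c (an illustrative
   name and file), the element appended to V looks roughly like

     { &g (or its .LASAN* alias), sizeof (g),
       sizeof (g) + <red zone>, "g (foo.c)", 0 }

   with the actual sizes depending on the target.  */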
1955 static void
1956 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
1958 tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
1959 unsigned HOST_WIDE_INT size;
1960 tree str_cst, refdecl = decl;
1961 vec<constructor_elt, va_gc> *vinner = NULL;
1963 pretty_printer asan_pp;
1965 if (DECL_NAME (decl))
1966 pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1967 else
1968 pp_string (&asan_pp, "<unknown>");
1969 pp_space (&asan_pp);
1970 pp_left_paren (&asan_pp);
1971 pp_string (&asan_pp, main_input_filename);
1972 pp_right_paren (&asan_pp);
1973 str_cst = asan_pp_string (&asan_pp);
1975 if (asan_needs_local_alias (decl))
1977 char buf[20];
1978 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
1979 refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
1980 VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
1981 TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
1982 TREE_READONLY (refdecl) = TREE_READONLY (decl);
1983 TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
1984 DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
1985 DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
1986 DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
1987 TREE_STATIC (refdecl) = 1;
1988 TREE_PUBLIC (refdecl) = 0;
1989 TREE_USED (refdecl) = 1;
1990 assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
1993 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
1994 fold_convert (const_ptr_type_node,
1995 build_fold_addr_expr (refdecl)));
1996 size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
1997 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
1998 size += asan_red_zone_size (size);
1999 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2000 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2001 fold_convert (const_ptr_type_node, str_cst));
2002 CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, 0));
2003 init = build_constructor (type, vinner);
2004 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2007 /* Initialize the sanitizer.def builtins if the front end hasn't already initialized them. */
2008 void
2009 initialize_sanitizer_builtins (void)
2011 tree decl;
2013 if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2014 return;
2016 tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2017 tree BT_FN_VOID_PTR
2018 = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2019 tree BT_FN_VOID_PTR_PTR_PTR
2020 = build_function_type_list (void_type_node, ptr_type_node,
2021 ptr_type_node, ptr_type_node, NULL_TREE);
2022 tree BT_FN_VOID_PTR_PTRMODE
2023 = build_function_type_list (void_type_node, ptr_type_node,
2024 pointer_sized_int_node, NULL_TREE);
2025 tree BT_FN_VOID_INT
2026 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2027 tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2028 tree BT_FN_IX_CONST_VPTR_INT[5];
2029 tree BT_FN_IX_VPTR_IX_INT[5];
2030 tree BT_FN_VOID_VPTR_IX_INT[5];
2031 tree vptr
2032 = build_pointer_type (build_qualified_type (void_type_node,
2033 TYPE_QUAL_VOLATILE));
2034 tree cvptr
2035 = build_pointer_type (build_qualified_type (void_type_node,
2036 TYPE_QUAL_VOLATILE
2037 |TYPE_QUAL_CONST));
2038 tree boolt
2039 = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2040 int i;
2041 for (i = 0; i < 5; i++)
2043 tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2044 BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2045 = build_function_type_list (boolt, vptr, ptr_type_node, ix,
2046 integer_type_node, integer_type_node,
2047 NULL_TREE);
2048 BT_FN_IX_CONST_VPTR_INT[i]
2049 = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2050 BT_FN_IX_VPTR_IX_INT[i]
2051 = build_function_type_list (ix, vptr, ix, integer_type_node,
2052 NULL_TREE);
2053 BT_FN_VOID_VPTR_IX_INT[i]
2054 = build_function_type_list (void_type_node, vptr, ix,
2055 integer_type_node, NULL_TREE);
2057 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2058 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2059 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2060 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2061 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2062 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2063 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2064 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2065 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2066 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2067 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2068 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2069 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2070 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2071 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2072 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2073 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2074 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2075 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2076 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2077 #undef ATTR_NOTHROW_LEAF_LIST
2078 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2079 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2080 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2081 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2082 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2083 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2084 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2085 ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2086 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2087 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2088 /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2089 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2090 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2091 /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2092 #undef DEF_SANITIZER_BUILTIN
2093 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2094 decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
2095 BUILT_IN_NORMAL, NAME, NULL_TREE); \
2096 set_call_expr_flags (decl, ATTRS); \
2097 set_builtin_decl (ENUM, decl, true);
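/* As an illustration, a sanitizer.def entry such as

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_REPORT_LOAD1,
                            "__asan_report_load1", BT_FN_VOID_PTR,
                            ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST)

   expands to an add_builtin_function call declaring
   __builtin___asan_report_load1 with the void (void *) type built above
   and the noreturn/nothrow/leaf flags; see sanitizer.def for the
   authoritative list of entries and attributes.  */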
2099 #include "sanitizer.def"
2101 #undef DEF_SANITIZER_BUILTIN
2104 /* Called via htab_traverse. Count the number of emitted
2105 STRING_CSTs in the constant hash table. */
2107 static int
2108 count_string_csts (void **slot, void *data)
2110 struct constant_descriptor_tree *desc
2111 = (struct constant_descriptor_tree *) *slot;
2112 if (TREE_CODE (desc->value) == STRING_CST
2113 && TREE_ASM_WRITTEN (desc->value)
2114 && asan_protect_global (desc->value))
2115 ++*((unsigned HOST_WIDE_INT *) data);
2116 return 1;
2119 /* Helper structure to pass two parameters to
2120 add_string_csts. */
2122 struct asan_add_string_csts_data
2124 tree type;
2125 vec<constructor_elt, va_gc> *v;
2128 /* Called via htab_traverse. Call asan_add_global
2129 on emitted STRING_CSTs from the constant hash table. */
2131 static int
2132 add_string_csts (void **slot, void *data)
2134 struct constant_descriptor_tree *desc
2135 = (struct constant_descriptor_tree *) *slot;
2136 if (TREE_CODE (desc->value) == STRING_CST
2137 && TREE_ASM_WRITTEN (desc->value)
2138 && asan_protect_global (desc->value))
2140 struct asan_add_string_csts_data *aascd
2141 = (struct asan_add_string_csts_data *) data;
2142 asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2143 aascd->type, aascd->v);
2145 return 1;
2148 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2149 invoke ggc_collect. */
2150 static GTY(()) tree asan_ctor_statements;
2152 /* Module-level instrumentation.
2153 - Insert __asan_init() into the list of CTORs.
2154 - TODO: insert redzones around globals.
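/* Roughly, for a translation unit containing at least one protected
   global, the emitted code looks like this sketch (symbol names are
   illustrative):

     static struct __asan_global .LASAN0[<gcount>] = { ... };

     module ctor (priority MAX_RESERVED_INIT_PRIORITY - 1):
       __asan_init ();
       __asan_register_globals (&.LASAN0, <gcount>);

     module dtor (same priority):
       __asan_unregister_globals (&.LASAN0, <gcount>);  */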
2157 void
2158 asan_finish_file (void)
2160 struct varpool_node *vnode;
2161 unsigned HOST_WIDE_INT gcount = 0;
2163 if (shadow_ptr_types[0] == NULL_TREE)
2164 asan_init_shadow_ptr_types ();
2165 /* Avoid instrumenting code in the asan ctors/dtors.
2166 We don't need to insert padding after the description strings,
2167 nor after the .LASAN* array. */
2168 flag_sanitize &= ~SANITIZE_ADDRESS;
2170 tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2171 append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2172 FOR_EACH_DEFINED_VARIABLE (vnode)
2173 if (TREE_ASM_WRITTEN (vnode->symbol.decl)
2174 && asan_protect_global (vnode->symbol.decl))
2175 ++gcount;
2176 htab_t const_desc_htab = constant_pool_htab ();
2177 htab_traverse (const_desc_htab, count_string_csts, &gcount);
2178 if (gcount)
2180 tree type = asan_global_struct (), var, ctor;
2181 tree dtor_statements = NULL_TREE;
2182 vec<constructor_elt, va_gc> *v;
2183 char buf[20];
2185 type = build_array_type_nelts (type, gcount);
2186 ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2187 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2188 type);
2189 TREE_STATIC (var) = 1;
2190 TREE_PUBLIC (var) = 0;
2191 DECL_ARTIFICIAL (var) = 1;
2192 DECL_IGNORED_P (var) = 1;
2193 vec_alloc (v, gcount);
2194 FOR_EACH_DEFINED_VARIABLE (vnode)
2195 if (TREE_ASM_WRITTEN (vnode->symbol.decl)
2196 && asan_protect_global (vnode->symbol.decl))
2197 asan_add_global (vnode->symbol.decl, TREE_TYPE (type), v);
2198 struct asan_add_string_csts_data aascd;
2199 aascd.type = TREE_TYPE (type);
2200 aascd.v = v;
2201 htab_traverse (const_desc_htab, add_string_csts, &aascd);
2202 ctor = build_constructor (type, v);
2203 TREE_CONSTANT (ctor) = 1;
2204 TREE_STATIC (ctor) = 1;
2205 DECL_INITIAL (var) = ctor;
2206 varpool_assemble_decl (varpool_node_for_decl (var));
2208 fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2209 tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2210 append_to_statement_list (build_call_expr (fn, 2,
2211 build_fold_addr_expr (var),
2212 gcount_tree),
2213 &asan_ctor_statements);
2215 fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2216 append_to_statement_list (build_call_expr (fn, 2,
2217 build_fold_addr_expr (var),
2218 gcount_tree),
2219 &dtor_statements);
2220 cgraph_build_static_cdtor ('D', dtor_statements,
2221 MAX_RESERVED_INIT_PRIORITY - 1);
2223 cgraph_build_static_cdtor ('I', asan_ctor_statements,
2224 MAX_RESERVED_INIT_PRIORITY - 1);
2225 flag_sanitize |= SANITIZE_ADDRESS;
2228 /* Instrument the current function. */
2230 static unsigned int
2231 asan_instrument (void)
2233 if (shadow_ptr_types[0] == NULL_TREE)
2234 asan_init_shadow_ptr_types ();
2235 transform_statements ();
2236 return 0;
2239 static bool
2240 gate_asan (void)
2242 return (flag_sanitize & SANITIZE_ADDRESS) != 0
2243 && !lookup_attribute ("no_sanitize_address",
2244 DECL_ATTRIBUTES (current_function_decl));
2247 namespace {
2249 const pass_data pass_data_asan =
2251 GIMPLE_PASS, /* type */
2252 "asan", /* name */
2253 OPTGROUP_NONE, /* optinfo_flags */
2254 true, /* has_gate */
2255 true, /* has_execute */
2256 TV_NONE, /* tv_id */
2257 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2258 0, /* properties_provided */
2259 0, /* properties_destroyed */
2260 0, /* todo_flags_start */
2261 ( TODO_verify_flow | TODO_verify_stmts
2262 | TODO_update_ssa ), /* todo_flags_finish */
2265 class pass_asan : public gimple_opt_pass
2267 public:
2268 pass_asan(gcc::context *ctxt)
2269 : gimple_opt_pass(pass_data_asan, ctxt)
2272 /* opt_pass methods: */
2273 opt_pass * clone () { return new pass_asan (ctxt_); }
2274 bool gate () { return gate_asan (); }
2275 unsigned int execute () { return asan_instrument (); }
2277 }; // class pass_asan
2279 } // anon namespace
2281 gimple_opt_pass *
2282 make_pass_asan (gcc::context *ctxt)
2284 return new pass_asan (ctxt);
2287 static bool
2288 gate_asan_O0 (void)
2290 return !optimize && gate_asan ();
2293 namespace {
2295 const pass_data pass_data_asan_O0 =
2297 GIMPLE_PASS, /* type */
2298 "asan0", /* name */
2299 OPTGROUP_NONE, /* optinfo_flags */
2300 true, /* has_gate */
2301 true, /* has_execute */
2302 TV_NONE, /* tv_id */
2303 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2304 0, /* properties_provided */
2305 0, /* properties_destroyed */
2306 0, /* todo_flags_start */
2307 ( TODO_verify_flow | TODO_verify_stmts
2308 | TODO_update_ssa ), /* todo_flags_finish */
2311 class pass_asan_O0 : public gimple_opt_pass
2313 public:
2314 pass_asan_O0(gcc::context *ctxt)
2315 : gimple_opt_pass(pass_data_asan_O0, ctxt)
2318 /* opt_pass methods: */
2319 bool gate () { return gate_asan_O0 (); }
2320 unsigned int execute () { return asan_instrument (); }
2322 }; // class pass_asan_O0
2324 } // anon namespace
2326 gimple_opt_pass *
2327 make_pass_asan_O0 (gcc::context *ctxt)
2329 return new pass_asan_O0 (ctxt);
2332 #include "gt-asan.h"