1 /* AddressSanitizer, a fast memory error detector.
2 Copyright (C) 2012-2013 Free Software Foundation, Inc.
3 Contributed by Kostya Serebryany <kcc@google.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
26 #include "tree-iterator.h"
28 #include "tree-pass.h"
30 #include "gimple-pretty-print.h"
36 #include "langhooks.h"
37 #include "hash-table.h"
38 #include "alloc-pool.h"
41 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
42 with <2x slowdown on average.
44 The tool consists of two parts:
45 instrumentation module (this file) and a run-time library.
46 The instrumentation module adds a run-time check before every memory insn.
47 For a 8- or 16- byte load accessing address X:
48 ShadowAddr = (X >> 3) + Offset
49 ShadowValue = *(char*)ShadowAddr; // *(short*) for 16-byte access.
51 __asan_report_load8(X);
52 For a load of N bytes (N=1, 2 or 4) from address X:
53 ShadowAddr = (X >> 3) + Offset
54 ShadowValue = *(char*)ShadowAddr;
56 if ((X & 7) + N - 1 > ShadowValue)
57 __asan_report_loadN(X);
58 Stores are instrumented similarly, but using __asan_report_storeN functions.
59 A call too __asan_init() is inserted to the list of module CTORs.
61 The run-time library redefines malloc (so that redzone are inserted around
62 the allocated memory) and free (so that reuse of free-ed memory is delayed),
63 provides __asan_report* and __asan_init functions.
66 http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
68 The current implementation supports detection of out-of-bounds and
69 use-after-free in the heap, on the stack and for global variables.
71 [Protection of stack variables]
73 To understand how detection of out-of-bounds and use-after-free works
74 for stack variables, lets look at this example on x86_64 where the
89 For this function, the stack protected by asan will be organized as
90 follows, from the top of the stack to the bottom:
92 Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
94 Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
95 the next slot be 32 bytes aligned; this one is called Partial
96 Redzone; this 32 bytes alignment is an asan constraint]
98 Slot 3/ [24 bytes for variable 'a']
100 Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
102 Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2]
104 Slot 6/ [8 bytes for variable 'b']
106 Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
109 The 32 bytes of LEFT red zone at the bottom of the stack can be
112 1/ The first 8 bytes contain a magical asan number that is always
115 2/ The following 8 bytes contains a pointer to a string (to be
116 parsed at runtime by the runtime asan library), which format is
119 "<function-name> <space> <num-of-variables-on-the-stack>
120 (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
121 <length-of-var-in-bytes> ){n} "
123 where '(...){n}' means the content inside the parenthesis occurs 'n'
124 times, with 'n' being the number of variables on the stack.
126 3/ The following 16 bytes of the red zone have no particular
129 The shadow memory for that stack layout is going to look like this:
131 - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
132 The F1 byte pattern is a magic number called
133 ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
134 the memory for that shadow byte is part of a the LEFT red zone
135 intended to seat at the bottom of the variables on the stack.
137 - content of shadow memory 8 bytes for slots 6 and 5:
138 0xF4F4F400. The F4 byte pattern is a magic number
139 called ASAN_STACK_MAGIC_PARTIAL. It flags the fact that the
140 memory region for this shadow byte is a PARTIAL red zone
141 intended to pad a variable A, so that the slot following
142 {A,padding} is 32 bytes aligned.
144 Note that the fact that the least significant byte of this
145 shadow memory content is 00 means that 8 bytes of its
146 corresponding memory (which corresponds to the memory of
147 variable 'b') is addressable.
149 - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
150 The F2 byte pattern is a magic number called
151 ASAN_STACK_MAGIC_MIDDLE. It flags the fact that the memory
152 region for this shadow byte is a MIDDLE red zone intended to
153 seat between two 32 aligned slots of {variable,padding}.
155 - content of shadow memory 8 bytes for slot 3 and 2:
156 0xF4000000. This represents is the concatenation of
157 variable 'a' and the partial red zone following it, like what we
158 had for variable 'b'. The least significant 3 bytes being 00
159 means that the 3 bytes of variable 'a' are addressable.
161 - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
162 The F3 byte pattern is a magic number called
163 ASAN_STACK_MAGIC_RIGHT. It flags the fact that the memory
164 region for this shadow byte is a RIGHT red zone intended to seat
165 at the top of the variables of the stack.
167 Note that the real variable layout is done in expand_used_vars in
168 cfgexpand.c. As far as Address Sanitizer is concerned, it lays out
169 stack variables as well as the different red zones, emits some
170 prologue code to populate the shadow memory as to poison (mark as
171 non-accessible) the regions of the red zones and mark the regions of
172 stack variables as accessible, and emit some epilogue code to
173 un-poison (mark as accessible) the regions of red zones right before
176 [Protection of global variables]
178 The basic idea is to insert a red zone between two global variables
179 and install a constructor function that calls the asan runtime to do
180 the populating of the relevant shadow memory regions at load time.
182 So the global variables are laid out as to insert a red zone between
183 them. The size of the red zones is so that each variable starts on a
186 Then a constructor function is installed so that, for each global
187 variable, it calls the runtime asan library function
188 __asan_register_globals_with an instance of this type:
192 // Address of the beginning of the global variable.
195 // Initial size of the global variable.
198 // Size of the global variable + size of the red zone. This
199 // size is 32 bytes aligned.
200 uptr __size_with_redzone;
202 // Name of the global variable.
205 // This is always set to NULL for now.
206 uptr __has_dynamic_init;
209 A destructor function that calls the runtime asan library function
210 _asan_unregister_globals is also installed. */
212 alias_set_type asan_shadow_set
= -1;
214 /* Pointer types to 1 resp. 2 byte integers in shadow memory. A separate
215 alias set is used for all shadow memory accesses. */
216 static GTY(()) tree shadow_ptr_types
[2];
218 /* Hashtable support for memory references used by gimple
221 /* This type represents a reference to a memory region. */
224 /* The expression of the beginning of the memory region. */
227 /* The size of the access (can be 1, 2, 4, 8, 16 for now). */
231 static alloc_pool asan_mem_ref_alloc_pool
;
233 /* This creates the alloc pool used to store the instances of
234 asan_mem_ref that are stored in the hash table asan_mem_ref_ht. */
237 asan_mem_ref_get_alloc_pool ()
239 if (asan_mem_ref_alloc_pool
== NULL
)
240 asan_mem_ref_alloc_pool
= create_alloc_pool ("asan_mem_ref",
241 sizeof (asan_mem_ref
),
243 return asan_mem_ref_alloc_pool
;
247 /* Initializes an instance of asan_mem_ref. */
250 asan_mem_ref_init (asan_mem_ref
*ref
, tree start
, char access_size
)
253 ref
->access_size
= access_size
;
256 /* Allocates memory for an instance of asan_mem_ref into the memory
257 pool returned by asan_mem_ref_get_alloc_pool and initialize it.
258 START is the address of (or the expression pointing to) the
259 beginning of memory reference. ACCESS_SIZE is the size of the
260 access to the referenced memory. */
263 asan_mem_ref_new (tree start
, char access_size
)
266 (asan_mem_ref
*) pool_alloc (asan_mem_ref_get_alloc_pool ());
268 asan_mem_ref_init (ref
, start
, access_size
);
272 /* This builds and returns a pointer to the end of the memory region
273 that starts at START and of length LEN. */
276 asan_mem_ref_get_end (tree start
, tree len
)
278 if (len
== NULL_TREE
|| integer_zerop (len
))
281 return fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (start
), start
, len
);
284 /* Return a tree expression that represents the end of the referenced
285 memory region. Beware that this function can actually build a new
289 asan_mem_ref_get_end (const asan_mem_ref
*ref
, tree len
)
291 return asan_mem_ref_get_end (ref
->start
, len
);
294 struct asan_mem_ref_hasher
295 : typed_noop_remove
<asan_mem_ref
>
297 typedef asan_mem_ref value_type
;
298 typedef asan_mem_ref compare_type
;
300 static inline hashval_t
hash (const value_type
*);
301 static inline bool equal (const value_type
*, const compare_type
*);
304 /* Hash a memory reference. */
307 asan_mem_ref_hasher::hash (const asan_mem_ref
*mem_ref
)
309 hashval_t h
= iterative_hash_expr (mem_ref
->start
, 0);
310 h
= iterative_hash_hashval_t (h
, mem_ref
->access_size
);
314 /* Compare two memory references. We accept the length of either
315 memory references to be NULL_TREE. */
318 asan_mem_ref_hasher::equal (const asan_mem_ref
*m1
,
319 const asan_mem_ref
*m2
)
321 return (m1
->access_size
== m2
->access_size
322 && operand_equal_p (m1
->start
, m2
->start
, 0));
325 static hash_table
<asan_mem_ref_hasher
> asan_mem_ref_ht
;
327 /* Returns a reference to the hash table containing memory references.
328 This function ensures that the hash table is created. Note that
329 this hash table is updated by the function
330 update_mem_ref_hash_table. */
332 static hash_table
<asan_mem_ref_hasher
> &
333 get_mem_ref_hash_table ()
335 if (!asan_mem_ref_ht
.is_created ())
336 asan_mem_ref_ht
.create (10);
338 return asan_mem_ref_ht
;
341 /* Clear all entries from the memory references hash table. */
344 empty_mem_ref_hash_table ()
346 if (asan_mem_ref_ht
.is_created ())
347 asan_mem_ref_ht
.empty ();
350 /* Free the memory references hash table. */
353 free_mem_ref_resources ()
355 if (asan_mem_ref_ht
.is_created ())
356 asan_mem_ref_ht
.dispose ();
358 if (asan_mem_ref_alloc_pool
)
360 free_alloc_pool (asan_mem_ref_alloc_pool
);
361 asan_mem_ref_alloc_pool
= NULL
;
365 /* Return true iff the memory reference REF has been instrumented. */
368 has_mem_ref_been_instrumented (tree ref
, char access_size
)
371 asan_mem_ref_init (&r
, ref
, access_size
);
373 return (get_mem_ref_hash_table ().find (&r
) != NULL
);
376 /* Return true iff the memory reference REF has been instrumented. */
379 has_mem_ref_been_instrumented (const asan_mem_ref
*ref
)
381 return has_mem_ref_been_instrumented (ref
->start
, ref
->access_size
);
384 /* Return true iff access to memory region starting at REF and of
385 length LEN has been instrumented. */
388 has_mem_ref_been_instrumented (const asan_mem_ref
*ref
, tree len
)
390 /* First let's see if the address of the beginning of REF has been
392 if (!has_mem_ref_been_instrumented (ref
))
397 /* Let's see if the end of the region has been instrumented. */
398 if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref
, len
),
405 /* Set REF to the memory reference present in a gimple assignment
406 ASSIGNMENT. Return true upon successful completion, false
410 get_mem_ref_of_assignment (const gimple assignment
,
414 gcc_assert (gimple_assign_single_p (assignment
));
416 if (gimple_store_p (assignment
)
417 && !gimple_clobber_p (assignment
))
419 ref
->start
= gimple_assign_lhs (assignment
);
420 *ref_is_store
= true;
422 else if (gimple_assign_load_p (assignment
))
424 ref
->start
= gimple_assign_rhs1 (assignment
);
425 *ref_is_store
= false;
430 ref
->access_size
= int_size_in_bytes (TREE_TYPE (ref
->start
));
434 /* Return the memory references contained in a gimple statement
435 representing a builtin call that has to do with memory access. */
438 get_mem_refs_of_builtin_call (const gimple call
,
450 gcc_checking_assert (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
));
452 tree callee
= gimple_call_fndecl (call
);
453 tree source0
= NULL_TREE
, source1
= NULL_TREE
,
454 dest
= NULL_TREE
, len
= NULL_TREE
;
455 bool is_store
= true, got_reference_p
= false;
456 char access_size
= 1;
458 switch (DECL_FUNCTION_CODE (callee
))
460 /* (s, s, n) style memops. */
462 case BUILT_IN_MEMCMP
:
463 source0
= gimple_call_arg (call
, 0);
464 source1
= gimple_call_arg (call
, 1);
465 len
= gimple_call_arg (call
, 2);
468 /* (src, dest, n) style memops. */
470 source0
= gimple_call_arg (call
, 0);
471 dest
= gimple_call_arg (call
, 1);
472 len
= gimple_call_arg (call
, 2);
475 /* (dest, src, n) style memops. */
476 case BUILT_IN_MEMCPY
:
477 case BUILT_IN_MEMCPY_CHK
:
478 case BUILT_IN_MEMMOVE
:
479 case BUILT_IN_MEMMOVE_CHK
:
480 case BUILT_IN_MEMPCPY
:
481 case BUILT_IN_MEMPCPY_CHK
:
482 dest
= gimple_call_arg (call
, 0);
483 source0
= gimple_call_arg (call
, 1);
484 len
= gimple_call_arg (call
, 2);
487 /* (dest, n) style memops. */
489 dest
= gimple_call_arg (call
, 0);
490 len
= gimple_call_arg (call
, 1);
493 /* (dest, x, n) style memops*/
494 case BUILT_IN_MEMSET
:
495 case BUILT_IN_MEMSET_CHK
:
496 dest
= gimple_call_arg (call
, 0);
497 len
= gimple_call_arg (call
, 2);
500 case BUILT_IN_STRLEN
:
501 source0
= gimple_call_arg (call
, 0);
502 len
= gimple_call_lhs (call
);
505 /* And now the __atomic* and __sync builtins.
506 These are handled differently from the classical memory memory
507 access builtins above. */
509 case BUILT_IN_ATOMIC_LOAD_1
:
510 case BUILT_IN_ATOMIC_LOAD_2
:
511 case BUILT_IN_ATOMIC_LOAD_4
:
512 case BUILT_IN_ATOMIC_LOAD_8
:
513 case BUILT_IN_ATOMIC_LOAD_16
:
517 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
518 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
519 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
520 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
521 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
523 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
524 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
525 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
526 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
527 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
529 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
530 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
531 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
532 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
533 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
535 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
536 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
537 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
538 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
539 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
541 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
542 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
543 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
544 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
545 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
547 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
548 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
549 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
550 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
552 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
553 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
554 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
555 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
556 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
558 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
559 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
560 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
561 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
562 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
564 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
565 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
566 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
567 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
568 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
570 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
571 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
572 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
573 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
574 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
576 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
577 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
578 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
579 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
580 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
582 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
583 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
584 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
585 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
587 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
588 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
589 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
590 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
591 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
593 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
594 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
595 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
596 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
597 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
599 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
600 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
601 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
602 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
603 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
605 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
606 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
607 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
608 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
609 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
611 case BUILT_IN_ATOMIC_EXCHANGE_1
:
612 case BUILT_IN_ATOMIC_EXCHANGE_2
:
613 case BUILT_IN_ATOMIC_EXCHANGE_4
:
614 case BUILT_IN_ATOMIC_EXCHANGE_8
:
615 case BUILT_IN_ATOMIC_EXCHANGE_16
:
617 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
618 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
619 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
620 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
621 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
623 case BUILT_IN_ATOMIC_STORE_1
:
624 case BUILT_IN_ATOMIC_STORE_2
:
625 case BUILT_IN_ATOMIC_STORE_4
:
626 case BUILT_IN_ATOMIC_STORE_8
:
627 case BUILT_IN_ATOMIC_STORE_16
:
629 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
630 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
631 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
632 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
633 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
635 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
636 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
637 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
638 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
639 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
641 case BUILT_IN_ATOMIC_AND_FETCH_1
:
642 case BUILT_IN_ATOMIC_AND_FETCH_2
:
643 case BUILT_IN_ATOMIC_AND_FETCH_4
:
644 case BUILT_IN_ATOMIC_AND_FETCH_8
:
645 case BUILT_IN_ATOMIC_AND_FETCH_16
:
647 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
648 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
649 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
650 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
651 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
653 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
654 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
655 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
656 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
657 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
659 case BUILT_IN_ATOMIC_OR_FETCH_1
:
660 case BUILT_IN_ATOMIC_OR_FETCH_2
:
661 case BUILT_IN_ATOMIC_OR_FETCH_4
:
662 case BUILT_IN_ATOMIC_OR_FETCH_8
:
663 case BUILT_IN_ATOMIC_OR_FETCH_16
:
665 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
666 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
667 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
668 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
669 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
671 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
672 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
673 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
674 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
675 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
677 case BUILT_IN_ATOMIC_FETCH_AND_1
:
678 case BUILT_IN_ATOMIC_FETCH_AND_2
:
679 case BUILT_IN_ATOMIC_FETCH_AND_4
:
680 case BUILT_IN_ATOMIC_FETCH_AND_8
:
681 case BUILT_IN_ATOMIC_FETCH_AND_16
:
683 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
684 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
685 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
686 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
687 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
689 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
690 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
691 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
692 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
693 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
695 case BUILT_IN_ATOMIC_FETCH_OR_1
:
696 case BUILT_IN_ATOMIC_FETCH_OR_2
:
697 case BUILT_IN_ATOMIC_FETCH_OR_4
:
698 case BUILT_IN_ATOMIC_FETCH_OR_8
:
699 case BUILT_IN_ATOMIC_FETCH_OR_16
:
701 dest
= gimple_call_arg (call
, 0);
702 /* DEST represents the address of a memory location.
703 instrument_derefs wants the memory location, so lets
704 dereference the address DEST before handing it to
705 instrument_derefs. */
706 if (TREE_CODE (dest
) == ADDR_EXPR
)
707 dest
= TREE_OPERAND (dest
, 0);
708 else if (TREE_CODE (dest
) == SSA_NAME
)
709 dest
= build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (dest
)),
710 dest
, build_int_cst (TREE_TYPE (dest
), 0));
714 access_size
= int_size_in_bytes (TREE_TYPE (dest
));
718 /* The other builtins memory access are not instrumented in this
719 function because they either don't have any length parameter,
720 or their length parameter is just a limit. */
724 if (len
!= NULL_TREE
)
726 if (source0
!= NULL_TREE
)
728 src0
->start
= source0
;
729 src0
->access_size
= access_size
;
731 *src0_is_store
= false;
734 if (source1
!= NULL_TREE
)
736 src1
->start
= source1
;
737 src1
->access_size
= access_size
;
739 *src1_is_store
= false;
742 if (dest
!= NULL_TREE
)
745 dst
->access_size
= access_size
;
747 *dst_is_store
= true;
750 got_reference_p
= true;
755 dst
->access_size
= access_size
;
756 *dst_len
= NULL_TREE
;
757 *dst_is_store
= is_store
;
758 *dest_is_deref
= true;
759 got_reference_p
= true;
762 return got_reference_p
;
765 /* Return true iff a given gimple statement has been instrumented.
766 Note that the statement is "defined" by the memory references it
770 has_stmt_been_instrumented_p (gimple stmt
)
772 if (gimple_assign_single_p (stmt
))
776 asan_mem_ref_init (&r
, NULL
, 1);
778 if (get_mem_ref_of_assignment (stmt
, &r
, &r_is_store
))
779 return has_mem_ref_been_instrumented (&r
);
781 else if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
783 asan_mem_ref src0
, src1
, dest
;
784 asan_mem_ref_init (&src0
, NULL
, 1);
785 asan_mem_ref_init (&src1
, NULL
, 1);
786 asan_mem_ref_init (&dest
, NULL
, 1);
788 tree src0_len
= NULL_TREE
, src1_len
= NULL_TREE
, dest_len
= NULL_TREE
;
789 bool src0_is_store
= false, src1_is_store
= false,
790 dest_is_store
= false, dest_is_deref
= false;
791 if (get_mem_refs_of_builtin_call (stmt
,
792 &src0
, &src0_len
, &src0_is_store
,
793 &src1
, &src1_len
, &src1_is_store
,
794 &dest
, &dest_len
, &dest_is_store
,
797 if (src0
.start
!= NULL_TREE
798 && !has_mem_ref_been_instrumented (&src0
, src0_len
))
801 if (src1
.start
!= NULL_TREE
802 && !has_mem_ref_been_instrumented (&src1
, src1_len
))
805 if (dest
.start
!= NULL_TREE
806 && !has_mem_ref_been_instrumented (&dest
, dest_len
))
815 /* Insert a memory reference into the hash table. */
818 update_mem_ref_hash_table (tree ref
, char access_size
)
820 hash_table
<asan_mem_ref_hasher
> ht
= get_mem_ref_hash_table ();
823 asan_mem_ref_init (&r
, ref
, access_size
);
825 asan_mem_ref
**slot
= ht
.find_slot (&r
, INSERT
);
827 *slot
= asan_mem_ref_new (ref
, access_size
);
830 /* Initialize shadow_ptr_types array. */
833 asan_init_shadow_ptr_types (void)
835 asan_shadow_set
= new_alias_set ();
836 shadow_ptr_types
[0] = build_distinct_type_copy (signed_char_type_node
);
837 TYPE_ALIAS_SET (shadow_ptr_types
[0]) = asan_shadow_set
;
838 shadow_ptr_types
[0] = build_pointer_type (shadow_ptr_types
[0]);
839 shadow_ptr_types
[1] = build_distinct_type_copy (short_integer_type_node
);
840 TYPE_ALIAS_SET (shadow_ptr_types
[1]) = asan_shadow_set
;
841 shadow_ptr_types
[1] = build_pointer_type (shadow_ptr_types
[1]);
842 initialize_sanitizer_builtins ();
845 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
848 asan_pp_string (pretty_printer
*pp
)
850 const char *buf
= pp_formatted_text (pp
);
851 size_t len
= strlen (buf
);
852 tree ret
= build_string (len
+ 1, buf
);
854 = build_array_type (TREE_TYPE (shadow_ptr_types
[0]),
855 build_index_type (size_int (len
)));
856 TREE_READONLY (ret
) = 1;
857 TREE_STATIC (ret
) = 1;
858 return build1 (ADDR_EXPR
, shadow_ptr_types
[0], ret
);
861 /* Return a CONST_INT representing 4 subsequent shadow memory bytes. */
864 asan_shadow_cst (unsigned char shadow_bytes
[4])
867 unsigned HOST_WIDE_INT val
= 0;
868 gcc_assert (WORDS_BIG_ENDIAN
== BYTES_BIG_ENDIAN
);
869 for (i
= 0; i
< 4; i
++)
870 val
|= (unsigned HOST_WIDE_INT
) shadow_bytes
[BYTES_BIG_ENDIAN
? 3 - i
: i
]
871 << (BITS_PER_UNIT
* i
);
872 return gen_int_mode (val
, SImode
);
875 /* Clear shadow memory at SHADOW_MEM, LEN bytes. Can't call a library call here
879 asan_clear_shadow (rtx shadow_mem
, HOST_WIDE_INT len
)
881 rtx insn
, insns
, top_label
, end
, addr
, tmp
, jump
;
884 clear_storage (shadow_mem
, GEN_INT (len
), BLOCK_OP_NORMAL
);
885 insns
= get_insns ();
887 for (insn
= insns
; insn
; insn
= NEXT_INSN (insn
))
890 if (insn
== NULL_RTX
)
896 gcc_assert ((len
& 3) == 0);
897 top_label
= gen_label_rtx ();
898 addr
= force_reg (Pmode
, XEXP (shadow_mem
, 0));
899 shadow_mem
= adjust_automodify_address (shadow_mem
, SImode
, addr
, 0);
900 end
= force_reg (Pmode
, plus_constant (Pmode
, addr
, len
));
901 emit_label (top_label
);
903 emit_move_insn (shadow_mem
, const0_rtx
);
904 tmp
= expand_simple_binop (Pmode
, PLUS
, addr
, gen_int_mode (4, Pmode
), addr
,
905 true, OPTAB_LIB_WIDEN
);
907 emit_move_insn (addr
, tmp
);
908 emit_cmp_and_jump_insns (addr
, end
, LT
, NULL_RTX
, Pmode
, true, top_label
);
909 jump
= get_last_insn ();
910 gcc_assert (JUMP_P (jump
));
911 add_reg_note (jump
, REG_BR_PROB
, GEN_INT (REG_BR_PROB_BASE
* 80 / 100));
914 /* Insert code to protect stack vars. The prologue sequence should be emitted
915 directly, epilogue sequence returned. BASE is the register holding the
916 stack base, against which OFFSETS array offsets are relative to, OFFSETS
917 array contains pairs of offsets in reverse order, always the end offset
918 of some gap that needs protection followed by starting offset,
919 and DECLS is an array of representative decls for each var partition.
920 LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
921 elements long (OFFSETS include gap before the first variable as well
922 as gaps after each stack variable). */
925 asan_emit_stack_protection (rtx base
, HOST_WIDE_INT
*offsets
, tree
*decls
,
928 rtx shadow_base
, shadow_mem
, ret
, mem
;
929 unsigned char shadow_bytes
[4];
930 HOST_WIDE_INT base_offset
= offsets
[length
- 1], offset
, prev_offset
;
931 HOST_WIDE_INT last_offset
, last_size
;
933 unsigned char cur_shadow_byte
= ASAN_STACK_MAGIC_LEFT
;
936 if (shadow_ptr_types
[0] == NULL_TREE
)
937 asan_init_shadow_ptr_types ();
939 /* First of all, prepare the description string. */
940 pretty_printer asan_pp
;
942 if (DECL_NAME (current_function_decl
))
943 pp_tree_identifier (&asan_pp
, DECL_NAME (current_function_decl
));
945 pp_string (&asan_pp
, "<unknown>");
947 pp_decimal_int (&asan_pp
, length
/ 2 - 1);
949 for (l
= length
- 2; l
; l
-= 2)
951 tree decl
= decls
[l
/ 2 - 1];
952 pp_wide_integer (&asan_pp
, offsets
[l
] - base_offset
);
954 pp_wide_integer (&asan_pp
, offsets
[l
- 1] - offsets
[l
]);
956 if (DECL_P (decl
) && DECL_NAME (decl
))
958 pp_decimal_int (&asan_pp
, IDENTIFIER_LENGTH (DECL_NAME (decl
)));
960 pp_tree_identifier (&asan_pp
, DECL_NAME (decl
));
963 pp_string (&asan_pp
, "9 <unknown>");
966 str_cst
= asan_pp_string (&asan_pp
);
968 /* Emit the prologue sequence. */
969 base
= expand_binop (Pmode
, add_optab
, base
,
970 gen_int_mode (base_offset
, Pmode
),
971 NULL_RTX
, 1, OPTAB_DIRECT
);
972 mem
= gen_rtx_MEM (ptr_mode
, base
);
973 emit_move_insn (mem
, gen_int_mode (ASAN_STACK_FRAME_MAGIC
, ptr_mode
));
974 mem
= adjust_address (mem
, VOIDmode
, GET_MODE_SIZE (ptr_mode
));
975 emit_move_insn (mem
, expand_normal (str_cst
));
976 shadow_base
= expand_binop (Pmode
, lshr_optab
, base
,
977 GEN_INT (ASAN_SHADOW_SHIFT
),
978 NULL_RTX
, 1, OPTAB_DIRECT
);
979 shadow_base
= expand_binop (Pmode
, add_optab
, shadow_base
,
980 gen_int_mode (targetm
.asan_shadow_offset (),
982 NULL_RTX
, 1, OPTAB_DIRECT
);
983 gcc_assert (asan_shadow_set
!= -1
984 && (ASAN_RED_ZONE_SIZE
>> ASAN_SHADOW_SHIFT
) == 4);
985 shadow_mem
= gen_rtx_MEM (SImode
, shadow_base
);
986 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
987 prev_offset
= base_offset
;
988 for (l
= length
; l
; l
-= 2)
991 cur_shadow_byte
= ASAN_STACK_MAGIC_RIGHT
;
992 offset
= offsets
[l
- 1];
993 if ((offset
- base_offset
) & (ASAN_RED_ZONE_SIZE
- 1))
997 = base_offset
+ ((offset
- base_offset
)
998 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
));
999 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1000 (aoff
- prev_offset
)
1001 >> ASAN_SHADOW_SHIFT
);
1003 for (i
= 0; i
< 4; i
++, aoff
+= (1 << ASAN_SHADOW_SHIFT
))
1006 if (aoff
< offset
- (1 << ASAN_SHADOW_SHIFT
) + 1)
1007 shadow_bytes
[i
] = 0;
1009 shadow_bytes
[i
] = offset
- aoff
;
1012 shadow_bytes
[i
] = ASAN_STACK_MAGIC_PARTIAL
;
1013 emit_move_insn (shadow_mem
, asan_shadow_cst (shadow_bytes
));
1016 while (offset
<= offsets
[l
- 2] - ASAN_RED_ZONE_SIZE
)
1018 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1019 (offset
- prev_offset
)
1020 >> ASAN_SHADOW_SHIFT
);
1021 prev_offset
= offset
;
1022 memset (shadow_bytes
, cur_shadow_byte
, 4);
1023 emit_move_insn (shadow_mem
, asan_shadow_cst (shadow_bytes
));
1024 offset
+= ASAN_RED_ZONE_SIZE
;
1026 cur_shadow_byte
= ASAN_STACK_MAGIC_MIDDLE
;
1028 do_pending_stack_adjust ();
1030 /* Construct epilogue sequence. */
1033 shadow_mem
= gen_rtx_MEM (BLKmode
, shadow_base
);
1034 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1035 prev_offset
= base_offset
;
1036 last_offset
= base_offset
;
1038 for (l
= length
; l
; l
-= 2)
1040 offset
= base_offset
+ ((offsets
[l
- 1] - base_offset
)
1041 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
));
1042 if (last_offset
+ last_size
!= offset
)
1044 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1045 (last_offset
- prev_offset
)
1046 >> ASAN_SHADOW_SHIFT
);
1047 prev_offset
= last_offset
;
1048 asan_clear_shadow (shadow_mem
, last_size
>> ASAN_SHADOW_SHIFT
);
1049 last_offset
= offset
;
1052 last_size
+= base_offset
+ ((offsets
[l
- 2] - base_offset
)
1053 & ~(ASAN_RED_ZONE_SIZE
- HOST_WIDE_INT_1
))
1058 shadow_mem
= adjust_address (shadow_mem
, VOIDmode
,
1059 (last_offset
- prev_offset
)
1060 >> ASAN_SHADOW_SHIFT
);
1061 asan_clear_shadow (shadow_mem
, last_size
>> ASAN_SHADOW_SHIFT
);
1064 do_pending_stack_adjust ();
1071 /* Return true if DECL, a global var, might be overridden and needs
1072 therefore a local alias. */
1075 asan_needs_local_alias (tree decl
)
1077 return DECL_WEAK (decl
) || !targetm
.binds_local_p (decl
);
1080 /* Return true if DECL is a VAR_DECL that should be protected
1081 by Address Sanitizer, by appending a red zone with protected
1082 shadow memory after it and aligning it to at least
1083 ASAN_RED_ZONE_SIZE bytes. */
1086 asan_protect_global (tree decl
)
1090 if (TREE_CODE (decl
) == STRING_CST
)
1092 /* Instrument all STRING_CSTs except those created
1093 by asan_pp_string here. */
1094 if (shadow_ptr_types
[0] != NULL_TREE
1095 && TREE_CODE (TREE_TYPE (decl
)) == ARRAY_TYPE
1096 && TREE_TYPE (TREE_TYPE (decl
)) == TREE_TYPE (shadow_ptr_types
[0]))
1100 if (TREE_CODE (decl
) != VAR_DECL
1101 /* TLS vars aren't statically protectable. */
1102 || DECL_THREAD_LOCAL_P (decl
)
1103 /* Externs will be protected elsewhere. */
1104 || DECL_EXTERNAL (decl
)
1105 || !DECL_RTL_SET_P (decl
)
1106 /* Comdat vars pose an ABI problem, we can't know if
1107 the var that is selected by the linker will have
1109 || DECL_ONE_ONLY (decl
)
1110 /* Similarly for common vars. People can use -fno-common. */
1111 || (DECL_COMMON (decl
) && TREE_PUBLIC (decl
))
1112 /* Don't protect if using user section, often vars placed
1113 into user section from multiple TUs are then assumed
1114 to be an array of such vars, putting padding in there
1115 breaks this assumption. */
1116 || (DECL_SECTION_NAME (decl
) != NULL_TREE
1117 && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl
))
1118 || DECL_SIZE (decl
) == 0
1119 || ASAN_RED_ZONE_SIZE
* BITS_PER_UNIT
> MAX_OFILE_ALIGNMENT
1120 || !valid_constant_size_p (DECL_SIZE_UNIT (decl
))
1121 || DECL_ALIGN_UNIT (decl
) > 2 * ASAN_RED_ZONE_SIZE
)
1124 rtl
= DECL_RTL (decl
);
1125 if (!MEM_P (rtl
) || GET_CODE (XEXP (rtl
, 0)) != SYMBOL_REF
)
1127 symbol
= XEXP (rtl
, 0);
1129 if (CONSTANT_POOL_ADDRESS_P (symbol
)
1130 || TREE_CONSTANT_POOL_ADDRESS_P (symbol
))
1133 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl
)))
1136 #ifndef ASM_OUTPUT_DEF
1137 if (asan_needs_local_alias (decl
))
1144 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
1145 IS_STORE is either 1 (for a store) or 0 (for a load).
1146 SIZE_IN_BYTES is one of 1, 2, 4, 8, 16. */
1149 report_error_func (bool is_store
, int size_in_bytes
)
1151 static enum built_in_function report
[2][5]
1152 = { { BUILT_IN_ASAN_REPORT_LOAD1
, BUILT_IN_ASAN_REPORT_LOAD2
,
1153 BUILT_IN_ASAN_REPORT_LOAD4
, BUILT_IN_ASAN_REPORT_LOAD8
,
1154 BUILT_IN_ASAN_REPORT_LOAD16
},
1155 { BUILT_IN_ASAN_REPORT_STORE1
, BUILT_IN_ASAN_REPORT_STORE2
,
1156 BUILT_IN_ASAN_REPORT_STORE4
, BUILT_IN_ASAN_REPORT_STORE8
,
1157 BUILT_IN_ASAN_REPORT_STORE16
} };
1158 return builtin_decl_implicit (report
[is_store
][exact_log2 (size_in_bytes
)]);
1161 #define PROB_VERY_UNLIKELY (REG_BR_PROB_BASE / 2000 - 1)
1162 #define PROB_ALWAYS (REG_BR_PROB_BASE)
1164 /* Split the current basic block and create a condition statement
1165 insertion point right before or after the statement pointed to by
1166 ITER. Return an iterator to the point at which the caller might
1167 safely insert the condition statement.
1169 THEN_BLOCK must be set to the address of an uninitialized instance
1170 of basic_block. The function will then set *THEN_BLOCK to the
1171 'then block' of the condition statement to be inserted by the
1174 If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1175 *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1177 Similarly, the function will set *FALLTRHOUGH_BLOCK to the 'else
1178 block' of the condition statement to be inserted by the caller.
1180 Note that *FALLTHROUGH_BLOCK is a new block that contains the
1181 statements starting from *ITER, and *THEN_BLOCK is a new empty
1184 *ITER is adjusted to point to always point to the first statement
1185 of the basic block * FALLTHROUGH_BLOCK. That statement is the
1186 same as what ITER was pointing to prior to calling this function,
1187 if BEFORE_P is true; otherwise, it is its following statement. */
1189 static gimple_stmt_iterator
1190 create_cond_insert_point (gimple_stmt_iterator
*iter
,
1192 bool then_more_likely_p
,
1193 bool create_then_fallthru_edge
,
1194 basic_block
*then_block
,
1195 basic_block
*fallthrough_block
)
1197 gimple_stmt_iterator gsi
= *iter
;
1199 if (!gsi_end_p (gsi
) && before_p
)
1202 basic_block cur_bb
= gsi_bb (*iter
);
1204 edge e
= split_block (cur_bb
, gsi_stmt (gsi
));
1206 /* Get a hold on the 'condition block', the 'then block' and the
1208 basic_block cond_bb
= e
->src
;
1209 basic_block fallthru_bb
= e
->dest
;
1210 basic_block then_bb
= create_empty_bb (cond_bb
);
1213 add_bb_to_loop (then_bb
, cond_bb
->loop_father
);
1214 loops_state_set (LOOPS_NEED_FIXUP
);
1217 /* Set up the newly created 'then block'. */
1218 e
= make_edge (cond_bb
, then_bb
, EDGE_TRUE_VALUE
);
1219 int fallthrough_probability
1220 = then_more_likely_p
1221 ? PROB_VERY_UNLIKELY
1222 : PROB_ALWAYS
- PROB_VERY_UNLIKELY
;
1223 e
->probability
= PROB_ALWAYS
- fallthrough_probability
;
1224 if (create_then_fallthru_edge
)
1225 make_single_succ_edge (then_bb
, fallthru_bb
, EDGE_FALLTHRU
);
1227 /* Set up the fallthrough basic block. */
1228 e
= find_edge (cond_bb
, fallthru_bb
);
1229 e
->flags
= EDGE_FALSE_VALUE
;
1230 e
->count
= cond_bb
->count
;
1231 e
->probability
= fallthrough_probability
;
1233 /* Update dominance info for the newly created then_bb; note that
1234 fallthru_bb's dominance info has already been updated by
1236 if (dom_info_available_p (CDI_DOMINATORS
))
1237 set_immediate_dominator (CDI_DOMINATORS
, then_bb
, cond_bb
);
1239 *then_block
= then_bb
;
1240 *fallthrough_block
= fallthru_bb
;
1241 *iter
= gsi_start_bb (fallthru_bb
);
1243 return gsi_last_bb (cond_bb
);
1246 /* Insert an if condition followed by a 'then block' right before the
1247 statement pointed to by ITER. The fallthrough block -- which is the
1248 else block of the condition as well as the destination of the
1249 outcoming edge of the 'then block' -- starts with the statement
1252 COND is the condition of the if.
1254 If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1255 'then block' is higher than the probability of the edge to the
1258 Upon completion of the function, *THEN_BB is set to the newly
1259 inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1262 *ITER is adjusted to still point to the same statement it was
1263 pointing to initially. */
1266 insert_if_then_before_iter (gimple cond
,
1267 gimple_stmt_iterator
*iter
,
1268 bool then_more_likely_p
,
1269 basic_block
*then_bb
,
1270 basic_block
*fallthrough_bb
)
1272 gimple_stmt_iterator cond_insert_point
=
1273 create_cond_insert_point (iter
,
1276 /*create_then_fallthru_edge=*/true,
1279 gsi_insert_after (&cond_insert_point
, cond
, GSI_NEW_STMT
);
1282 /* Instrument the memory access instruction BASE. Insert new
1283 statements before or after ITER.
1285 Note that the memory access represented by BASE can be either an
1286 SSA_NAME, or a non-SSA expression. LOCATION is the source code
1287 location. IS_STORE is TRUE for a store, FALSE for a load.
1288 BEFORE_P is TRUE for inserting the instrumentation code before
1289 ITER, FALSE for inserting it after ITER. SIZE_IN_BYTES is one of
1292 If BEFORE_P is TRUE, *ITER is arranged to still point to the
1293 statement it was pointing to prior to calling this function,
1294 otherwise, it points to the statement logically following it. */
1297 build_check_stmt (location_t location
, tree base
, gimple_stmt_iterator
*iter
,
1298 bool before_p
, bool is_store
, int size_in_bytes
)
1300 gimple_stmt_iterator gsi
;
1301 basic_block then_bb
, else_bb
;
1302 tree t
, base_addr
, shadow
;
1304 tree shadow_ptr_type
= shadow_ptr_types
[size_in_bytes
== 16 ? 1 : 0];
1305 tree shadow_type
= TREE_TYPE (shadow_ptr_type
);
1307 = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base
)), 1);
1308 tree base_ssa
= base
;
1310 /* Get an iterator on the point where we can add the condition
1311 statement for the instrumentation. */
1312 gsi
= create_cond_insert_point (iter
, before_p
,
1313 /*then_more_likely_p=*/false,
1314 /*create_then_fallthru_edge=*/false,
1318 base
= unshare_expr (base
);
1320 /* BASE can already be an SSA_NAME; in that case, do not create a
1321 new SSA_NAME for it. */
1322 if (TREE_CODE (base
) != SSA_NAME
)
1324 g
= gimple_build_assign_with_ops (TREE_CODE (base
),
1325 make_ssa_name (TREE_TYPE (base
), NULL
),
1327 gimple_set_location (g
, location
);
1328 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
1329 base_ssa
= gimple_assign_lhs (g
);
1332 g
= gimple_build_assign_with_ops (NOP_EXPR
,
1333 make_ssa_name (uintptr_type
, NULL
),
1334 base_ssa
, NULL_TREE
);
1335 gimple_set_location (g
, location
);
1336 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
1337 base_addr
= gimple_assign_lhs (g
);
1340 (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset (). */
1342 t
= build_int_cst (uintptr_type
, ASAN_SHADOW_SHIFT
);
1343 g
= gimple_build_assign_with_ops (RSHIFT_EXPR
,
1344 make_ssa_name (uintptr_type
, NULL
),
1346 gimple_set_location (g
, location
);
1347 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
1349 t
= build_int_cst (uintptr_type
, targetm
.asan_shadow_offset ());
1350 g
= gimple_build_assign_with_ops (PLUS_EXPR
,
1351 make_ssa_name (uintptr_type
, NULL
),
1352 gimple_assign_lhs (g
), t
);
1353 gimple_set_location (g
, location
);
1354 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
1356 g
= gimple_build_assign_with_ops (NOP_EXPR
,
1357 make_ssa_name (shadow_ptr_type
, NULL
),
1358 gimple_assign_lhs (g
), NULL_TREE
);
1359 gimple_set_location (g
, location
);
1360 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
1362 t
= build2 (MEM_REF
, shadow_type
, gimple_assign_lhs (g
),
1363 build_int_cst (shadow_ptr_type
, 0));
1364 g
= gimple_build_assign_with_ops (MEM_REF
,
1365 make_ssa_name (shadow_type
, NULL
),
1367 gimple_set_location (g
, location
);
1368 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
1369 shadow
= gimple_assign_lhs (g
);
1371 if (size_in_bytes
< 8)
1373 /* Slow path for 1, 2 and 4 byte accesses.
1375 & ((base_addr & 7) + (size_in_bytes - 1)) >= shadow). */
1376 gimple_seq seq
= NULL
;
1377 gimple shadow_test
= build_assign (NE_EXPR
, shadow
, 0);
1378 gimple_seq_add_stmt (&seq
, shadow_test
);
1379 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
, base_addr
, 7));
1380 gimple_seq_add_stmt (&seq
, build_type_cast (shadow_type
,
1381 gimple_seq_last (seq
)));
1382 if (size_in_bytes
> 1)
1383 gimple_seq_add_stmt (&seq
,
1384 build_assign (PLUS_EXPR
, gimple_seq_last (seq
),
1385 size_in_bytes
- 1));
1386 gimple_seq_add_stmt (&seq
, build_assign (GE_EXPR
, gimple_seq_last (seq
),
1388 gimple_seq_add_stmt (&seq
, build_assign (BIT_AND_EXPR
, shadow_test
,
1389 gimple_seq_last (seq
)));
1390 t
= gimple_assign_lhs (gimple_seq_last (seq
));
1391 gimple_seq_set_location (seq
, location
);
1392 gsi_insert_seq_after (&gsi
, seq
, GSI_CONTINUE_LINKING
);
1397 g
= gimple_build_cond (NE_EXPR
, t
, build_int_cst (TREE_TYPE (t
), 0),
1398 NULL_TREE
, NULL_TREE
);
1399 gimple_set_location (g
, location
);
1400 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
1402 /* Generate call to the run-time library (e.g. __asan_report_load8). */
1403 gsi
= gsi_start_bb (then_bb
);
1404 g
= gimple_build_call (report_error_func (is_store
, size_in_bytes
),
1406 gimple_set_location (g
, location
);
1407 gsi_insert_after (&gsi
, g
, GSI_NEW_STMT
);
1409 *iter
= gsi_start_bb (else_bb
);
1412 /* If T represents a memory access, add instrumentation code before ITER.
1413 LOCATION is source code location.
1414 IS_STORE is either TRUE (for a store) or FALSE (for a load). */
1417 instrument_derefs (gimple_stmt_iterator
*iter
, tree t
,
1418 location_t location
, bool is_store
)
1421 HOST_WIDE_INT size_in_bytes
;
1423 type
= TREE_TYPE (t
);
1424 switch (TREE_CODE (t
))
1435 size_in_bytes
= int_size_in_bytes (type
);
1436 if ((size_in_bytes
& (size_in_bytes
- 1)) != 0
1437 || (unsigned HOST_WIDE_INT
) size_in_bytes
- 1 >= 16)
1440 HOST_WIDE_INT bitsize
, bitpos
;
1442 enum machine_mode mode
;
1443 int volatilep
= 0, unsignedp
= 0;
1444 get_inner_reference (t
, &bitsize
, &bitpos
, &offset
,
1445 &mode
, &unsignedp
, &volatilep
, false);
1446 if (bitpos
% (size_in_bytes
* BITS_PER_UNIT
)
1447 || bitsize
!= size_in_bytes
* BITS_PER_UNIT
)
1449 if (TREE_CODE (t
) == COMPONENT_REF
1450 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1)) != NULL_TREE
)
1452 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1));
1453 instrument_derefs (iter
, build3 (COMPONENT_REF
, TREE_TYPE (repr
),
1454 TREE_OPERAND (t
, 0), repr
,
1455 NULL_TREE
), location
, is_store
);
1460 base
= build_fold_addr_expr (t
);
1461 if (!has_mem_ref_been_instrumented (base
, size_in_bytes
))
1463 build_check_stmt (location
, base
, iter
, /*before_p=*/true,
1464 is_store
, size_in_bytes
);
1465 update_mem_ref_hash_table (base
, size_in_bytes
);
1466 update_mem_ref_hash_table (t
, size_in_bytes
);
1471 /* Instrument an access to a contiguous memory region that starts at
1472 the address pointed to by BASE, over a length of LEN (expressed in
1473 the sizeof (*BASE) bytes). ITER points to the instruction before
1474 which the instrumentation instructions must be inserted. LOCATION
1475 is the source location that the instrumentation instructions must
1476 have. If IS_STORE is true, then the memory access is a store;
1477 otherwise, it's a load. */
1480 instrument_mem_region_access (tree base
, tree len
,
1481 gimple_stmt_iterator
*iter
,
1482 location_t location
, bool is_store
)
1484 if (!POINTER_TYPE_P (TREE_TYPE (base
))
1485 || !INTEGRAL_TYPE_P (TREE_TYPE (len
))
1486 || integer_zerop (len
))
1489 gimple_stmt_iterator gsi
= *iter
;
1491 basic_block fallthrough_bb
= NULL
, then_bb
= NULL
;
1493 /* If the beginning of the memory region has already been
1494 instrumented, do not instrument it. */
1495 bool start_instrumented
= has_mem_ref_been_instrumented (base
, 1);
1497 /* If the end of the memory region has already been instrumented, do
1498 not instrument it. */
1499 tree end
= asan_mem_ref_get_end (base
, len
);
1500 bool end_instrumented
= has_mem_ref_been_instrumented (end
, 1);
1502 if (start_instrumented
&& end_instrumented
)
1505 if (!is_gimple_constant (len
))
1507 /* So, the length of the memory area to asan-protect is
1508 non-constant. Let's guard the generated instrumentation code
1513 //asan instrumentation code goes here.
1515 // falltrough instructions, starting with *ITER. */
1517 gimple g
= gimple_build_cond (NE_EXPR
,
1519 build_int_cst (TREE_TYPE (len
), 0),
1520 NULL_TREE
, NULL_TREE
);
1521 gimple_set_location (g
, location
);
1522 insert_if_then_before_iter (g
, iter
, /*then_more_likely_p=*/true,
1523 &then_bb
, &fallthrough_bb
);
1524 /* Note that fallthrough_bb starts with the statement that was
1525 pointed to by ITER. */
1527 /* The 'then block' of the 'if (len != 0) condition is where
1528 we'll generate the asan instrumentation code now. */
1529 gsi
= gsi_last_bb (then_bb
);
1532 if (!start_instrumented
)
1534 /* Instrument the beginning of the memory region to be accessed,
1535 and arrange for the rest of the intrumentation code to be
1536 inserted in the then block *after* the current gsi. */
1537 build_check_stmt (location
, base
, &gsi
, /*before_p=*/true, is_store
, 1);
1540 /* We are in the case where the length of the region is not
1541 constant; so instrumentation code is being generated in the
1542 'then block' of the 'if (len != 0) condition. Let's arrange
1543 for the subsequent instrumentation statements to go in the
1545 gsi
= gsi_last_bb (then_bb
);
1549 /* Don't remember this access as instrumented, if length
1550 is unknown. It might be zero and not being actually
1551 instrumented, so we can't rely on it being instrumented. */
1552 update_mem_ref_hash_table (base
, 1);
1556 if (end_instrumented
)
1559 /* We want to instrument the access at the end of the memory region,
1560 which is at (base + len - 1). */
1562 /* offset = len - 1; */
1563 len
= unshare_expr (len
);
1565 gimple_seq seq
= NULL
;
1566 if (TREE_CODE (len
) == INTEGER_CST
)
1567 offset
= fold_build2 (MINUS_EXPR
, size_type_node
,
1568 fold_convert (size_type_node
, len
),
1569 build_int_cst (size_type_node
, 1));
1575 if (TREE_CODE (len
) != SSA_NAME
)
1577 t
= make_ssa_name (TREE_TYPE (len
), NULL
);
1578 g
= gimple_build_assign_with_ops (TREE_CODE (len
), t
, len
, NULL
);
1579 gimple_set_location (g
, location
);
1580 gimple_seq_add_stmt_without_update (&seq
, g
);
1583 if (!useless_type_conversion_p (size_type_node
, TREE_TYPE (len
)))
1585 t
= make_ssa_name (size_type_node
, NULL
);
1586 g
= gimple_build_assign_with_ops (NOP_EXPR
, t
, len
, NULL
);
1587 gimple_set_location (g
, location
);
1588 gimple_seq_add_stmt_without_update (&seq
, g
);
1592 t
= make_ssa_name (size_type_node
, NULL
);
1593 g
= gimple_build_assign_with_ops (MINUS_EXPR
, t
, len
,
1594 build_int_cst (size_type_node
, 1));
1595 gimple_set_location (g
, location
);
1596 gimple_seq_add_stmt_without_update (&seq
, g
);
1597 offset
= gimple_assign_lhs (g
);
1601 base
= unshare_expr (base
);
1603 gimple_build_assign_with_ops (TREE_CODE (base
),
1604 make_ssa_name (TREE_TYPE (base
), NULL
),
1606 gimple_set_location (region_end
, location
);
1607 gimple_seq_add_stmt_without_update (&seq
, region_end
);
1609 /* _2 = _1 + offset; */
1611 gimple_build_assign_with_ops (POINTER_PLUS_EXPR
,
1612 make_ssa_name (TREE_TYPE (base
), NULL
),
1613 gimple_assign_lhs (region_end
),
1615 gimple_set_location (region_end
, location
);
1616 gimple_seq_add_stmt_without_update (&seq
, region_end
);
1617 gsi_insert_seq_before (&gsi
, seq
, GSI_SAME_STMT
);
1619 /* instrument access at _2; */
1620 gsi
= gsi_for_stmt (region_end
);
1621 build_check_stmt (location
, gimple_assign_lhs (region_end
),
1622 &gsi
, /*before_p=*/false, is_store
, 1);
1624 if (then_bb
== NULL
)
1625 update_mem_ref_hash_table (end
, 1);
1627 *iter
= gsi_for_stmt (gsi_stmt (*iter
));
1630 /* Instrument the call (to the builtin strlen function) pointed to by
1633 This function instruments the access to the first byte of the
1634 argument, right before the call. After the call it instruments the
1635 access to the last byte of the argument; it uses the result of the
1636 call to deduce the offset of that last byte.
1638 Upon completion, iff the call has actually been instrumented, this
1639 function returns TRUE and *ITER points to the statement logically
1640 following the built-in strlen function call *ITER was initially
1641 pointing to. Otherwise, the function returns FALSE and *ITER
1642 remains unchanged. */
1645 instrument_strlen_call (gimple_stmt_iterator
*iter
)
1647 gimple call
= gsi_stmt (*iter
);
1648 gcc_assert (is_gimple_call (call
));
1650 tree callee
= gimple_call_fndecl (call
);
1651 gcc_assert (is_builtin_fn (callee
)
1652 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
1653 && DECL_FUNCTION_CODE (callee
) == BUILT_IN_STRLEN
);
1655 tree len
= gimple_call_lhs (call
);
1657 /* Some passes might clear the return value of the strlen call;
1658 bail out in that case. Return FALSE as we are not advancing
1661 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len
)));
1663 location_t loc
= gimple_location (call
);
1664 tree str_arg
= gimple_call_arg (call
, 0);
1666 /* Instrument the access to the first byte of str_arg. i.e:
1668 _1 = str_arg; instrument (_1); */
1669 tree cptr_type
= build_pointer_type (char_type_node
);
1670 gimple str_arg_ssa
=
1671 gimple_build_assign_with_ops (NOP_EXPR
,
1672 make_ssa_name (cptr_type
, NULL
),
1674 gimple_set_location (str_arg_ssa
, loc
);
1675 gimple_stmt_iterator gsi
= *iter
;
1676 gsi_insert_before (&gsi
, str_arg_ssa
, GSI_NEW_STMT
);
1677 build_check_stmt (loc
, gimple_assign_lhs (str_arg_ssa
), &gsi
,
1678 /*before_p=*/false, /*is_store=*/false, 1);
1680 /* If we initially had an instruction like:
1682 int n = strlen (str)
1684 we now want to instrument the access to str[n], after the
1685 instruction above.*/
1687 /* So let's build the access to str[n] that is, access through the
1688 pointer_plus expr: (_1 + len). */
1690 gimple_build_assign_with_ops (POINTER_PLUS_EXPR
,
1691 make_ssa_name (cptr_type
, NULL
),
1692 gimple_assign_lhs (str_arg_ssa
),
1694 gimple_set_location (stmt
, loc
);
1695 gsi_insert_after (&gsi
, stmt
, GSI_NEW_STMT
);
1697 build_check_stmt (loc
, gimple_assign_lhs (stmt
), &gsi
,
1698 /*before_p=*/false, /*is_store=*/false, 1);
1700 /* Ensure that iter points to the statement logically following the
1701 one it was initially pointing to. */
1703 /* As *ITER has been advanced to point to the next statement, let's
1704 return true to inform transform_statements that it shouldn't
1705 advance *ITER anymore; otherwises it will skip that next
1706 statement, which wouldn't be instrumented. */
1710 /* Instrument the call to a built-in memory access function that is
1711 pointed to by the iterator ITER.
1713 Upon completion, return TRUE iff *ITER has been advanced to the
1714 statement following the one it was originally pointing to. */
1717 instrument_builtin_call (gimple_stmt_iterator
*iter
)
1719 bool iter_advanced_p
= false;
1720 gimple call
= gsi_stmt (*iter
);
1722 gcc_checking_assert (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
));
1724 tree callee
= gimple_call_fndecl (call
);
1725 location_t loc
= gimple_location (call
);
1727 if (DECL_FUNCTION_CODE (callee
) == BUILT_IN_STRLEN
)
1728 iter_advanced_p
= instrument_strlen_call (iter
);
1731 asan_mem_ref src0
, src1
, dest
;
1732 asan_mem_ref_init (&src0
, NULL
, 1);
1733 asan_mem_ref_init (&src1
, NULL
, 1);
1734 asan_mem_ref_init (&dest
, NULL
, 1);
1736 tree src0_len
= NULL_TREE
, src1_len
= NULL_TREE
, dest_len
= NULL_TREE
;
1737 bool src0_is_store
= false, src1_is_store
= false,
1738 dest_is_store
= false, dest_is_deref
= false;
1740 if (get_mem_refs_of_builtin_call (call
,
1741 &src0
, &src0_len
, &src0_is_store
,
1742 &src1
, &src1_len
, &src1_is_store
,
1743 &dest
, &dest_len
, &dest_is_store
,
1748 instrument_derefs (iter
, dest
.start
, loc
, dest_is_store
);
1750 iter_advanced_p
= true;
1752 else if (src0_len
|| src1_len
|| dest_len
)
1754 if (src0
.start
!= NULL_TREE
)
1755 instrument_mem_region_access (src0
.start
, src0_len
,
1756 iter
, loc
, /*is_store=*/false);
1757 if (src1
.start
!= NULL_TREE
)
1758 instrument_mem_region_access (src1
.start
, src1_len
,
1759 iter
, loc
, /*is_store=*/false);
1760 if (dest
.start
!= NULL_TREE
)
1761 instrument_mem_region_access (dest
.start
, dest_len
,
1762 iter
, loc
, /*is_store=*/true);
1763 *iter
= gsi_for_stmt (call
);
1765 iter_advanced_p
= true;
1769 return iter_advanced_p
;
1772 /* Instrument the assignment statement ITER if it is subject to
1773 instrumentation. Return TRUE iff instrumentation actually
1774 happened. In that case, the iterator ITER is advanced to the next
1775 logical expression following the one initially pointed to by ITER,
1776 and the relevant memory reference that which access has been
1777 instrumented is added to the memory references hash table. */
1780 maybe_instrument_assignment (gimple_stmt_iterator
*iter
)
1782 gimple s
= gsi_stmt (*iter
);
1784 gcc_assert (gimple_assign_single_p (s
));
1786 tree ref_expr
= NULL_TREE
;
1787 bool is_store
, is_instrumented
= false;
1789 if (gimple_store_p (s
))
1791 ref_expr
= gimple_assign_lhs (s
);
1793 instrument_derefs (iter
, ref_expr
,
1794 gimple_location (s
),
1796 is_instrumented
= true;
1799 if (gimple_assign_load_p (s
))
1801 ref_expr
= gimple_assign_rhs1 (s
);
1803 instrument_derefs (iter
, ref_expr
,
1804 gimple_location (s
),
1806 is_instrumented
= true;
1809 if (is_instrumented
)
1812 return is_instrumented
;
1815 /* Instrument the function call pointed to by the iterator ITER, if it
1816 is subject to instrumentation. At the moment, the only function
1817 calls that are instrumented are some built-in functions that access
1818 memory. Look at instrument_builtin_call to learn more.
1820 Upon completion return TRUE iff *ITER was advanced to the statement
1821 following the one it was originally pointing to. */
1824 maybe_instrument_call (gimple_stmt_iterator
*iter
)
1826 gimple stmt
= gsi_stmt (*iter
);
1827 bool is_builtin
= gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
);
1829 if (is_builtin
&& instrument_builtin_call (iter
))
1832 if (gimple_call_noreturn_p (stmt
))
1836 tree callee
= gimple_call_fndecl (stmt
);
1837 switch (DECL_FUNCTION_CODE (callee
))
1839 case BUILT_IN_UNREACHABLE
:
1841 /* Don't instrument these. */
1845 tree decl
= builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN
);
1846 gimple g
= gimple_build_call (decl
, 0);
1847 gimple_set_location (g
, gimple_location (stmt
));
1848 gsi_insert_before (iter
, g
, GSI_SAME_STMT
);
1853 /* Walk each instruction of all basic block and instrument those that
1854 represent memory references: loads, stores, or function calls.
1855 In a given basic block, this function avoids instrumenting memory
1856 references that have already been instrumented. */
static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;

  FOR_EACH_BB (bb)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
         exactly one predecessor, or if that predecessor (skipping
         over asan created basic blocks) isn't the last processed
         basic block.  Thus we effectively flush on extended basic
         block boundaries.  */
      while (single_pred_p (prev_bb))
        {
          prev_bb = single_pred (prev_bb);
          if (prev_bb->index < saved_last_basic_block)
            break;
        }
      if (prev_bb != last_bb)
        empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
        {
          gimple s = gsi_stmt (i);

          if (has_stmt_been_instrumented_p (s))
            gsi_next (&i);
          else if (gimple_assign_single_p (s)
                   && maybe_instrument_assignment (&i))
            /* Nothing to do as maybe_instrument_assignment advanced
               the iterator I.  */;
          else if (is_gimple_call (s) && maybe_instrument_call (&i))
            /* Nothing to do as maybe_instrument_call
               advanced the iterator I.  */;
          else
            {
              /* No instrumentation happened.

                 If the current instruction is a function call that
                 might free something, let's forget about the memory
                 references that got instrumented.  Otherwise we might
                 miss some instrumentation opportunities.  */
              if (is_gimple_call (s) && !nonfreeing_call_p (s))
                empty_mem_ref_hash_table ();

              gsi_next (&i);
            }
        }
    }
  free_mem_ref_resources ();
}
/* Build
   struct __asan_global
   {
     const void *__beg;
     uptr __size;
     uptr __size_with_redzone;
     const void *__name;
     uptr __has_dynamic_init;
   } type.  */
static tree
asan_global_struct (void)
{
  static const char *field_names[5]
    = { "__beg", "__size", "__size_with_redzone",
        "__name", "__has_dynamic_init" };
  tree fields[5], ret;
  int i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < 5; i++)
    {
      fields[i]
        = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                      get_identifier (field_names[i]),
                      (i == 0 || i == 3) ? const_ptr_type_node
                      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
        DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = get_identifier ("__asan_global");
  layout_type (ret);
  return ret;
}
/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */
static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp;

  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  pp_space (&asan_pp);
  pp_left_paren (&asan_pp);
  pp_string (&asan_pp, main_input_filename);
  pp_right_paren (&asan_pp);
  str_cst = asan_pp_string (&asan_pp);

  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
                            VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node,
                                        build_fold_addr_expr (refdecl)));
  size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, 0));
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
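/* As an illustration, for a hypothetical global

     int g[10];

   compiled from foo.c on an LP64 target, the record appended above is
   roughly

     { &g,                            // __beg
       40,                            // __size
       40 + asan_red_zone_size (40),  // __size_with_redzone
       "g (foo.c)",                   // __name
       0 }                            // __has_dynamic_init

   where "g" and "foo.c" are made-up names for the example only.  */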
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */
void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
                                ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
                                pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_VOLATILE
                                                | TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
        = build_function_type_list (boolt, vptr, ptr_type_node, ix,
                                    integer_type_node, integer_type_node,
                                    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
        = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
        = build_function_type_list (ix, vptr, ix, integer_type_node,
                                    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
        = build_function_type_list (void_type_node, vptr, ix,
                                    integer_type_node, NULL_TREE);
    }
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
                               BUILT_IN_NORMAL, NAME, NULL_TREE); \
  set_call_expr_flags (decl, ATTRS); \
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

#undef DEF_SANITIZER_BUILTIN
}
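/* For illustration, an entry in sanitizer.def along the lines of

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_HANDLE_NO_RETURN,
                            "__asan_handle_no_return",
                            BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   expands through the macro above into calls that register
   __builtin___asan_handle_no_return with the given function type and
   call flags and record the decl via set_builtin_decl.  This is only
   a sketch of the expansion; the authoritative entries live in
   sanitizer.def.  */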
/* Called via htab_traverse.  Count number of emitted
   STRING_CSTs in the constant hash table.  */
static int
count_string_csts (void **slot, void *data)
{
  struct constant_descriptor_tree *desc
    = (struct constant_descriptor_tree *) *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*((unsigned HOST_WIDE_INT *) data);
  return 1;
}
/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};
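/* A sketch of the intended protocol, mirroring what asan_finish_file
   does below: count the protected string constants first, then size
   the vector and fill it.

     unsigned HOST_WIDE_INT n = 0;
     htab_t htab = constant_pool_htab ();
     htab_traverse (htab, count_string_csts, &n);
     // ... vec_alloc a vector V with room for N elements ...
     struct asan_add_string_csts_data aascd;
     aascd.type = type;  // the __asan_global element type
     aascd.v = v;
     htab_traverse (htab, add_string_csts, &aascd);  */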
/* Called via htab_traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */
static int
add_string_csts (void **slot, void *data)
{
  struct constant_descriptor_tree *desc
    = (struct constant_descriptor_tree *) *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      struct asan_add_string_csts_data *aascd
        = (struct asan_add_string_csts_data *) data;
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
                       aascd->type, aascd->v);
    }
  return 1;
}
/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;
/* Module-level instrumentation.
   - Insert __asan_init() into the list of CTORs.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  struct varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
  append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->symbol.decl)
        && asan_protect_global (vnode->symbol.decl))
      ++gcount;
  htab_t const_desc_htab = constant_pool_htab ();
  htab_traverse (const_desc_htab, count_string_csts, &gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
                        type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
        if (TREE_ASM_WRITTEN (vnode->symbol.decl)
            && asan_protect_global (vnode->symbol.decl))
          asan_add_global (vnode->symbol.decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      htab_traverse (const_desc_htab, add_string_csts, &aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_assemble_decl (varpool_node_for_decl (var));

      fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
                                                 build_fold_addr_expr (var),
                                                 gcount_tree),
                                &asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
                                                 build_fold_addr_expr (var),
                                                 gcount_tree),
                                &dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements,
                                 MAX_RESERVED_INIT_PRIORITY - 1);
    }
  cgraph_build_static_cdtor ('I', asan_ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
  flag_sanitize |= SANITIZE_ADDRESS;
}
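/* The net effect of the above, when protected globals exist, is
   conceptually a pair of static ctor/dtor functions along the lines of

     // constructor, priority MAX_RESERVED_INIT_PRIORITY - 1
     __asan_init ();
     __asan_register_globals (&.LASAN0, gcount);

     // matching destructor
     __asan_unregister_globals (&.LASAN0, gcount);

   This is a sketch; the actual functions are emitted by
   cgraph_build_static_cdtor and have target-dependent names.  */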
/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  return 0;
}

static bool
gate_asan (void)
{
  return (flag_sanitize & SANITIZE_ADDRESS) != 0
          && !lookup_attribute ("no_sanitize_address",
                                DECL_ATTRIBUTES (current_function_decl));
}
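/* For example, a user can exempt one function from instrumentation
   with the attribute tested above:

     __attribute__ ((no_sanitize_address))
     void touch_redzone (char *p) { p[-1] = 0; }

   touch_redzone is a made-up function shown for illustration only.  */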
namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_flow | TODO_verify_stmts
    | TODO_update_ssa ), /* todo_flags_finish */
};
class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (ctxt_); }
  bool gate () { return gate_asan (); }
  unsigned int execute () { return asan_instrument (); }

}; // class pass_asan
} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}
static bool
gate_asan_O0 (void)
{
  return !optimize && gate_asan ();
}
namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_flow | TODO_verify_stmts
    | TODO_update_ssa ), /* todo_flags_finish */
};
class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_asan_O0 (); }
  unsigned int execute () { return asan_instrument (); }

}; // class pass_asan_O0
} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}
#include "gt-asan.h"