/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2015 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "gimple-pretty-print.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "gimple-builder.h"
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
         __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
         if ((X & 7) + N - 1 >= ShadowValue)
           __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted into the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   API versions are listed in libsanitizer/asan/asan_interface_internal.h.
   The run-time library redefines malloc (so that red zones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   and provides the __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free bugs in the heap, on the stack and for global variables.
   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
           the next slot be 32 bytes aligned; this one is called Partial
           Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
           'LEFT RedZone']
   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as follows:

   1/ The first 8 bytes contain a magical asan number that is always
   0x41B58AB3.

   2/ The following 8 bytes contain a pointer to a string (to be
   parsed at runtime by the asan run-time library), whose format is
   the following:

    "<function-name> <space> <num-of-variables-on-the-stack>
    (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
    <length-of-var-in-bytes> ){n} "

   where '(...){n}' means the content inside the parenthesis occurs 'n'
   times, with 'n' being the number of variables on the stack.
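
   For the example function 'foo' above, this string would look
   something like "foo 2 32 8 96 24 " -- two variables, with 'b' at
   32-byte-aligned offset 32 and length 8, and 'a' at offset 96 and
   length 24.  (The offsets are derived from the slot layout above and
   are illustrative only.)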
   3/ The following 8 bytes contain the PC of the current function which
   will be used by the run-time library to print an error message.

   4/ The following 8 bytes are reserved for internal use by the run-time.

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.
   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to do
   the populating of the relevant shadow memory regions at load time.

   So the global variables are laid out as to insert a red zone between
   them.  The red zones are sized so that each variable starts on a
   32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to a struct that contains source location, could be NULL.
       __asan_global_source_location *__location;
     };

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;

/* Sets shadow offset to value in string VAL.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}
/* Set the list of user-defined sections that need to be sanitized.  */

void
set_sanitized_sections (const char *sections)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    free (pat);
  sanitized_sections.truncate (0);

  for (const char *s = sections; *s; )
    {
      const char *end;
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      sanitized_sections.safe_push (xstrndup (s, len));
      s = *end ? end + 1 : end;
    }
}
/* Checks whether section SEC should be sanitized.  */

static bool
section_sanitized_p (const char *sec)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    if (fnmatch (pat, sec, FNM_PERIOD) == 0)
      return true;
  return false;
}
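
/* For instance, after set_sanitized_sections ("mysec,dat*"), the
   patterns are matched with fnmatch, so section_sanitized_p ("data")
   is true while section_sanitized_p ("mysec2") is false.  */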
/* Returns the Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}
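
/* Note: unless the offset is overridden on the command line
   (via -fasan-shadow-offset=), it comes from the target hook; for
   example, x86_64 GNU/Linux uses 0x7fff8000 to match libsanitizer.  */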
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1- and 2-byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Various flags for Asan builtins.  */
enum asan_check_flags
{
  ASAN_CHECK_STORE = 1 << 0,
  ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
  ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
  ASAN_CHECK_LAST = 1 << 3
};
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((asan_mem_ref *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<asan_mem_ref> pool;
};

pool_allocator<asan_mem_ref> asan_mem_ref::pool ("asan_mem_ref", 10);
/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref from the memory
   pool and initializes it.  START is the address of (or the
   expression pointing to) the beginning of the memory reference.
   ACCESS_SIZE is the size of the access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref = new asan_mem_ref;

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}
/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}
static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}
/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}
/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  asan_mem_ref::pool.release ();
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}
/* Return true iff access to the memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
    && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gcall *call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref,
			      bool *intercepted_p)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;
      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      dest = gimple_call_arg (call, 0);
      /* DEST represents the address of a memory location.
	 instrument_derefs wants the memory location, so let's
	 dereference the address DEST before handing it to
	 instrument_derefs.  */
      if (TREE_CODE (dest) == ADDR_EXPR)
	dest = TREE_OPERAND (dest, 0);
      else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
	dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
		       dest, build_int_cst (TREE_TYPE (dest), 0));
      else
	gcc_unreachable ();

      access_size = int_size_in_bytes (TREE_TYPE (dest));
      break;

    default:
      /* The other memory access builtins are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  return false;
}
/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}
/* Initialize the shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}
/* Create an ADDR_EXPR of a STRING_CST with the text of the PP pretty
   printer.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}
/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't call a library call
   here though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
/* Emit the LASANPC label, which marks the function start PC recorded
   in the stack frame description.  */

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
}
/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, epilogue sequence returned.  BASE is the register holding the
   stack base, relative to which the offsets in the OFFSETS array are
   expressed.  The OFFSETS array contains pairs of offsets in reverse order,
   always the end offset of some gap that needs protection followed by its
   starting offset, and DECLS is an array of representative decls for each
   var partition.  LENGTH is the length of the OFFSETS array, DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS include gap before the first
   variable as well as gaps after each stack variable).  PBASE is, if
   non-NULL, some pseudo register which stack vars DECL_RTLs are based on.
   Either BASE should be assigned to PBASE, when not doing use after return
   protection, or corresponding address based on __asan_stack_malloc*
   return value.  */
rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[32];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);
  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node),
				     addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();
  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}
/* Return true if DECL, a global var, might be overridden and needs
   therefore a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}
/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: Linux kernel is built with -fno-common, so we do instrument
	 globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section
	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ())
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
		   int *nargs)
{
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}
/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
	    int *nargs)
{
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
	    BUILT_IN_ASAN_LOAD2_NOABORT,
	    BUILT_IN_ASAN_LOAD4_NOABORT,
	    BUILT_IN_ASAN_LOAD8_NOABORT,
	    BUILT_IN_ASAN_LOAD16_NOABORT,
	    BUILT_IN_ASAN_LOADN_NOABORT },
	  { BUILT_IN_ASAN_STORE1_NOABORT,
	    BUILT_IN_ASAN_STORE2_NOABORT,
	    BUILT_IN_ASAN_STORE4_NOABORT,
	    BUILT_IN_ASAN_STORE8_NOABORT,
	    BUILT_IN_ASAN_STORE16_NOABORT,
	    BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */

gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
    ? PROB_VERY_UNLIKELY
    : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gcond *cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
			      /*before_p=*/true,
			      then_more_likely_p,
			      /*create_then_fallthru_edge=*/true,
			      then_bb,
			      fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}
/* Build
   (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
			   base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
			   gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
			   gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
	      build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
				  TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* LEN can already have the necessary size and precision;
   in that case, do not create a new variable.  */

static tree
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (ptrofftype_p (len))
    return len;
  gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				  NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for memory region access.
   IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
   non-zero length.  ALIGN tells the alignment of the accessed memory
   object.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */

static void
build_check_stmt (location_t loc, tree base, tree len,
		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
		  bool is_non_zero_len, bool before_p, bool is_store,
		  bool is_scalar_access, unsigned int align = 0)
{
  gimple_stmt_iterator gsi = *iter;
  gimple g;

  gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));

  gsi = *iter;

  base = unshare_expr (base);
  base = maybe_create_ssa_name (loc, base, &gsi, before_p);

  if (len)
    {
      len = unshare_expr (len);
      len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
    }
  else
    {
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }

  if (size_in_bytes > 1)
    {
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
	  || size_in_bytes > 16)
	is_scalar_access = false;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
	{
	  /* On non-strict alignment targets, if
	     16-byte access is just 8-byte aligned,
	     this will result in misaligned shadow
	     memory 2 byte load, but otherwise can
	     be handled using one read.  */
	  if (size_in_bytes != 16
	      || STRICT_ALIGNMENT
	      || align < 8 * BITS_PER_UNIT)
	    is_scalar_access = false;
	}
    }

  HOST_WIDE_INT flags = 0;
  if (is_store)
    flags |= ASAN_CHECK_STORE;
  if (is_non_zero_len)
    flags |= ASAN_CHECK_NON_ZERO_LEN;
  if (is_scalar_access)
    flags |= ASAN_CHECK_SCALAR_ACCESS;

  g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
				  build_int_cst (integer_type_node, flags),
				  base, len,
				  build_int_cst (integer_type_node,
						 align / BITS_PER_UNIT));
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  else
    {
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      gsi_next (&gsi);
      *iter = gsi;
    }
}
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  if (is_store && !ASAN_INSTRUMENT_WRITES)
    return;
  if (!is_store && !ASAN_INSTRUMENT_READS)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case BIT_FIELD_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
				    &mode, &unsignedp, &volatilep, false);

  if (TREE_CODE (t) == COMPONENT_REF
      && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
    {
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
      instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
				       TREE_OPERAND (t, 0), repr,
				       NULL_TREE), location, is_store);
      return;
    }

  if (bitpos % BITS_PER_UNIT
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    return;

  if (TREE_CODE (inner) == VAR_DECL
      && offset == NULL_TREE
      && bitpos >= 0
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
	return;
      if (!ASAN_GLOBALS && is_global_var (inner))
	return;
      if (!TREE_STATIC (inner))
	{
	  /* Automatic vars in the current function will be always
	     accessible.  */
	  if (decl_function_context (inner) == current_function_decl)
	    return;
	}
      /* Always instrument external vars, they might be dynamically
	 initialized.  */
      else if (!DECL_EXTERNAL (inner))
	{
	  /* For static vars if they are known not to be dynamically
	     initialized, they will be always accessible.  */
	  varpool_node *vnode = varpool_node::get (inner);
	  if (vnode && !vnode->dynamically_initialized)
	    return;
	}
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
			/*is_non_zero_len*/size_in_bytes > 0,
			/*before_p=*/true, is_store,
			/*is_scalar_access*/true, align);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}
/* Insert a memory reference into the hash table if its access length
   can be determined at compile time.  */

static void
maybe_update_mem_ref_hash_table (tree base, tree len)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
    return;

  HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (size_in_bytes != -1)
    update_mem_ref_hash_table (base, size_in_bytes);
}
/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   the sizeof (*BASE) bytes).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

static void
instrument_mem_region_access (tree base, tree len,
                              gimple_stmt_iterator *iter,
                              location_t location, bool is_store)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if ((size_in_bytes == -1)
      || !has_mem_ref_been_instrumented (base, size_in_bytes))
    build_check_stmt (location, base, len, size_in_bytes, iter,
                      /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
                      is_store, /*is_scalar_access*/false, /*align*/0);

  maybe_update_mem_ref_hash_table (base, len);
  *iter = gsi_for_stmt (gsi_stmt (*iter));
}
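/* A sketch of the region case: for an access of LEN bytes starting at
   BASE, with LEN possibly unknown at compile time, the check that
   eventually gets expanded is conceptually

     if (len != 0)
       {
         <check shadow of base>;            // first byte
         <check shadow of base + len - 1>;  // last byte
       }

   which catches regions that start or end in invalid memory; see
   asan_expand_check_ifn for the actual expansion.  */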
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  if (!ASAN_MEMINTRIN)
    return false;

  bool iter_advanced_p = false;
  gcall *call = as_a <gcall *> (gsi_stmt (*iter));

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  location_t loc = gimple_location (call);

  asan_mem_ref src0, src1, dest;
  asan_mem_ref_init (&src0, NULL, 1);
  asan_mem_ref_init (&src1, NULL, 1);
  asan_mem_ref_init (&dest, NULL, 1);

  tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
  bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
    dest_is_deref = false, intercepted_p = true;

  if (get_mem_refs_of_builtin_call (call,
                                    &src0, &src0_len, &src0_is_store,
                                    &src1, &src1_len, &src1_is_store,
                                    &dest, &dest_len, &dest_is_store,
                                    &dest_is_deref, &intercepted_p))
    {
      if (dest_is_deref)
        {
          instrument_derefs (iter, dest.start, loc, dest_is_store);
          gsi_next (iter);
          iter_advanced_p = true;
        }
      else if (!intercepted_p
               && (src0_len || src1_len || dest_len))
        {
          if (src0.start != NULL_TREE)
            instrument_mem_region_access (src0.start, src0_len,
                                          iter, loc, /*is_store=*/false);
          if (src1.start != NULL_TREE)
            instrument_mem_region_access (src1.start, src1_len,
                                          iter, loc, /*is_store=*/false);
          if (dest.start != NULL_TREE)
            instrument_mem_region_access (dest.start, dest_len,
                                          iter, loc, /*is_store=*/true);

          *iter = gsi_for_stmt (call);
          gsi_next (iter);
          iter_advanced_p = true;
        }
      else
        {
          if (src0.start != NULL_TREE)
            maybe_update_mem_ref_hash_table (src0.start, src0_len);
          if (src1.start != NULL_TREE)
            maybe_update_mem_ref_hash_table (src1.start, src1_len);
          if (dest.start != NULL_TREE)
            maybe_update_mem_ref_hash_table (dest.start, dest_len);
        }
    }
  return iter_advanced_p;
}
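/* For example, for a call

     memcpy (d, s, n);

   get_mem_refs_of_builtin_call reports a source region (s, n) and a
   destination region (d, n).  Since memcpy is normally intercepted by
   the run-time library (intercepted_p), the checking happens in the
   interceptor itself and we only record both ranges in the hash table
   so that following statements need not re-check them; for a builtin
   that is not intercepted, the code above would instead emit a load
   check on (s, n) and a store check on (d, n) before the call.  */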
/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the relevant memory reference whose access has been
   instrumented is added to the memory references hash table.  */

static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      is_store = true;
      instrument_derefs (iter, ref_expr,
                         gimple_location (s),
                         is_store);
      is_instrumented = true;
    }

  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      is_store = false;
      instrument_derefs (iter, ref_expr,
                         gimple_location (s),
                         is_store);
      is_instrumented = true;
    }

  if (is_instrumented)
    gsi_next (iter);

  return is_instrumented;
}
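/* E.g. an aggregate copy (names hypothetical)

     *p = *q;

   is a single GIMPLE assignment that is both a store (its LHS) and a
   load (its RHS), so both branches above fire and two checks are
   emitted, one per memory reference.  */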
/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
        {
          tree callee = gimple_call_fndecl (stmt);
          switch (DECL_FUNCTION_CODE (callee))
            {
            case BUILT_IN_UNREACHABLE:
            case BUILT_IN_TRAP:
              /* Don't instrument these.  */
              return false;
            default:
              break;
            }
        }
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }
  return false;
}
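/* Illustration: given a noreturn call such as

     exit (1);

   the code above inserts

     __asan_handle_no_return ();
     exit (1);

   so the run-time library can unpoison the whole stack first; a
   noreturn call might otherwise leave poisoned stack memory behind
   (e.g. when control re-enters live frames via longjmp).  */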
/* Walk each instruction of all basic blocks and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block)
        continue;

      /* Flush the mem ref hash table, if current bb doesn't have
         exactly one predecessor, or if that predecessor (skipping
         over asan created basic blocks) isn't the last processed
         basic block.  Thus we effectively flush on extended basic
         block boundaries.  */
      while (single_pred_p (prev_bb))
        {
          prev_bb = single_pred (prev_bb);
          if (prev_bb->index < saved_last_basic_block)
            break;
        }

      if (prev_bb != last_bb)
        empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
        {
          gimple s = gsi_stmt (i);

          if (has_stmt_been_instrumented_p (s))
            gsi_next (&i);
          else if (gimple_assign_single_p (s)
                   && !gimple_clobber_p (s)
                   && maybe_instrument_assignment (&i))
            /* Nothing to do as maybe_instrument_assignment advanced
               the iterator I.  */;
          else if (is_gimple_call (s) && maybe_instrument_call (&i))
            /* Nothing to do as maybe_instrument_call
               advanced the iterator I.  */;
          else
            {
              /* No instrumentation happened.

                 If the current instruction is a function call that
                 might free something, let's forget about the memory
                 references that got instrumented.  Otherwise we might
                 miss some instrumentation opportunities.  */
              if (is_gimple_call (s) && !nonfreeing_call_p (s))
                empty_mem_ref_hash_table ();

              gsi_next (&i);
            }
        }
    }
  free_mem_ref_resources ();
}
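/* For instance, in a CFG fragment like (block numbers illustrative)

     bb2 -> bb3 -> bb4
     bb2 ---------> bb4

   bb3 has bb2 as its single predecessor, so checks recorded while
   processing bb2 remain valid in bb3 and are not re-emitted; bb4 has
   two predecessors, so the hash table is flushed before bb4 is
   processed.  */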
/* Build the call
     __asan_before_dynamic_init (module_name)
   or
     __asan_after_dynamic_init ()
   depending on AFTER_P.  */

tree
asan_dynamic_init_call (bool after_p)
{
  tree fn = builtin_decl_implicit (after_p
                                   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
                                   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
  tree module_name_cst = NULL_TREE;
  if (!after_p)
    {
      pretty_printer module_name_pp;
      pp_string (&module_name_pp, main_input_filename);

      if (shadow_ptr_types[0] == NULL_TREE)
        asan_init_shadow_ptr_types ();
      module_name_cst = asan_pp_string (&module_name_pp);
      module_name_cst = fold_convert (const_ptr_type_node,
                                      module_name_cst);
    }

  return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
}
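/* Illustration (file name hypothetical): in a translation unit
   "foo.cc" with dynamically initialized globals, each initializer
   ends up bracketed as

     __asan_before_dynamic_init ("foo.cc");
     ... run the initializer ...
     __asan_after_dynamic_init ();

   which is what lets the run-time library detect initialization-order
   bugs when its check_initialization_order mode is enabled.  */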
/* Build
   struct __asan_global
   {
     const void *__beg;
     uptr __size;
     uptr __size_with_redzone;
     const void *__name;
     const void *__module_name;
     uptr __has_dynamic_init;
     __asan_global_source_location *__location;
   } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[7]
    = { "__beg", "__size", "__size_with_redzone",
        "__name", "__module_name", "__has_dynamic_init", "__location"};
  tree fields[7], ret;
  int i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < 7; i++)
    {
      fields[i]
        = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                      get_identifier (field_names[i]),
                      (i == 0 || i == 3) ? const_ptr_type_node
                      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
        DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  tree type_decl = build_decl (input_location, TYPE_DECL,
                               get_identifier ("__asan_global"), ret);
  DECL_IGNORED_P (type_decl) = 1;
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = type_decl;
  TYPE_STUB_DECL (ret) = type_decl;
  layout_type (ret);
  return ret;
}
/* Append the description of a single global DECL into vector V.
   TYPE is the __asan_global struct type as returned by
   asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
                            VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node,
                                        build_fold_addr_expr (refdecl)));
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          fold_convert (const_ptr_type_node, module_name_cst));
  varpool_node *vnode = varpool_node::get (decl);
  int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
                          build_int_cst (uptr, has_dynamic_init));
  tree locptr = NULL_TREE;
  location_t loc = DECL_SOURCE_LOCATION (decl);
  expanded_location xloc = expand_location (loc);
  if (xloc.file != NULL)
    {
      static int lasanloccnt = 0;
      char buf[25];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
      tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
                             ubsan_get_source_location_type ());
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      pretty_printer filename_pp;
      pp_string (&filename_pp, xloc.file);
      tree str = asan_pp_string (&filename_pp);
      tree ctor = build_constructor_va (TREE_TYPE (var), 3,
                                        NULL_TREE, str, NULL_TREE,
                                        build_int_cst (unsigned_type_node,
                                                       xloc.line), NULL_TREE,
                                        build_int_cst (unsigned_type_node,
                                                       xloc.column));
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      locptr = fold_convert (uptr, build_fold_addr_expr (var));
    }
  else
    locptr = build_int_cst (uptr, 0);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
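/* For illustration (values approximate, names hypothetical): for a
   4-byte global

     int g;   // defined in foo.cc

   the constructor element appended to V describes roughly

     { &g,            // __beg
       4,             // __size
       4 + redzone,   // __size_with_redzone
       "g",           // __name
       "foo.cc",      // __module_name
       0,             // __has_dynamic_init
       &.LASANLOC1 }  // __location, or 0 when no location is known

   in the layout built by asan_global_struct above.  */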
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */

void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
                                ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
                                ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
                                pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_SIZE_CONST_PTR_INT
    = build_function_type_list (size_type_node, const_ptr_type_node,
                                integer_type_node, NULL_TREE);
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
                                                TYPE_QUAL_VOLATILE
                                                | TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
        = build_function_type_list (boolt, vptr, ptr_type_node, ix,
                                    integer_type_node, integer_type_node,
                                    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
        = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
        = build_function_type_list (ix, vptr, ix, integer_type_node,
                                    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
        = build_function_type_list (void_type_node, vptr, ix,
                                    integer_type_node, NULL_TREE);
    }
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
                               BUILT_IN_NORMAL, NAME, NULL_TREE); \
  set_call_expr_flags (decl, ATTRS); \
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_object_size, but that might
     not be available for e.g. Fortran at this point.  We use
     DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
  if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
      && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
    DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
                           BT_FN_SIZE_CONST_PTR_INT,
                           ATTR_PURE_NOTHROW_LEAF_LIST)

#undef DEF_SANITIZER_BUILTIN
}
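/* As a sketch of the mechanism, an entry in sanitizer.def of the form
   (shown schematically; see sanitizer.def for the real entries)

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init",
                            BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   expands through the DEF_SANITIZER_BUILTIN macro above into

     decl = add_builtin_function ("__builtin___asan_init", BT_FN_VOID,
                                  BUILT_IN_ASAN_INIT, BUILT_IN_NORMAL,
                                  "__asan_init", NULL_TREE);
     set_call_expr_flags (decl, ATTR_NOTHROW_LEAF_LIST);
     set_builtin_decl (BUILT_IN_ASAN_INIT, decl, true);

   making every sanitizer entry point a normal builtin known to the
   middle end.  */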
/* Called via htab_traverse.  Count the number of emitted
   STRING_CSTs in the constant hash table.  */

static int
count_string_csts (constant_descriptor_tree **slot,
                   unsigned HOST_WIDE_INT *data)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*data;
  return 1;
}
/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};
/* Called via hash_table::traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

static int
add_string_csts (constant_descriptor_tree **slot,
                 asan_add_string_csts_data *aascd)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
                       aascd->type, aascd->v);
    }
  return 1;
}
/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;

/* Module-level instrumentation.
   - Insert __asan_init_vN () into the list of CTORs.
   - TODO: insert redzones around globals.  */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after the .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  /* For user-space we want asan constructors to run first.
     The Linux kernel does not support priorities other than the default,
     and the only other user of constructors is coverage, so we run with
     the default priority.  */
  int priority = flag_sanitize & SANITIZE_USER_ADDRESS
                 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;

  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
    }
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
        && asan_protect_global (vnode->decl))
      ++gcount;
  hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
  const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
    (&gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
                        type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
        if (TREE_ASM_WRITTEN (vnode->decl)
            && asan_protect_global (vnode->decl))
          asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
        (&aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);

      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
                                                 build_fold_addr_expr (var),
                                                 gcount_tree),
                                &asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
                                                 build_fold_addr_expr (var),
                                                 gcount_tree),
                                &dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements, priority);
    }
  if (asan_ctor_statements)
    cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
  flag_sanitize |= SANITIZE_ADDRESS;
}
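/* The net effect, sketched for a translation unit with N protected
   globals (symbol names illustrative):

     static struct __asan_global .LASAN0[N] = { ... };

     // module constructor, emitted via cgraph_build_static_cdtor ('I', ...):
     __asan_init ();                           // user-space asan only
     __asan_register_globals (&.LASAN0, N);

     // matching destructor, emitted via cgraph_build_static_cdtor ('D', ...):
     __asan_unregister_globals (&.LASAN0, N);  */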
/* Expand the ASAN_{LOAD,STORE} builtins.  */

bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
  gimple g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);

  bool recover_p
    = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;

  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);
  HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));

  HOST_WIDE_INT size_in_bytes
    = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (use_calls)
    {
      /* Instrument using callbacks.  */
      gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                      NOP_EXPR, base);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree base_addr = gimple_assign_lhs (g);

      int nargs;
      tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
      if (nargs == 1)
        g = gimple_build_call (fun, 1, base_addr);
      else
        {
          gcc_assert (nargs == 2);
          g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   NOP_EXPR, len);
          gimple_set_location (g, loc);
          gsi_insert_before (iter, g, GSI_SAME_STMT);
          tree sz_arg = gimple_assign_lhs (g);
          g = gimple_build_call (fun, nargs, base_addr, sz_arg);
        }
      gimple_set_location (g, loc);
      gsi_replace (iter, g, false);
      return false;
    }

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* So, the length of the memory area to asan-protect is
         non-constant.  Let's guard the generated instrumentation code
         like:

         if (len != 0)
           {
             // asan instrumentation code goes here.
           }
         // fallthrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
                             len,
                             build_int_cst (TREE_TYPE (len), 0),
                             NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
                                  /*then_more_likely_p=*/true,
                                  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
         pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
         we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, /*before_p*/false,
                                  /*then_more_likely_p=*/false,
                                  /*create_then_fallthru_edge*/recover_p,
                                  &then_bb,
                                  &else_bb);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                           NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
                                             shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1, 2 and 4 byte accesses.  */
      /* Test (shadow != 0)
         & (((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow).  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
                                             shadow_ptr_type);
      gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq seq = NULL;
      gimple_seq_add_stmt (&seq, shadow_test);
      /* Aligned (>= 8 bytes) accesses can test just
         (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
         to be 0.  */
      if (align < 8)
        {
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
                                                   base_addr, 7));
          gimple_seq_add_stmt (&seq,
                               build_type_cast (shadow_type,
                                                gimple_seq_last (seq)));
          if (real_size_in_bytes > 1)
            gimple_seq_add_stmt (&seq,
                                 build_assign (PLUS_EXPR,
                                               gimple_seq_last (seq),
                                               real_size_in_bytes - 1));
          t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
        }
      else
        t = build_int_cst (shadow_type, real_size_in_bytes - 1);
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
                                               gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, loc);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

      /* For non-constant, misaligned or otherwise weird access sizes,
         check first and last byte.  */
      if (size_in_bytes == -1)
        {
          g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   MINUS_EXPR, len,
                                   build_int_cst (pointer_sized_int_node, 1));
          gimple_set_location (g, loc);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);
          tree last = gimple_assign_lhs (g);
          g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   PLUS_EXPR, base_addr, last);
          gimple_set_location (g, loc);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);
          tree base_end_addr = gimple_assign_lhs (g);

          tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
                                                 shadow_ptr_type);
          gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
          gimple_seq seq = NULL;
          gimple_seq_add_stmt (&seq, shadow_test);
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
                                                   base_end_addr, 7));
          gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
                                                      gimple_seq_last (seq)));
          gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
                                                   gimple_seq_last (seq),
                                                   shadow));
          gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
                                                   gimple_seq_last (seq)));
          gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
                                                   gimple_seq_last (seq)));
          t = gimple_assign_lhs (gimple_seq_last (seq));
          gimple_seq_set_location (seq, loc);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
        }
    }

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
                         NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}
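/* Putting it together (variable names hypothetical), the inline
   expansion of a 4-byte load check built above is conceptually

     shadow = *(char *) ((addr >> 3) + shadow_offset);
     if (shadow != 0 && ((addr & 7) + 3) >= shadow)
       __asan_report_load4 (addr);

   whereas with USE_CALLS the whole test is outlined into the run-time
   library as a single call, roughly

     __asan_load4 (addr);

   with the exact callee chosen by check_func / report_error_func.  */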
/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  return 0;
}

static bool
gate_asan (void)
{
  return (flag_sanitize & SANITIZE_ADDRESS) != 0
          && !lookup_attribute ("no_sanitize_address",
                                DECL_ATTRIBUTES (current_function_decl));
}
namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}
namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}

#include "gt-asan.h"