/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2017 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "gimple-builder.h"
#include "tree-inline.h"
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
         __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
         if ((X & 7) + N - 1 >= ShadowValue)
           __asan_report_loadN(X);
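   As a worked illustration (the concrete numbers are made up): suppose
   the 8-byte granule containing X has ShadowValue == 5, meaning only
   its first 5 bytes are addressable.  A 4-byte load with (X & 7) == 2
   touches byte offsets 2..5 of the granule; since 2 + 4 - 1 == 5 >= 5,
   the last byte is out of bounds and __asan_report_load4(X) fires.
   The same load at (X & 7) == 1 touches offsets 1..4 and passes,
   because 1 + 4 - 1 == 4 < 5.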
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted into the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   and provides the __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:
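     A minimal illustrative sketch (the function name and statements are
     assumptions; the sizes are chosen to match the 24-byte 'a' and
     8-byte 'b' slots described below):

       int
       foo ()
       {
         char a[24] = {0};
         int b[2] = {0};

         a[5] = 1;
         b[1] = 2;

         return a[5] + b[1];
       }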
   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

     Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

     Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
             the next slot be 32 bytes aligned; this one is called Partial
             Redzone; this 32 bytes alignment is an asan constraint]

     Slot 3/ [24 bytes for variable 'a']

     Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

     Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

     Slot 6/ [8 bytes for variable 'b']

     Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
             'LEFT RedZone']
   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

     1/ The first 8 bytes contain a magical asan number that is always
     0x41B58AB3.

     2/ The following 8 bytes contain a pointer to a string (to be
     parsed at runtime by the asan run-time library), whose format is
     the following:

      "<function-name> <space> <num-of-variables-on-the-stack>
      (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
      <length-of-var-in-bytes> <space> <length-of-var-name> <space>
      <var-name> ){n} "

     where '(...){n}' means the content inside the parenthesis occurs 'n'
     times, with 'n' being the number of variables on the stack.
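     For instance (values made up, but consistent with the layout above,
     for a hypothetical function 'foo' with variables 'b' and 'a'):

      "foo 2 32 8 1 b 96 24 1 a "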
     3/ The following 8 bytes contain the PC of the current function which
     will be used by the run-time library to print an error message.

     4/ The following 8 bytes are reserved for internal use by the run-time.
   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This is the concatenation of the shadow of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.
   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.
   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to do
   the populating of the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone between
   them.  The size of the red zones is chosen so that each variable starts
   on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:
     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to struct that contains source location, could be NULL.
       __asan_global_source_location *__location;
     }
   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
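/* As a rough sketch of that scheme (illustrative only; the identifiers
   below are made up and the actual emission happens elsewhere in GCC),
   each translation unit ends up with code morally equivalent to:

     static struct __asan_global __asan_global_descrs[N]; // filled statically
     static void asan_ctor ()
     { __asan_register_globals (__asan_global_descrs, N); }
     static void asan_dtor ()
     { __asan_unregister_globals (__asan_global_descrs, N); }
*/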
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;

/* Set of variable declarations that are going to be guarded by
   use-after-scope sanitizer.  */
static hash_set<tree> *asan_handled_variables = NULL;

hash_set<tree> *asan_used_labels = NULL;
/* Sets shadow offset to value in string VAL.  */

static bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}
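/* Illustration (assumed example value; -fasan-shadow-offset= is the
   option this hooks up to): -fasan-shadow-offset=0x7fff8000 hands
   VAL == "0x7fff8000" to the function above; base 0 in strtoull lets
   decimal, octal and hexadecimal spellings all work.  */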
/* Set list of user-defined sections that need to be sanitized.  */

void
set_sanitized_sections (const char *sections)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    free (pat);
  sanitized_sections.truncate (0);

  for (const char *s = sections; *s; )
    {
      const char *end;
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      sanitized_sections.safe_push (xstrndup (s, len));
      s = *end ? end + 1 : end;
    }
}
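/* Illustration (made-up option value; the splitting is what the
   function above implements): -fsanitize-sections=".mysec*,.other"
   is split on ',' into the glob patterns ".mysec*" and ".other",
   which section_sanitized_p below matches with fnmatch.  */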
/* Return TRUE if STMT is an internal call to ASAN_MARK with flag FLAG.  */

static bool
asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
{
  return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
          && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
}
bool
asan_sanitize_stack_p (void)
{
  return ((flag_sanitize & SANITIZE_ADDRESS)
          && ASAN_STACK
          && !asan_no_sanitize_address_p ());
}
/* Checks whether section SEC should be sanitized.  */

static bool
section_sanitized_p (const char *sec)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    if (fnmatch (pat, sec, FNM_PERIOD) == 0)
      return true;
  return false;
}
/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}
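/* (On typical targets the hook returns a constant such as 0x7fff8000
   on x86_64-linux -- a commonly cited value, not one defined in this
   file -- unless the user overrode it with -fasan-shadow-offset=.)  */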
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1, 2 or 4 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[3];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}
/* Allocates memory for an instance of asan_mem_ref in the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref = asan_mem_ref_pool.allocate ();

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}
/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};
/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}
/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
                            const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}
static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}
/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}
/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  asan_mem_ref_pool.release ();
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}
/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
    && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}
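/* For intuition (hypothetical GIMPLE): once the 4-byte store
   "*p_1 = 0;" has been instrumented, the pair (p_1, 4) sits in the
   hash table, so a subsequent 4-byte (or smaller) access through p_1
   is reported as already covered and no second check is emitted.  */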
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
                           asan_mem_ref *ref,
                           bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gcall *call,
                              asan_mem_ref *src0,
                              tree *src0_len,
                              bool *src0_is_store,
                              asan_mem_ref *src1,
                              tree *src1_len,
                              bool *src1_is_store,
                              asan_mem_ref *dst,
                              tree *dst_len,
                              bool *dst_is_store,
                              bool *dest_is_deref,
                              bool *intercepted_p)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

      /* And now the __atomic* and __sync builtins.
         These are handled differently from the classical memory
         access builtins above.  */
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* FALLTHRU */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      dest = gimple_call_arg (call, 0);
      /* DEST represents the address of a memory location.
         instrument_derefs wants the memory location, so let's
         dereference the address DEST before handing it to
         instrument_derefs.  */
      if (TREE_CODE (dest) == ADDR_EXPR)
        dest = TREE_OPERAND (dest, 0);
      else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
        dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
                       dest, build_int_cst (TREE_TYPE (dest), 0));
      else
        gcc_unreachable ();

      access_size = int_size_in_bytes (TREE_TYPE (dest));
      break;

    default:
      /* The other memory access builtins are not instrumented in this
         function because they either don't have any length parameter,
         or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
        {
          src0->start = source0;
          src0->access_size = access_size;
          *src0_len = len;
          *src0_is_store = false;
        }

      if (source1 != NULL_TREE)
        {
          src1->start = source1;
          src1->access_size = access_size;
          *src1_len = len;
          *src1_is_store = false;
        }

      if (dest != NULL_TREE)
        {
          dst->start = dest;
          dst->access_size = access_size;
          *dst_len = len;
          *dst_is_store = true;
        }

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple *stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
                                     &r_is_store))
        return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
        dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
                                        &src0, &src0_len, &src0_is_store,
                                        &src1, &src1_len, &src1_is_store,
                                        &dest, &dest_len, &dest_is_store,
                                        &dest_is_deref, &intercepted_p))
        {
          if (src0.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&src0, src0_len))
            return false;

          if (src1.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&src1, src1_len))
            return false;

          if (dest.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&dest, dest_len))
            return false;

          return true;
        }
    }
  else if (is_gimple_call (stmt) && gimple_store_p (stmt))
    {
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      r.start = gimple_call_lhs (stmt);
      r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
      return has_mem_ref_been_instrumented (&r);
    }

  return false;
}
/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table <asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}
/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  tree types[3] = { signed_char_type_node, short_integer_type_node,
                    integer_type_node };

  for (unsigned i = 0; i < 3; i++)
    {
      shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
      TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
      shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
    }

  initialize_sanitizer_builtins ();
}
/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
                        build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}
/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
           << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}
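/* For intuition (made-up bytes): on a little-endian target the array
   {0x00, 0x00, 0x00, 0xF1} becomes the SImode constant 0xF1000000,
   which, once stored, lays out 00 00 00 F1 in shadow memory in address
   order: 24 addressable bytes followed by one red-zone shadow byte.  */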
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't call a library
   call here though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;

  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
/* Output the "LASANPC" label at the start of the current function; the
   stack frame description string refers to it as the frame's PC.  */

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
                          current_function_funcdef_no);
}
/* Return number of shadow bytes that are occupied by a local variable
   of SIZE bytes.  */

static unsigned HOST_WIDE_INT
shadow_mem_size (unsigned HOST_WIDE_INT size)
{
  return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
}
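/* E.g. with the usual 8-byte shadow granularity, a 17-byte variable
   occupies ROUND_UP (17, 8) / 8 == 3 shadow bytes (sizes illustrative).  */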
/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, epilogue sequence returned.  BASE is the register holding the
   stack base, against which the OFFSETS array offsets are relative.  OFFSETS
   array contains pairs of offsets in reverse order, always the end offset
   of some gap that needs protection followed by starting offset,
   and DECLS is an array of representative decls for each var partition.
   LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
   elements long (OFFSETS include gap before the first variable as well
   as gaps after each stack variable).  PBASE is, if non-NULL, some pseudo
   register which stack vars DECL_RTLs are based on.  Either BASE should be
   assigned to PBASE, when not doing use after return protection, or
   corresponding address based on __asan_stack_malloc* return value.  */

rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
                            HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[32];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
        {
          pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
          pp_space (&asan_pp);
          pp_tree_identifier (&asan_pp, DECL_NAME (decl));
        }
      else
        pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);
  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
         N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
                    ? (64U << use_after_return_class) : 4096U))
        use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
        base_align_bias = ((asan_frame_size + alignb - 1)
                           & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
                         gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
                                          << ASAN_SHADOW_SHIFT)
                                         / BITS_PER_UNIT), Pmode), NULL_RTX,
                         1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
                       gen_int_mode (base_offset - base_align_bias, Pmode),
                       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
        {
          id = get_identifier ("__asan_option_detect_stack_use_after_return");
          decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
                             integer_type_node);
          SET_DECL_ASSEMBLER_NAME (decl, id);
          TREE_ADDRESSABLE (decl) = 1;
          DECL_ARTIFICIAL (decl) = 1;
          DECL_IGNORED_P (decl) = 1;
          DECL_EXTERNAL (decl) = 1;
          TREE_STATIC (decl) = 1;
          TREE_PUBLIC (decl) = 1;
          TREE_USED (decl) = 1;
          asan_detect_stack_use_after_return = decl;
        }
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
                               VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
                use_after_return_class);
      ret = init_one_libfunc (buf);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 1,
                                     GEN_INT (asan_frame_size
                                              + base_align_bias),
                                     TYPE_MODE (pointer_sized_int_node));
      /* __asan_stack_malloc_[n] returns a pointer to fake stack if succeeded
         and NULL otherwise.  Check RET value is NULL here and jump over the
         BASE reassignment in this case.  Otherwise, reassign BASE to RET.  */
      int very_unlikely = REG_BR_PROB_BASE / 2000 - 1;
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
                               VOIDmode, 0, lab, very_unlikely);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
                                           gen_int_mode (base_align_bias
                                                         - base_offset, Pmode),
                                           NULL_RTX, 1, OPTAB_DIRECT));
    }
));
1184 mem
= gen_rtx_MEM (ptr_mode
, base
);
1185 mem
= adjust_address (mem
, VOIDmode
, base_align_bias
);
1186 emit_move_insn (mem
, gen_int_mode (ASAN_STACK_FRAME_MAGIC
, ptr_mode
));
1187 mem
= adjust_address (mem
, VOIDmode
, GET_MODE_SIZE (ptr_mode
));
1188 emit_move_insn (mem
, expand_normal (str_cst
));
1189 mem
= adjust_address (mem
, VOIDmode
, GET_MODE_SIZE (ptr_mode
));
1190 ASM_GENERATE_INTERNAL_LABEL (buf
, "LASANPC", current_function_funcdef_no
);
1191 id
= get_identifier (buf
);
1192 decl
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
1193 VAR_DECL
, id
, char_type_node
);
1194 SET_DECL_ASSEMBLER_NAME (decl
, id
);
1195 TREE_ADDRESSABLE (decl
) = 1;
1196 TREE_READONLY (decl
) = 1;
1197 DECL_ARTIFICIAL (decl
) = 1;
1198 DECL_IGNORED_P (decl
) = 1;
1199 TREE_STATIC (decl
) = 1;
1200 TREE_PUBLIC (decl
) = 0;
1201 TREE_USED (decl
) = 1;
1202 DECL_INITIAL (decl
) = decl
;
1203 TREE_ASM_WRITTEN (decl
) = 1;
1204 TREE_ASM_WRITTEN (id
) = 1;
1205 emit_move_insn (mem
, expand_normal (build_fold_addr_expr (decl
)));
1206 shadow_base
= expand_binop (Pmode
, lshr_optab
, base
,
1207 GEN_INT (ASAN_SHADOW_SHIFT
),
1208 NULL_RTX
, 1, OPTAB_DIRECT
);
1210 = plus_constant (Pmode
, shadow_base
,
1211 asan_shadow_offset ()
1212 + (base_align_bias
>> ASAN_SHADOW_SHIFT
));
1213 gcc_assert (asan_shadow_set
!= -1
1214 && (ASAN_RED_ZONE_SIZE
>> ASAN_SHADOW_SHIFT
) == 4);
1215 shadow_mem
= gen_rtx_MEM (SImode
, shadow_base
);
1216 set_mem_alias_set (shadow_mem
, asan_shadow_set
);
1217 if (STRICT_ALIGNMENT
)
1218 set_mem_align (shadow_mem
, (GET_MODE_ALIGNMENT (SImode
)));
1219 prev_offset
= base_offset
;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
        cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
        {
          int i;
          HOST_WIDE_INT aoff
            = base_offset + ((offset - base_offset)
                             & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (aoff - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = aoff;
          for (i = 0; i < 4; i++, aoff += ASAN_SHADOW_GRANULARITY)
            if (aoff < offset)
              {
                if (aoff < offset - (HOST_WIDE_INT)ASAN_SHADOW_GRANULARITY + 1)
                  shadow_bytes[i] = 0;
                else
                  shadow_bytes[i] = offset - aoff;
              }
            else
              shadow_bytes[i] = ASAN_STACK_MAGIC_MIDDLE;
          emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
          offset = aoff;
        }
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
        {
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (offset - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = offset;
          memset (shadow_bytes, cur_shadow_byte, 4);
          emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
          offset += ASAN_RED_ZONE_SIZE;
        }
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();
  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
                               VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
          && can_store_by_pieces (sz, builtin_memset_read_str, &c,
                                  BITS_PER_UNIT, true))
        store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
                         BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
               || !set_storage_via_setmem (shadow_mem,
                                           GEN_INT (sz),
                                           gen_int_mode (c, QImode),
                                           BITS_PER_UNIT, BITS_PER_UNIT,
                                           -1, sz, sz, sz))
        {
          snprintf (buf, sizeof buf, "__asan_stack_free_%d",
                    use_after_return_class);
          ret = init_one_libfunc (buf);
          rtx addr = convert_memory_address (ptr_mode, base);
          rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
          emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
                             GEN_INT (asan_frame_size + base_align_bias),
                             TYPE_MODE (pointer_sized_int_node),
                             orig_addr, ptr_mode);
        }
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  /* Unpoison shadow memory of a stack at the very end of a function.
     As we're poisoning stack variables at the end of their scope,
     shadow memory must be properly unpoisoned here.  The easiest approach
     would be to collect all variables that should not be unpoisoned and
     to unpoison shadow memory of the whole stack except for the ranges
     occupied by these variables.  */
  last_offset = base_offset;
  HOST_WIDE_INT current_offset = last_offset;
  if (length)
    {
      HOST_WIDE_INT var_end_offset = 0;
      HOST_WIDE_INT stack_start = offsets[length - 1];
      gcc_assert (last_offset == stack_start);

      for (int l = length - 2; l > 0; l -= 2)
        {
          HOST_WIDE_INT var_offset = offsets[l];
          current_offset = var_offset;
          var_end_offset = offsets[l - 1];
          HOST_WIDE_INT rounded_size = ROUND_UP (var_end_offset - var_offset,
                                                 ASAN_SHADOW_GRANULARITY);
          tree decl = decls[l / 2 - 1];

          /* Should we unpoison the variable?  */
          if (asan_handled_variables != NULL
              && asan_handled_variables->contains (decl))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  const char *n = (DECL_NAME (decl)
                                   ? IDENTIFIER_POINTER (DECL_NAME (decl))
                                   : "<unknown>");
                  fprintf (dump_file, "Unpoisoning shadow stack for variable: "
                           "%s (%" PRId64 "B)\n", n,
                           var_end_offset - var_offset);
                }

              unsigned HOST_WIDE_INT s
                = shadow_mem_size (current_offset - last_offset);
              asan_clear_shadow (shadow_mem, s);
              HOST_WIDE_INT shift
                = shadow_mem_size (current_offset - last_offset + rounded_size);
              shadow_mem = adjust_address (shadow_mem, VOIDmode, shift);
              last_offset = var_offset + rounded_size;
              current_offset = last_offset;
            }
        }

      /* Handle last redzone.  */
      current_offset = offsets[0];
      asan_clear_shadow (shadow_mem,
                         shadow_mem_size (current_offset - last_offset));
    }

  /* Clean-up set with instrumented stack variables.  */
  delete asan_handled_variables;
  asan_handled_variables = NULL;
  delete asan_used_labels;
  asan_used_labels = NULL;

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}
/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}
/* Return true if DECL, a global var, is an artificial ODR indicator
   symbol and therefore doesn't need protection.  */

static bool
is_odr_indicator (tree decl)
{
  return (DECL_ARTIFICIAL (decl)
          && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
}
/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
         by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
          && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
          && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
        return false;
      return true;
    }
  if (!VAR_P (decl)
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
         the var that is selected by the linker will have
         padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
         Note: Linux kernel is built with -fno-common, so we do instrument
         globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
         into user section from multiple TUs are then assumed
         to be an array of such vars, putting padding in there
         breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
          && !symtab_node::get (decl)->implicit_section
          && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ()
      || is_odr_indicator (decl))
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
                   int *nargs)
{
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
            BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
            BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
          { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
            BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
            BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
        { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
          { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}
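/* For example, a 4-byte store with recovery disabled maps to
   report[0][1][exact_log2 (4)], i.e. __asan_report_store4, while a
   variable-length load under -fsanitize-recover=address maps to
   __asan_report_load_n_noabort.  */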
/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
            int *nargs)
{
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
            BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
            BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
          { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
            BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
            BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
        { { BUILT_IN_ASAN_LOAD1_NOABORT,
            BUILT_IN_ASAN_LOAD2_NOABORT,
            BUILT_IN_ASAN_LOAD4_NOABORT,
            BUILT_IN_ASAN_LOAD8_NOABORT,
            BUILT_IN_ASAN_LOAD16_NOABORT,
            BUILT_IN_ASAN_LOADN_NOABORT },
          { BUILT_IN_ASAN_STORE1_NOABORT,
            BUILT_IN_ASAN_STORE2_NOABORT,
            BUILT_IN_ASAN_STORE4_NOABORT,
            BUILT_IN_ASAN_STORE8_NOABORT,
            BUILT_IN_ASAN_STORE16_NOABORT,
            BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */

gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
                          bool before_p,
                          bool then_more_likely_p,
                          bool create_then_fallthru_edge,
                          basic_block *then_block,
                          basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
    ? PROB_VERY_UNLIKELY
    : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
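/* Rough shape of the resulting CFG (a sketch, not emitted anywhere):

     cond_bb --EDGE_TRUE_VALUE--> then_bb
        |                            |
     EDGE_FALSE_VALUE          EDGE_FALLTHRU (if requested)
        |                            |
        +--------> fallthru_bb <-----+

   The caller puts the GIMPLE_COND at the returned iterator in cond_bb
   and fills then_bb with the error-reporting code.  */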
/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gcond *cond,
                            gimple_stmt_iterator *iter,
                            bool then_more_likely_p,
                            basic_block *then_bb,
                            basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
                              /*before_p=*/true,
                              then_more_likely_p,
                              /*create_then_fallthru_edge=*/true,
                              then_bb,
                              fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}
/* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
   If RETURN_ADDRESS is set to true, return memory location instead
   of a value in the shadow memory.  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
                         tree base_addr, tree shadow_ptr_type,
                         bool return_address = false)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple *g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
                           base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
                           gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
                           gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  if (!return_address)
    {
      t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
                  build_int_cst (shadow_ptr_type, 0));
      g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
      gimple_set_location (g, location);
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }

  return gimple_assign_lhs (g);
}
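/* The sequence this emits looks roughly like the following GIMPLE
   (SSA names and the 0x7fff8000 offset are illustrative values):

     _1 = base_addr >> 3;
     _2 = _1 + 0x7fff8000;
     _3 = (signed char *) _2;
     _4 = *_3;            // omitted when RETURN_ADDRESS is true

   with _4 (or _3) being the value this function returns.  */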
/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
                       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
                                   TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* LEN can already have necessary size and precision;
   in that case, do not create a new variable.  */

static tree
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
                       bool before_p)
{
  if (ptrofftype_p (len))
    return len;
  gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                   NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for memory region access.
   IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
   non-zero length.  ALIGN tells the alignment of the accessed memory
   object.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */

static void
build_check_stmt (location_t loc, tree base, tree len,
                  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
                  bool is_non_zero_len, bool before_p, bool is_store,
                  bool is_scalar_access, unsigned int align = 0)
{
  gimple_stmt_iterator gsi = *iter;
  gimple *g;

  gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));

  base = unshare_expr (base);
  base = maybe_create_ssa_name (loc, base, &gsi, before_p);

  if (len)
    {
      len = unshare_expr (len);
      len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
    }
  else
    {
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }

  if (size_in_bytes > 1)
    {
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
          || size_in_bytes > 16)
        is_scalar_access = false;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
        {
          /* On non-strict alignment targets, if
             16-byte access is just 8-byte aligned,
             this will result in misaligned shadow
             memory 2 byte load, but otherwise can
             be handled using one read.  */
          if (size_in_bytes != 16
              || STRICT_ALIGNMENT
              || align < 8 * BITS_PER_UNIT)
            is_scalar_access = false;
        }
    }

  HOST_WIDE_INT flags = 0;
  if (is_store)
    flags |= ASAN_CHECK_STORE;
  if (is_non_zero_len)
    flags |= ASAN_CHECK_NON_ZERO_LEN;
  if (is_scalar_access)
    flags |= ASAN_CHECK_SCALAR_ACCESS;

  g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
                                  build_int_cst (integer_type_node, flags),
                                  base, len,
                                  build_int_cst (integer_type_node,
                                                 align / BITS_PER_UNIT));
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  else
    {
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      gsi_next (&gsi);
      *iter = gsi;
    }
}
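/* In GIMPLE dumps the emitted internal call appears along the lines of
   (illustrative operands):

     .ASAN_CHECK (7, base_2, 4, 4);

   i.e. flags with all three ASAN_CHECK_* bits set, the address, the
   access length and the alignment in bytes; it is expanded into a real
   shadow-memory test later, by the sanopt machinery.  */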
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  if (is_store && !ASAN_INSTRUMENT_WRITES)
    return;
  if (!is_store && !ASAN_INSTRUMENT_READS)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;
  if (location == UNKNOWN_LOCATION)
    location = EXPR_LOCATION (t);

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case BIT_FIELD_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
				    &unsignedp, &reversep, &volatilep);

  if (TREE_CODE (t) == COMPONENT_REF
      && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
    {
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
      instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
				       TREE_OPERAND (t, 0), repr,
				       NULL_TREE), location, is_store);
      return;
    }

  if (bitpos % BITS_PER_UNIT
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    return;

  if (VAR_P (inner)
      && offset == NULL_TREE
      && bitpos >= 0
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
	return;
      if (!ASAN_GLOBALS && is_global_var (inner))
	return;
      if (!TREE_STATIC (inner))
	{
	  /* Automatic vars in the current function will always be
	     accessible.  */
	  if (decl_function_context (inner) == current_function_decl
	      && (!asan_sanitize_use_after_scope ()
		  || !TREE_ADDRESSABLE (inner)))
	    return;
	}
      /* Always instrument external vars, they might be dynamically
	 initialized.  */
      else if (!DECL_EXTERNAL (inner))
	{
	  /* Static vars that are known not to be dynamically
	     initialized will always be accessible.  */
	  varpool_node *vnode = varpool_node::get (inner);
	  if (vnode && !vnode->dynamically_initialized)
	    return;
	}
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
			/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
			is_store, /*is_scalar_access*/true, align);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}
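
/* Example: given

     struct S { int i; } *p;
     ...
     p->i = 0;

   instrument_derefs sees the COMPONENT_REF p->i, computes
   size_in_bytes = 4 and base = &p->i, and (unless that reference was
   already checked earlier in the same extended basic block) emits an
   ASAN_CHECK for a 4-byte scalar store before the assignment.  */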
/* Insert a memory reference into the hash table if the access length
   can be determined at compile time.  */

static void
maybe_update_mem_ref_hash_table (tree base, tree len)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
    return;

  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (size_in_bytes != -1)
    update_mem_ref_hash_table (base, size_in_bytes);
}
/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   the sizeof (*BASE) bytes).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

static void
instrument_mem_region_access (tree base, tree len,
			      gimple_stmt_iterator *iter,
			      location_t location, bool is_store)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if ((size_in_bytes == -1)
      || !has_mem_ref_been_instrumented (base, size_in_bytes))
    build_check_stmt (location, base, len, size_in_bytes, iter,
		      /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
		      is_store, /*is_scalar_access*/false, /*align*/0);

  maybe_update_mem_ref_hash_table (base, len);
  *iter = gsi_for_stmt (gsi_stmt (*iter));
}
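
/* For illustration: a region store of a non-constant N bytes starting
   at P (e.g. the destination of a memset-like builtin) yields a single
   non-scalar check

     ASAN_CHECK (ASAN_CHECK_STORE, P, N, 0);

   which asan_expand_check_ifn below turns into a test of the first and
   last byte of [P, P + N), guarded by N != 0.  */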
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  if (!ASAN_MEMINTRIN)
    return false;

  bool iter_advanced_p = false;
  gcall *call = as_a <gcall *> (gsi_stmt (*iter));

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  location_t loc = gimple_location (call);

  asan_mem_ref src0, src1, dest;
  asan_mem_ref_init (&src0, NULL, 1);
  asan_mem_ref_init (&src1, NULL, 1);
  asan_mem_ref_init (&dest, NULL, 1);

  tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
  bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
    dest_is_deref = false, intercepted_p = true;

  if (get_mem_refs_of_builtin_call (call,
				    &src0, &src0_len, &src0_is_store,
				    &src1, &src1_len, &src1_is_store,
				    &dest, &dest_len, &dest_is_store,
				    &dest_is_deref, &intercepted_p))
    {
      if (dest_is_deref)
	{
	  instrument_derefs (iter, dest.start, loc, dest_is_store);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else if (!intercepted_p
	       && (src0_len || src1_len || dest_len))
	{
	  if (src0.start != NULL_TREE)
	    instrument_mem_region_access (src0.start, src0_len,
					  iter, loc, /*is_store=*/false);
	  if (src1.start != NULL_TREE)
	    instrument_mem_region_access (src1.start, src1_len,
					  iter, loc, /*is_store=*/false);
	  if (dest.start != NULL_TREE)
	    instrument_mem_region_access (dest.start, dest_len,
					  iter, loc, /*is_store=*/true);

	  *iter = gsi_for_stmt (call);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else
	{
	  if (src0.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
	  if (src1.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
	  if (dest.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
	}
    }
  return iter_advanced_p;
}
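
/* For illustration: when the run-time library intercepts the builtin
   itself (e.g. memcpy under user-space asan), the final branch above
   merely records its references as already checked; when it does not
   (e.g. under kernel asan), a memcpy (d, s, n) gets two region checks,
   a load check for [s, s + n) and a store check for [d, d + n).  */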
/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the memory reference whose access has been instrumented is
   added to the memory references hash table.  */

static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple *s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      is_store = true;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      is_store = false;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (is_instrumented)
    gsi_next (iter);

  return is_instrumented;
}
/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple *stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
	{
	  tree callee = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_UNREACHABLE:
	    case BUILT_IN_TRAP:
	      /* Don't instrument these.  */
	      return false;
	    default:
	      break;
	    }
	}
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple *g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }

  bool instrumented = false;
  if (gimple_store_p (stmt))
    {
      tree ref_expr = gimple_call_lhs (stmt);
      instrument_derefs (iter, ref_expr,
			 gimple_location (stmt),
			 /*is_store=*/true);
      instrumented = true;
    }

  /* Walk through gimple_call arguments and check them if needed.  */
  unsigned args_num = gimple_call_num_args (stmt);
  for (unsigned i = 0; i < args_num; ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* If ARG is not a non-aggregate register variable, the compiler
	 in general creates a temporary for it and passes it as an
	 argument to the gimple call.  But in some cases, e.g. when we
	 pass by value a small structure that fits in a register, the
	 compiler can avoid the extra overhead by pulling out these
	 temporaries.  In that case, we should check the argument.  */
      if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
	{
	  instrument_derefs (iter, arg,
			     gimple_location (stmt),
			     /*is_store=*/false);
	  instrumented = true;
	}
    }
  if (instrumented)
    gsi_next (iter);
  return instrumented;
}
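
/* For illustration, a noreturn call such as longjmp (buf, 1) becomes

     __asan_handle_no_return ();
     longjmp (buf, 1);

   so the run-time library can unpoison the whole stack first; the
   frames being jumped over would otherwise stay poisoned.  */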
/* Walk each instruction of all basic blocks and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}
      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple *s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && !gimple_clobber_p (s)
		   && maybe_instrument_assignment (&i))
	    /* Nothing to do as maybe_instrument_assignment advanced
	       the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /* Nothing to do as maybe_instrument_call
	       advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  Do the same
		 for an ASAN_MARK poisoning internal function.  */
	      if (is_gimple_call (s)
		  && (!nonfreeing_call_p (s)
		      || asan_mark_p (s, ASAN_MARK_POISON)))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}
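
/* For illustration of the flushing rule, in a CFG such as

     bb1: *p = 0;    // instrumented; *p recorded in the hash table
     bb2: x = *p;    // bb1 is the single predecessor: no re-check
     bb3: y = *p;    // bb3 has two predecessors: table flushed,
		     // so *p is checked again

   the single-predecessor chain bb1 -> bb2 forms an extended basic
   block, so the second access to *p can reuse the first check.  */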
/* Build
   __asan_before_dynamic_init (module_name)
   or
   __asan_after_dynamic_init ()
   call.  */

tree
asan_dynamic_init_call (bool after_p)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  tree fn = builtin_decl_implicit (after_p
				   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
				   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
  tree module_name_cst = NULL_TREE;
  if (!after_p)
    {
      pretty_printer module_name_pp;
      pp_string (&module_name_pp, main_input_filename);

      module_name_cst = asan_pp_string (&module_name_pp);
      module_name_cst = fold_convert (const_ptr_type_node,
				      module_name_cst);
    }

  return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
}
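
/* For illustration, with -fsanitize=address the dynamic initializers
   of a translation unit end up bracketed roughly as:

     __asan_before_dynamic_init ("file.cc");
     ... dynamic initializers of this TU ...
     __asan_after_dynamic_init ();

   which lets the run-time library detect initialization-order bugs
   between translation units.  */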
/* Build
   struct __asan_global
   {
     const void *__beg;
     uptr __size;
     uptr __size_with_redzone;
     const void *__name;
     const void *__module_name;
     uptr __has_dynamic_init;
     __asan_global_source_location *__location;
     char *__odr_indicator;
   } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[]
    = { "__beg", "__size", "__size_with_redzone",
	"__name", "__module_name", "__has_dynamic_init", "__location",
	"__odr_indicator" };
  tree fields[ARRAY_SIZE (field_names)], ret;
  unsigned i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < ARRAY_SIZE (field_names); i++)
    {
      fields[i]
	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
		      get_identifier (field_names[i]),
		      (i == 0 || i == 3) ? const_ptr_type_node
		      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  tree type_decl = build_decl (input_location, TYPE_DECL,
			       get_identifier ("__asan_global"), ret);
  DECL_IGNORED_P (type_decl) = 1;
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = type_decl;
  TYPE_STUB_DECL (ret) = type_decl;
  layout_type (ret);
  return ret;
}
/* Create and return odr indicator symbol for DECL.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static tree
create_odr_indicator (tree decl, tree type)
{
  char *name;
  tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  tree decl_name
    = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
       : DECL_NAME (decl));
  /* DECL_NAME theoretically might be NULL.  Bail out with 0 in this case.  */
  if (decl_name == NULL_TREE)
    return build_int_cst (uptr, 0);
  size_t len = strlen (IDENTIFIER_POINTER (decl_name)) + sizeof ("__odr_asan_");
  name = XALLOCAVEC (char, len);
  snprintf (name, len, "__odr_asan_%s", IDENTIFIER_POINTER (decl_name));
#ifndef NO_DOT_IN_LABEL
  name[sizeof ("__odr_asan") - 1] = '.';
#elif !defined(NO_DOLLAR_IN_LABEL)
  name[sizeof ("__odr_asan") - 1] = '$';
#endif
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
			 char_type_node);
  TREE_ADDRESSABLE (var) = 1;
  TREE_READONLY (var) = 0;
  TREE_THIS_VOLATILE (var) = 1;
  DECL_GIMPLE_REG_P (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_IGNORED_P (var) = 1;
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;
  DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
  DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);
  TREE_USED (var) = 1;
  tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
				    build_int_cst (unsigned_type_node, 0));
  TREE_CONSTANT (ctor) = 1;
  TREE_STATIC (ctor) = 1;
  DECL_INITIAL (var) = ctor;
  DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
				     NULL, DECL_ATTRIBUTES (var));
  make_decl_rtl (var);
  varpool_node::finalize_decl (var);
  return fold_convert (uptr, build_fold_addr_expr (var));
}
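
/* For illustration, for a public variable 'glob' this emits a symbol
   named roughly

     __odr_asan.glob

   (or __odr_asan$glob / __odr_asan_glob, depending on the target's
   label syntax), whose address is stored in the global's descriptor;
   the run-time library reports an ODR violation when two modules
   register descriptors for the same name and both indicator bytes end
   up set.  */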
/* Return true if DECL, a global var, might be overridden and needs
   an additional odr indicator symbol.  */

static bool
asan_needs_odr_indicator_p (tree decl)
{
  /* Don't emit ODR indicators for kernel because:
     a) Kernel is written in C thus doesn't need ODR indicators.
     b) Some kernel code may have assumptions about symbols containing specific
	patterns in their names.  Since ODR indicators contain original names
	of symbols they are emitted for, these assumptions would be broken for
	ODR indicator symbols.  */
  return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
	  && !DECL_ARTIFICIAL (decl)
	  && !DECL_WEAK (decl)
	  && TREE_PUBLIC (decl));
}
/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  tree odr_indicator_ptr
    = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
       : build_int_cst (uptr, 0));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
					build_fold_addr_expr (refdecl)));
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, module_name_cst));
  varpool_node *vnode = varpool_node::get (decl);
  int has_dynamic_init = 0;
  /* FIXME: Enable initialization order fiasco detection in LTO mode once
     a proper fix for PR 79061 is applied.  */
  if (!in_lto_p)
    has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  build_int_cst (uptr, has_dynamic_init));
  tree locptr = NULL_TREE;
  location_t loc = DECL_SOURCE_LOCATION (decl);
  expanded_location xloc = expand_location (loc);
  if (xloc.file != NULL)
    {
      static int lasanloccnt = 0;
      char buf[25];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
      tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			     ubsan_get_source_location_type ());
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      pretty_printer filename_pp;
      pp_string (&filename_pp, xloc.file);
      tree str = asan_pp_string (&filename_pp);
      tree ctor = build_constructor_va (TREE_TYPE (var), 3,
					NULL_TREE, str, NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.line), NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.column));
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      locptr = fold_convert (uptr, build_fold_addr_expr (var));
    }
  else
    locptr = build_int_cst (uptr, 0);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
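
/* For illustration, the descriptor built above for

     int g;

   in file.cc is, in C-like pseudo code and with the field order of
   asan_global_struct:

     { &g, 4, 4 + redzone, "g", "file.cc",
       has_dynamic_init, &.LASANLOC1, &__odr_asan.g }

   where the third field adds the red zone computed by
   asan_red_zone_size.  */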
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */

void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_SIZE_CONST_PTR_INT
    = build_function_type_list (size_type_node, const_ptr_type_node,
				integer_type_node, NULL_TREE);
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						| TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
#undef DEF_BUILTIN_STUB
#define DEF_BUILTIN_STUB(ENUM, NAME)
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
			       BUILT_IN_NORMAL, NAME, NULL_TREE); \
  set_call_expr_flags (decl, ATTRS); \
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_object_size, but that might
     not be available for e.g. Fortran at this point.  We use
     DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
  if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
      && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
    DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
			   BT_FN_SIZE_CONST_PTR_INT,
			   ATTR_PURE_NOTHROW_LEAF_LIST)

#undef DEF_SANITIZER_BUILTIN
#undef DEF_BUILTIN_STUB
}
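
/* For illustration, a sanitizer.def entry such as

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init",
			    BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   expands, via the macro above, into an add_builtin_function
   ("__builtin___asan_init", ...) call plus set_builtin_decl, making
   the declaration available to the instrumentation code in this
   file.  */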
/* Called via htab_traverse.  Count number of emitted
   STRING_CSTs in the constant hash table.  */

static int
count_string_csts (constant_descriptor_tree **slot,
		   unsigned HOST_WIDE_INT *data)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*data;
  return 1;
}

/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};

/* Called via hash_table::traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

static int
add_string_csts (constant_descriptor_tree **slot,
		 asan_add_string_csts_data *aascd)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
		       aascd->type, aascd->v);
    }
  return 1;
}
/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;

/* Module-level instrumentation.
   - Insert __asan_init_vN () into the list of CTORs.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  /* For user-space we want asan constructors to run first.
     Linux kernel does not support priorities other than default, and the only
     other user of constructors is coverage.  So we run with the default
     priority.  */
  int priority = flag_sanitize & SANITIZE_USER_ADDRESS
		 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;

  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
      fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
    }
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
  const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
    (&gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
	(&aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);

      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements, priority);
    }
  if (asan_ctor_statements)
    cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
  flag_sanitize |= SANITIZE_ADDRESS;
}
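
/* For illustration, for a TU with protected globals the code above
   emits, in C-like pseudo code, a high-priority constructor and a
   matching destructor roughly of the form:

     static void ctor ()   // runs before normal-priority ctors
     {
       __asan_init ();
       __asan_version_mismatch_check_vN ();
       __asan_register_globals (&.LASAN0[0], gcount);
     }
     static void dtor ()
     {
       __asan_unregister_globals (&.LASAN0[0], gcount);
     }
*/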
/* Poison or unpoison (depending on IS_CLOBBER) shadow memory based
   on SHADOW address.  Newly added statements will be added to ITER with
   given location LOC.  We mark SIZE bytes in shadow memory, where
   LAST_CHUNK_SIZE is greater than zero in situations where we are at
   the end of a variable.  */

static void
asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
			 tree shadow,
			 unsigned HOST_WIDE_INT base_addr_offset,
			 bool is_clobber, unsigned size,
			 unsigned last_chunk_size)
{
  tree shadow_ptr_type;

  switch (size)
    {
    case 1:
      shadow_ptr_type = shadow_ptr_types[0];
      break;
    case 2:
      shadow_ptr_type = shadow_ptr_types[1];
      break;
    case 4:
      shadow_ptr_type = shadow_ptr_types[2];
      break;
    default:
      gcc_unreachable ();
    }

  unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
  unsigned HOST_WIDE_INT val = 0;
  for (unsigned i = 0; i < size; ++i)
    {
      unsigned char shadow_c = c;
      /* Handle last chunk in unpoisoning.  */
      if (i == size - 1 && last_chunk_size && !is_clobber)
	shadow_c = last_chunk_size;
      val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
    }

  tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);

  tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
		      build_int_cst (shadow_ptr_type, base_addr_offset));

  gimple *g = gimple_build_assign (dest, magic);
  gimple_set_location (g, loc);
  gsi_insert_after (iter, g, GSI_NEW_STMT);
}
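
/* Worked example: unpoisoning (IS_CLOBBER == false) a 15-byte variable
   with SIZE == 2 and LAST_CHUNK_SIZE == 7 packs the two shadow bytes
   0x00 and 0x07 into the single 16-bit store value 0x0700 (assuming an
   8-bit-byte target): the first granule is fully addressable, the last
   one only in its first 7 bytes.  */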
/* Expand the ASAN_MARK builtins.  */

bool
asan_expand_mark_ifn (gimple_stmt_iterator *iter)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
  bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;

  tree base = gimple_call_arg (g, 1);
  gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
  tree decl = TREE_OPERAND (base, 0);

  /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
  if (TREE_CODE (decl) == COMPONENT_REF
      && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
    decl = TREE_OPERAND (decl, 0);

  gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
  if (asan_handled_variables == NULL)
    asan_handled_variables = new hash_set<tree> (16);
  asan_handled_variables->add (decl);
  tree len = gimple_call_arg (g, 2);

  gcc_assert (tree_fits_shwi_p (len));
  unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
  gcc_assert (size_in_bytes);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_replace (iter, g, false);
  tree base_addr = gimple_assign_lhs (g);

  /* Generate direct emission if size_in_bytes is small.  */
  if (size_in_bytes <= ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD)
    {
      unsigned HOST_WIDE_INT shadow_size = shadow_mem_size (size_in_bytes);

      tree shadow = build_shadow_mem_access (iter, loc, base_addr,
					     shadow_ptr_types[0], true);

      for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
	{
	  unsigned size = 1;
	  if (shadow_size - offset >= 4)
	    size = 4;
	  else if (shadow_size - offset >= 2)
	    size = 2;

	  unsigned HOST_WIDE_INT last_chunk_size = 0;
	  unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
	  if (s > size_in_bytes)
	    last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);

	  asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
				   size, last_chunk_size);
	  offset += size;
	}
    }
  else
    {
      g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			       NOP_EXPR, len);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree sz_arg = gimple_assign_lhs (g);

      tree fun
	= builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
				 : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
      g = gimple_build_call (fun, 2, base_addr, sz_arg);
      gimple_set_location (g, loc);
      gsi_insert_after (iter, g, GSI_NEW_STMT);
    }

  return true;
}
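
/* For illustration, poisoning a 32-byte variable via
   ASAN_MARK (POISON, &v, 32) needs 32 / ASAN_SHADOW_GRANULARITY == 4
   shadow bytes, so the loop above emits one 4-byte shadow store of
   0xf8f8f8f8 (ASAN_STACK_MAGIC_USE_AFTER_SCOPE replicated) instead of
   a library call.  */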
/* Expand the ASAN_{LOAD,STORE} builtins.  */

bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;

  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);
  HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));

  HOST_WIDE_INT size_in_bytes
    = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (use_calls)
    {
      /* Instrument using callbacks.  */
      gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				       NOP_EXPR, base);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree base_addr = gimple_assign_lhs (g);

      int nargs;
      tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
      if (nargs == 1)
	g = gimple_build_call (fun, 1, base_addr);
      else
	{
	  gcc_assert (nargs == 2);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   NOP_EXPR, len);
	  gimple_set_location (g, loc);
	  gsi_insert_before (iter, g, GSI_SAME_STMT);
	  tree sz_arg = gimple_assign_lhs (g);
	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
	}
      gimple_set_location (g, loc);
      gsi_replace (iter, g, false);
      return false;
    }

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     // asan instrumentation code goes here.
	   }
	 // fallthrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
			     len,
			     build_int_cst (TREE_TYPE (len), 0),
			     NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
				  /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, /*before_p*/false,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge*/recover_p,
				  &then_bb,
				  &else_bb);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1, 2 and 4 byte accesses.  */
      /* Test (shadow != 0)
	 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow).  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq seq = NULL;
      gimple_seq_add_stmt (&seq, shadow_test);
      /* Aligned (>= 8 bytes) access can test just
	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
	 to be 0.  */
      if (align < 8)
	{
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_addr, 7));
	  gimple_seq_add_stmt (&seq,
			       build_type_cast (shadow_type,
						gimple_seq_last (seq)));
	  if (real_size_in_bytes > 1)
	    gimple_seq_add_stmt (&seq,
				 build_assign (PLUS_EXPR,
					       gimple_seq_last (seq),
					       real_size_in_bytes - 1));
	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
	}
      else
	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
					       gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, loc);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

      /* For non-constant, misaligned or otherwise weird access sizes,
	 check first and last byte.  */
      if (size_in_bytes == -1)
	{
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   MINUS_EXPR, len,
				   build_int_cst (pointer_sized_int_node, 1));
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree last = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   PLUS_EXPR, base_addr, last);
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree base_end_addr = gimple_assign_lhs (g);

	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
						 shadow_ptr_type);
	  gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq seq = NULL;
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_end_addr, 7));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   shadow));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
						   gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, loc);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	}
    }

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}
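
/* For illustration, when instrumentation with callbacks is requested
   (e.g. once --param asan-instrumentation-with-call-threshold is
   exceeded and USE_CALLS is set), an 8-byte load check degenerates
   into the single call

     __asan_load8 (base);

   whereas the inline shadow-memory test sketched in the comment at
   the top of this file is emitted otherwise.  */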
/* Create ASAN shadow variable for a VAR_DECL which has been rewritten
   into SSA.  Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING.  */

static tree
create_asan_shadow_var (tree var_decl,
			hash_map<tree, tree> &shadow_vars_mapping)
{
  tree *slot = shadow_vars_mapping.get (var_decl);
  if (slot == NULL)
    {
      tree shadow_var = copy_node (var_decl);

      copy_body_data id;
      memset (&id, 0, sizeof (copy_body_data));
      id.src_fn = id.dst_fn = current_function_decl;
      copy_decl_for_dup_finish (&id, var_decl, shadow_var);

      DECL_ARTIFICIAL (shadow_var) = 1;
      DECL_IGNORED_P (shadow_var) = 1;
      DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
      gimple_add_tmp_var (shadow_var);

      shadow_vars_mapping.put (var_decl, shadow_var);
      return shadow_var;
    }
  else
    return *slot;
}
/* Expand ASAN_POISON ifn.  */

bool
asan_expand_poison_ifn (gimple_stmt_iterator *iter,
			bool *need_commit_edge_insert,
			hash_map<tree, tree> &shadow_vars_mapping)
{
  gimple *g = gsi_stmt (*iter);
  tree poisoned_var = gimple_call_lhs (g);
  if (!poisoned_var || has_zero_uses (poisoned_var))
    {
      gsi_remove (iter, true);
      return true;
    }

  if (SSA_NAME_VAR (poisoned_var) == NULL_TREE)
    SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var,
				    create_tmp_var (TREE_TYPE (poisoned_var)));

  tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
					    shadow_vars_mapping);

  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
  tree size = DECL_SIZE_UNIT (shadow_var);
  gimple *poison_call
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node,
						 ASAN_MARK_POISON),
				  build_fold_addr_expr (shadow_var), size);

  gimple *use;
  imm_use_iterator imm_iter;
  FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var)
    {
      if (is_gimple_debug (use))
	continue;

      int nargs;
      bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
      tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
				    &nargs);

      gcall *call = gimple_build_call (fun, 1,
				       build_fold_addr_expr (shadow_var));
      gimple_set_location (call, gimple_location (use));
      gimple *call_to_insert = call;

      /* The USE can be a gimple PHI node.  If so, insert the call on
	 all edges leading to the PHI node.  */
      if (is_a <gphi *> (use))
	{
	  gphi *phi = dyn_cast<gphi *> (use);
	  for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	    if (gimple_phi_arg_def (phi, i) == poisoned_var)
	      {
		edge e = gimple_phi_arg_edge (phi, i);

		if (call_to_insert == NULL)
		  call_to_insert = gimple_copy (call);

		gsi_insert_seq_on_edge (e, call_to_insert);
		*need_commit_edge_insert = true;
		call_to_insert = NULL;
	      }
	}
      else
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use);
	  if (store_p)
	    gsi_replace (&gsi, call, true);
	  else
	    gsi_insert_before (&gsi, call, GSI_NEW_STMT);
	}
    }

  SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
  SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();

  gsi_replace (iter, poison_call, false);
  return true;
}
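
/* For illustration, a use-after-scope sequence such as

     {
       int x;
       p = &x;
     }                // x_1 = ASAN_POISON ();
     return *p;       // use of the poisoned SSA name x_1

   reaches this function with a use of the poisoned SSA name; each such
   use is rewritten into (or preceded by) a call to the matching
   __asan_report_* function on the shadow variable created above, so
   the error fires exactly at the offending access.  */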
/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  return 0;
}

static bool
gate_asan (void)
{
  return (flag_sanitize & SANITIZE_ADDRESS) != 0
	  && !lookup_attribute ("no_sanitize_address",
				DECL_ATTRIBUTES (current_function_decl));
}

namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}

namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}

#include "gt-asan.h"