/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2015 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "hash-table.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "plugin-api.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "gimple-pretty-print.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "gimple-builder.h"
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
         __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
         if ((X & 7) + N - 1 >= ShadowValue)
           __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted into the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   provides __asan_report* and __asan_init_vN functions.
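
   As an illustrative sketch (conceptual C, not the exact code the pass
   emits), instrumenting an 8-byte store '*p = v' therefore amounts to:

     char *shadow = (char *) (((uintptr_t) p >> 3) + Offset);
     if (*shadow)
       __asan_report_store8 ((uintptr_t) p);
     *p = v;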
   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
           the next slot be 32 bytes aligned; this one is called Partial
           Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
           'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

     1/ The first 8 bytes contain a magical asan number
     (ASAN_STACK_FRAME_MAGIC below).

     2/ The following 8 bytes contain a pointer to a string (to be
     parsed at runtime by the runtime asan library), whose format is:

      "<function-name> <space> <num-of-variables-on-the-stack>
      (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
      <length-of-var-in-bytes> ){n} "

      where '(...){n}' means the content inside the parentheses occurs 'n'
      times, with 'n' being the number of variables on the stack.
     3/ The following 8 bytes contain the PC of the current function which
     will be used by the run-time library to print an error message.

     4/ The following 8 bytes are reserved for internal use by the run-time.

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.
   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to
   populate the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone between
   them.  The size of the red zones is such that each variable starts on a
   32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to struct that contains source location, could be NULL.
       __asan_global_source_location *__location;
     };

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;
/* Sets shadow offset to value in string VAL.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}
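
/* For example, the -fasan-shadow-offset= option routes its argument
   here: -fasan-shadow-offset=0x7fff8000 passes the string "0x7fff8000"
   as VAL, which strtoull parses with base 0 (so 0x/0 prefixes are
   auto-detected) into asan_shadow_offset_value.  */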
/* Set list of user-defined sections that need to be sanitized.  */

void
set_sanitized_sections (const char *sections)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    free (pat);
  sanitized_sections.truncate (0);

  for (const char *s = sections; *s; )
    {
      const char *end;
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      sanitized_sections.safe_push (xstrndup (s, len));
      s = *end ? end + 1 : end;
    }
}
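
/* For example, assuming the -fsanitize-sections= option is what feeds
   this function, "-fsanitize-sections=.mysec*,.other" is split into the
   two patterns ".mysec*" and ".other"; section_sanitized_p below then
   matches section names against them with fnmatch.  */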
/* Checks whether section SEC should be sanitized.  */

static bool
section_sanitized_p (const char *sec)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    if (fnmatch (pat, sec, FNM_PERIOD) == 0)
      return true;
  return false;
}
/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}

alias_set_type asan_shadow_set = -1;
/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Various flags for Asan builtins.  */
enum asan_check_flags
{
  ASAN_CHECK_STORE = 1 << 0,
  ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
  ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
  ASAN_CHECK_LAST = 1 << 3
};
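
/* For instance, the IFN_ASAN_CHECK internal call emitted for a plain
   4-byte scalar store carries
   ASAN_CHECK_STORE | ASAN_CHECK_SCALAR_ACCESS | ASAN_CHECK_NON_ZERO_LEN,
   i.e. the flags value 7; see build_check_stmt below.  */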
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

static alloc_pool asan_mem_ref_alloc_pool;
/* This creates the alloc pool used to store the instances of
   asan_mem_ref that are stored in the hash table asan_mem_ref_ht.  */

static alloc_pool
asan_mem_ref_get_alloc_pool ()
{
  if (asan_mem_ref_alloc_pool == NULL)
    asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
                                                 sizeof (asan_mem_ref),
                                                 10);
  return asan_mem_ref_alloc_pool;
}
/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref in the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of the memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref *
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref
    = (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}
/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref *value_type;
  typedef asan_mem_ref *compare_type;

  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};
/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
                            const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}
static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
  return asan_mem_ref_ht;
}
/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  if (asan_mem_ref_alloc_pool)
    {
      free_alloc_pool (asan_mem_ref_alloc_pool);
      asan_mem_ref_alloc_pool = NULL;
    }
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
         && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
                           asan_mem_ref *ref,
                           bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gcall *call,
                              asan_mem_ref *src0,
                              tree *src0_len,
                              bool *src0_is_store,
                              asan_mem_ref *src1,
                              tree *src1_len,
                              bool *src1_is_store,
                              asan_mem_ref *dst,
                              tree *dst_len,
                              bool *dst_is_store,
                              bool *dest_is_deref,
                              bool *intercepted_p)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;
      /* And now the __atomic* and __sync builtins.
         These are handled differently from the classical memory
         access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* FALLTHRU */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      dest = gimple_call_arg (call, 0);
      /* DEST represents the address of a memory location.
         instrument_derefs wants the memory location, so let's
         dereference the address DEST before handing it to
         instrument_derefs.  */
      if (TREE_CODE (dest) == ADDR_EXPR)
        dest = TREE_OPERAND (dest, 0);
      else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
        dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
                       dest, build_int_cst (TREE_TYPE (dest), 0));
      else
        gcc_unreachable ();

      access_size = int_size_in_bytes (TREE_TYPE (dest));
      break;

    default:
      /* The other builtin memory accesses are not instrumented in this
         function because they either don't have any length parameter,
         or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
        {
          src0->start = source0;
          src0->access_size = access_size;
          *src0_len = len;
          *src0_is_store = false;
        }

      if (source1 != NULL_TREE)
        {
          src1->start = source1;
          src1->access_size = access_size;
          *src1_len = len;
          *src1_is_store = false;
        }

      if (dest != NULL_TREE)
        {
          dst->start = dest;
          dst->access_size = access_size;
          *dst_len = len;
          *dst_is_store = true;
        }

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
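
/* For example, for __builtin_memset (p, 0, n) the switch above sets
   DST->start = p, *DST_LEN = n and *DST_IS_STORE = true while leaving
   the source references unset; instrument_builtin_call below then emits
   a store check over the region [p, p + n).  */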
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
                                     &r_is_store))
        return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
        dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
                                        &src0, &src0_len, &src0_is_store,
                                        &src1, &src1_len, &src1_is_store,
                                        &dest, &dest_len, &dest_is_store,
                                        &dest_is_deref, &intercepted_p))
        {
          if (src0.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&src0, src0_len))
            return false;

          if (src1.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&src1, src1_len))
            return false;

          if (dest.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&dest, dest_len))
            return false;

          return true;
        }
    }
  return false;
}
/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}
/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}
/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
                        build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}
/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
           << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}
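
/* E.g. on a little-endian target, shadow_bytes {0x00, 0x00, 0x00, 0xF1}
   (three addressable 8-byte granules followed by one LEFT red zone byte)
   packs into the SImode constant 0xF1000000.  */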
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't call a library call
   here though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;

  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
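
/* E.g. for LEN == 128, when clear_storage above would have emitted a
   libcall, the fallback loop instead stores SImode zero 32 times, 4
   shadow bytes per iteration, branching back while addr < end.  */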
/* Emit the LASANPC label preceding the function body; its address is
   stored in the stack frame description and used by the run-time
   library when reporting errors in this function.  */

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
                          current_function_funcdef_no);
}
/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, the epilogue sequence returned.  BASE is the register holding the
   stack base, relative to which the OFFSETS array offsets are.  The OFFSETS
   array contains pairs of offsets in reverse order, always the end offset
   of some gap that needs protection followed by its starting offset,
   and DECLS is an array of representative decls for each var partition.
   LENGTH is the length of the OFFSETS array; the DECLS array is LENGTH / 2 - 1
   elements long (OFFSETS include the gap before the first variable as well
   as gaps after each stack variable).  PBASE is, if non-NULL, some pseudo
   register which stack vars DECL_RTLs are based on.  Either BASE should be
   assigned to PBASE, when not doing use after return protection, or
   the corresponding address based on the __asan_stack_malloc* return
   value.  */
rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
                            HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[30];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
        {
          pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
          pp_space (&asan_pp);
          pp_tree_identifier (&asan_pp, DECL_NAME (decl));
        }
      else
        pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);
  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
         N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
                    ? (64U << use_after_return_class) : 4096U))
        use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
        base_align_bias = ((asan_frame_size + alignb - 1)
                           & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }

  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
                         gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
                                          << ASAN_SHADOW_SHIFT)
                                         / BITS_PER_UNIT), Pmode), NULL_RTX,
                         1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
                       gen_int_mode (base_offset - base_align_bias, Pmode),
                       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
        {
          id = get_identifier ("__asan_option_detect_stack_use_after_return");
          decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
                             integer_type_node);
          SET_DECL_ASSEMBLER_NAME (decl, id);
          TREE_ADDRESSABLE (decl) = 1;
          DECL_ARTIFICIAL (decl) = 1;
          DECL_IGNORED_P (decl) = 1;
          DECL_EXTERNAL (decl) = 1;
          TREE_STATIC (decl) = 1;
          TREE_PUBLIC (decl) = 1;
          TREE_USED (decl) = 1;
          asan_detect_stack_use_after_return = decl;
        }
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
                               VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
                use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
                                     GEN_INT (asan_frame_size
                                              + base_align_bias),
                                     TYPE_MODE (pointer_sized_int_node),
                                     addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
                                           gen_int_mode (base_align_bias
                                                         - base_offset, Pmode),
                                           NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
                              GEN_INT (ASAN_SHADOW_SHIFT),
                              NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
                     asan_shadow_offset ()
                     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
              && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
        cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
        {
          int i;
          HOST_WIDE_INT aoff
            = base_offset + ((offset - base_offset)
                             & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (aoff - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = aoff;
          for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
            if (aoff < offset)
              {
                if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
                  shadow_bytes[i] = 0;
                else
                  shadow_bytes[i] = offset - aoff;
              }
            else
              shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
          emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
          offset = aoff;
        }
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
        {
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (offset - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = offset;
          memset (shadow_bytes, cur_shadow_byte, 4);
          emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
          offset += ASAN_RED_ZONE_SIZE;
        }
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();
  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
                               VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
          && can_store_by_pieces (sz, builtin_memset_read_str, &c,
                                  BITS_PER_UNIT, true))
        store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
                         BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
               || !set_storage_via_setmem (shadow_mem,
                                           GEN_INT (sz),
                                           gen_int_mode (c, QImode),
                                           BITS_PER_UNIT, BITS_PER_UNIT,
                                           -1, sz, sz, sz))
        {
          snprintf (buf, sizeof buf, "__asan_stack_free_%d",
                    use_after_return_class);
          ret = init_one_libfunc (buf);
          rtx addr = convert_memory_address (ptr_mode, base);
          rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
          emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
                             GEN_INT (asan_frame_size + base_align_bias),
                             TYPE_MODE (pointer_sized_int_node),
                             orig_addr, ptr_mode);
        }
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
                              & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
        {
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (last_offset - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = last_offset;
          asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
          last_offset = offset;
          last_size = 0;
        }
      last_size += base_offset + ((offsets[l - 2] - base_offset)
                                  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
                   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                   (last_offset - prev_offset)
                                   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }
  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}
/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}
/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
         by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
          && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
          && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
        return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
         the var that is selected by the linker will have
         padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
         Note: the Linux kernel is built with -fno-common, so we do
         instrument globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using a user section; often vars placed
         into a user section from multiple TUs are then assumed
         to be an array of such vars, and putting padding in there
         breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
          && !symtab_node::get (decl)->implicit_section
          && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ())
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
                   int *nargs)
{
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
            BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
            BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
          { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
            BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
            BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
        { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
            BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
          { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
            BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}
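
/* For example, an aborting 4-byte load check resolves to
   report[0][0][exact_log2 (4)], i.e. BUILT_IN_ASAN_REPORT_LOAD4, a
   one-argument call taking the faulting address.  */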
/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
            int *nargs)
{
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
            BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
            BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
          { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
            BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
            BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
        { { BUILT_IN_ASAN_LOAD1_NOABORT,
            BUILT_IN_ASAN_LOAD2_NOABORT,
            BUILT_IN_ASAN_LOAD4_NOABORT,
            BUILT_IN_ASAN_LOAD8_NOABORT,
            BUILT_IN_ASAN_LOAD16_NOABORT,
            BUILT_IN_ASAN_LOADN_NOABORT },
          { BUILT_IN_ASAN_STORE1_NOABORT,
            BUILT_IN_ASAN_STORE2_NOABORT,
            BUILT_IN_ASAN_STORE4_NOABORT,
            BUILT_IN_ASAN_STORE8_NOABORT,
            BUILT_IN_ASAN_STORE16_NOABORT,
            BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */

gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
                          bool before_p,
                          bool then_more_likely_p,
                          bool create_then_fallthru_edge,
                          basic_block *then_block,
                          basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
    ? PROB_VERY_UNLIKELY
    : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gcond *cond,
                            gimple_stmt_iterator *iter,
                            bool then_more_likely_p,
                            basic_block *then_bb,
                            basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
                              /*before_p=*/true,
                              then_more_likely_p,
                              /*create_then_fallthru_edge=*/true,
                              then_bb,
                              fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}
/* Build
   (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
                         tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
                           base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
                           gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
                           gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
              build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
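
/* As a plain C sketch (illustrative only), the four statements built
   above compute:

     shadow_addr  = (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ();
     shadow_value = *(shadow_type *) shadow_addr;

   and return the SSA name holding shadow_value.  */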
/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
                       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
                                  TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* LEN can already have the necessary size and precision;
   in that case, do not create a new variable.  */

static tree
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
                       bool before_p)
{
  if (ptrofftype_p (len))
    return len;
  gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
                                  NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for memory region access.
   IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
   non-zero length.  ALIGN tells the alignment of the accessed memory
   object.

   START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
   the memory region have already been instrumented.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */

static void
build_check_stmt (location_t loc, tree base, tree len,
                  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
                  bool is_non_zero_len, bool before_p, bool is_store,
                  bool is_scalar_access, unsigned int align = 0)
{
  gimple_stmt_iterator gsi = *iter;
  gimple g;

  gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));

  base = unshare_expr (base);
  base = maybe_create_ssa_name (loc, base, &gsi, before_p);

  if (len)
    {
      len = unshare_expr (len);
      len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
    }
  else
    {
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }

  if (size_in_bytes > 1)
    {
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
          || size_in_bytes > 16)
        is_scalar_access = false;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
        {
          /* On non-strict alignment targets, if a
             16-byte access is just 8-byte aligned,
             this will result in a misaligned shadow
             memory 2-byte load, but otherwise can
             be handled using one read.  */
          if (size_in_bytes != 16
              || STRICT_ALIGNMENT
              || align < 8 * BITS_PER_UNIT)
            is_scalar_access = false;
        }
    }

  HOST_WIDE_INT flags = 0;
  if (is_store)
    flags |= ASAN_CHECK_STORE;
  if (is_non_zero_len)
    flags |= ASAN_CHECK_NON_ZERO_LEN;
  if (is_scalar_access)
    flags |= ASAN_CHECK_SCALAR_ACCESS;

  g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
                                  build_int_cst (integer_type_node, flags),
                                  base, len,
                                  build_int_cst (integer_type_node,
                                                 align / BITS_PER_UNIT));
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  else
    {
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      gsi_next (&gsi);
      *iter = gsi;
    }
}
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is the source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
                   location_t location, bool is_store)
{
  if (is_store && !ASAN_INSTRUMENT_WRITES)
    return;
  if (!is_store && !ASAN_INSTRUMENT_READS)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case BIT_FIELD_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
                                    &mode, &unsignedp, &volatilep, false);

  if (TREE_CODE (t) == COMPONENT_REF
      && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
    {
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
      instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
                                       TREE_OPERAND (t, 0), repr,
                                       NULL_TREE), location, is_store);
      return;
    }

  if (bitpos % BITS_PER_UNIT
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    return;

  if (TREE_CODE (inner) == VAR_DECL
      && offset == NULL_TREE
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
        return;
      if (!ASAN_GLOBALS && is_global_var (inner))
        return;
      if (!TREE_STATIC (inner))
        {
          /* Automatic vars in the current function will always be
             accessible.  */
          if (decl_function_context (inner) == current_function_decl)
            return;
        }
      /* Always instrument external vars, they might be dynamically
         initialized.  */
      else if (!DECL_EXTERNAL (inner))
        {
          /* For static vars, if they are known not to be dynamically
             initialized, they will always be accessible.  */
          varpool_node *vnode = varpool_node::get (inner);
          if (vnode && !vnode->dynamically_initialized)
            return;
        }
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
                        /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
                        is_store, /*is_scalar_access*/true, align);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}
/* Insert a memory reference into the hash table if its access length
   can be determined at compile time.  */

static void
maybe_update_mem_ref_hash_table (tree base, tree len)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
    return;

  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (size_in_bytes != -1)
    update_mem_ref_hash_table (base, size_in_bytes);
}
/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   the sizeof (*BASE) bytes).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

static void
instrument_mem_region_access (tree base, tree len,
			      gimple_stmt_iterator *iter,
			      location_t location, bool is_store)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if ((size_in_bytes == -1)
      || !has_mem_ref_been_instrumented (base, size_in_bytes))
    build_check_stmt (location, base, len, size_in_bytes, iter,
		      /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
		      is_store, /*is_scalar_access*/false, /*align*/0);

  maybe_update_mem_ref_hash_table (base, len);
  *iter = gsi_for_stmt (gsi_stmt (*iter));
}
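/* Example for instrument_mem_region_access above (illustrative only):
   for a call

     memset (p, 0xff, n);

   this function is reached with BASE == p and LEN == n and emits a
   single non-scalar check over the whole [p, p + n) region.  When n is
   not a known non-zero constant, size_in_bytes is -1, so is_non_zero_len
   is false and the later expansion guards the check with 'if (n != 0)'.  */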
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  if (!ASAN_MEMINTRIN)
    return false;

  bool iter_advanced_p = false;
  gcall *call = as_a <gcall *> (gsi_stmt (*iter));

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  location_t loc = gimple_location (call);

  asan_mem_ref src0, src1, dest;
  asan_mem_ref_init (&src0, NULL, 1);
  asan_mem_ref_init (&src1, NULL, 1);
  asan_mem_ref_init (&dest, NULL, 1);

  tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
  bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
    dest_is_deref = false, intercepted_p = true;

  if (get_mem_refs_of_builtin_call (call,
				    &src0, &src0_len, &src0_is_store,
				    &src1, &src1_len, &src1_is_store,
				    &dest, &dest_len, &dest_is_store,
				    &dest_is_deref, &intercepted_p))
    {
      if (dest_is_deref)
	{
	  instrument_derefs (iter, dest.start, loc, dest_is_store);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else if (!intercepted_p
	       && (src0_len || src1_len || dest_len))
	{
	  if (src0.start != NULL_TREE)
	    instrument_mem_region_access (src0.start, src0_len,
					  iter, loc, /*is_store=*/false);
	  if (src1.start != NULL_TREE)
	    instrument_mem_region_access (src1.start, src1_len,
					  iter, loc, /*is_store=*/false);
	  if (dest.start != NULL_TREE)
	    instrument_mem_region_access (dest.start, dest_len,
					  iter, loc, /*is_store=*/true);

	  *iter = gsi_for_stmt (call);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else
	{
	  if (src0.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
	  if (src1.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
	  if (dest.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
	}
    }
  return iter_advanced_p;
}
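/* Example for instrument_builtin_call above (illustrative): for

     memcpy (d, s, n);

   get_mem_refs_of_builtin_call reports SRC0 == s (an n-byte load) and
   DEST == d (an n-byte store).  Because the run-time library already
   intercepts memcpy, INTERCEPTED_P comes back true and no inline region
   checks are emitted; the accessed regions are only recorded in the
   hash table so later duplicate checks on the same memory can be
   elided.  A non-intercepted builtin with known region operands would
   instead get the two instrument_mem_region_access calls.  */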
/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the relevant memory reference whose access has been
   instrumented is added to the memory references hash table.  */

static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      is_store = true;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      is_store = false;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (is_instrumented)
    gsi_next (iter);

  return is_instrumented;
}
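/* Example for maybe_instrument_assignment above (illustrative): a
   single GIMPLE assignment can be both a load and a store at once,
   e.g. for

     *p = *q;

   gimple_store_p and gimple_assign_load_p both hold, so the statement
   receives two checks: a store check on *p and a load check on *q.  */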
/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
	{
	  tree callee = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_UNREACHABLE:
	    case BUILT_IN_TRAP:
	      /* Don't instrument these.  */
	      return false;
	    default:
	      break;
	    }
	}
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }
  return false;
}
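/* Why the __asan_handle_no_return call above matters (illustrative):

     void fatal (const char *msg) __attribute__ ((noreturn));
     ...
     fatal ("boom");  // becomes: __asan_handle_no_return (); fatal ("boom");

   A noreturn call may leave frames behind without running their stack
   unpoisoning code (think longjmp or exit); __asan_handle_no_return
   unpoisons the current thread's stack so stale redzones cannot cause
   false positives later.  */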
/* Walk each instruction of all basic blocks and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}
      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && !gimple_clobber_p (s)
		   && maybe_instrument_assignment (&i))
	    /* Nothing to do as maybe_instrument_assignment advanced
	       the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /* Nothing to do as maybe_instrument_call
	       advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  */
	      if (is_gimple_call (s) && !nonfreeing_call_p (s))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}
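/* Sketch of the flushing behaviour above (illustrative):

     a[i] = 1;   // BB1: instrumented, <&a[i], 4> recorded
     if (cond)
       foo ();   // BB2: call may free memory; table emptied
     a[i] = 2;   // BB3: two predecessors, so table emptied anyway

   The second store to a[i] is re-instrumented because BB3 is not part
   of the same extended basic block as BB1, so the cached knowledge
   about &a[i] cannot be trusted there.  */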
/* Build
     __asan_before_dynamic_init (module_name)
   or
     __asan_after_dynamic_init ()
   call.  */

tree
asan_dynamic_init_call (bool after_p)
{
  tree fn = builtin_decl_implicit (after_p
				   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
				   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
  tree module_name_cst = NULL_TREE;
  if (!after_p)
    {
      pretty_printer module_name_pp;
      pp_string (&module_name_pp, main_input_filename);

      if (shadow_ptr_types[0] == NULL_TREE)
	asan_init_shadow_ptr_types ();
      module_name_cst = asan_pp_string (&module_name_pp);
      module_name_cst = fold_convert (const_ptr_type_node,
				      module_name_cst);
    }

  return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
}
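/* Illustrative use of asan_dynamic_init_call (a sketch; the exact
   placement is up to the front end): for a dynamically initialized
   global such as

     int x = f ();

   the generated static initialization code is bracketed roughly as

     __asan_before_dynamic_init ("<module>");
     x = f ();
     __asan_after_dynamic_init ();

   which lets the run time detect reads of another module's globals
   before their initializers have run (init-order checking).  */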
/* Build
   struct __asan_global
   {
     const void *__beg;
     uptr __size;
     uptr __size_with_redzone;
     const void *__name;
     const void *__module_name;
     uptr __has_dynamic_init;
     __asan_global_source_location *__location;
   } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[7]
    = { "__beg", "__size", "__size_with_redzone",
	"__name", "__module_name", "__has_dynamic_init", "__location"};
  tree fields[7], ret;
  int i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < 7; i++)
    {
      fields[i]
	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
		      get_identifier (field_names[i]),
		      (i == 0 || i == 3) ? const_ptr_type_node
		      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  tree type_decl = build_decl (input_location, TYPE_DECL,
			       get_identifier ("__asan_global"), ret);
  DECL_IGNORED_P (type_decl) = 1;
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = type_decl;
  TYPE_STUB_DECL (ret) = type_decl;
  layout_type (ret);
  return ret;
}
/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
					build_fold_addr_expr (refdecl)));
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, module_name_cst));
  varpool_node *vnode = varpool_node::get (decl);
  int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  build_int_cst (uptr, has_dynamic_init));
  tree locptr = NULL_TREE;
  location_t loc = DECL_SOURCE_LOCATION (decl);
  expanded_location xloc = expand_location (loc);
  if (xloc.file != NULL)
    {
      static int lasanloccnt = 0;
      char buf[25];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
      tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			     ubsan_get_source_location_type ());
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      pretty_printer filename_pp;
      pp_string (&filename_pp, xloc.file);
      tree str = asan_pp_string (&filename_pp);
      tree ctor = build_constructor_va (TREE_TYPE (var), 3,
					NULL_TREE, str, NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.line), NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.column));
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      locptr = fold_convert (uptr, build_fold_addr_expr (var));
    }
  else
    locptr = build_int_cst (uptr, 0);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
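/* Illustrative contents of one such record (a sketch): for 'int g;'
   defined in t.c, the constructor element appended to V corresponds
   roughly to

     { &g, 4, 4 + redzone, "g", "t.c", has_dynamic_init, &.LASANLOC1 }

   where 'redzone' stands for asan_red_zone_size (4) and .LASANLOC1 is
   the emitted source-location record (file, line, column) built just
   above.  */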
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */

void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_SIZE_CONST_PTR_INT
    = build_function_type_list (size_type_node, const_ptr_type_node,
				integer_type_node, NULL_TREE);
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						| TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
			       BUILT_IN_NORMAL, NAME, NULL_TREE); \
  set_call_expr_flags (decl, ATTRS); \
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_object_size, but that might
     not be available for e.g. Fortran at this point.  We use
     DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
  if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
      && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
    DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
			   BT_FN_SIZE_CONST_PTR_INT,
			   ATTR_PURE_NOTHROW_LEAF_LIST)

#undef DEF_SANITIZER_BUILTIN
}
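/* For reference, a single entry such as (illustrative; the concrete
   attribute list is whatever sanitizer.def specifies)

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_LOAD4, "__asan_load4",
			    BT_FN_VOID_PTR, ATTR_NOTHROW_LEAF_LIST)

   expands under the #define above to

     decl = add_builtin_function ("__builtin___asan_load4",
				  BT_FN_VOID_PTR, BUILT_IN_ASAN_LOAD4,
				  BUILT_IN_NORMAL, "__asan_load4", NULL_TREE);
     set_call_expr_flags (decl, ATTR_NOTHROW_LEAF_LIST);
     set_builtin_decl (BUILT_IN_ASAN_LOAD4, decl, true);

   registering the builtin under its __builtin_ name with the plain
   run-time symbol as its assembler name.  */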
/* Called via htab_traverse.  Count number of emitted
   STRING_CSTs in the constant hash table.  */

static int
count_string_csts (constant_descriptor_tree **slot,
		   unsigned HOST_WIDE_INT *data)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*data;
  return 1;
}

/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};

/* Called via hash_table::traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

static int
add_string_csts (constant_descriptor_tree **slot,
		 asan_add_string_csts_data *aascd)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
		       aascd->type, aascd->v);
    }
  return 1;
}
/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;

/* Module-level instrumentation.
   - Insert __asan_init_vN() into the list of CTORs.
   - TODO: insert redzones around globals.  */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  /* For user-space we want asan constructors to run first.
     Linux kernel does not support priorities other than default, and the only
     other user of constructors is coverage.  So we run with the default
     priority.  */
  int priority = flag_sanitize & SANITIZE_USER_ADDRESS
		 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;

  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
    }
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
  const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
    (&gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
	(&aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);

      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements, priority);
    }
  if (asan_ctor_statements)
    cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
  flag_sanitize |= SANITIZE_ADDRESS;
}
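/* Sketch of the module-level code asan_finish_file emits for
   user-space ASan (the constructor and destructor bodies shown are
   illustrative; the actual functions are generated by
   cgraph_build_static_cdtor):

     // ctor, priority MAX_RESERVED_INIT_PRIORITY - 1
     __asan_init ();
     __asan_register_globals (&.LASAN0[0], gcount);

     // dtor
     __asan_unregister_globals (&.LASAN0[0], gcount);

   where .LASAN0 is the array of __asan_global records built above.  */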
/* Expand the ASAN_{LOAD,STORE} builtins.  */

bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
  gimple g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);

  bool recover_p
    = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;

  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);
  HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));

  HOST_WIDE_INT size_in_bytes
    = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (use_calls)
    {
      /* Instrument using callbacks.  */
      gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				      NOP_EXPR, base);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree base_addr = gimple_assign_lhs (g);

      int nargs;
      tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
      if (nargs == 1)
	g = gimple_build_call (fun, 1, base_addr);
      else
	{
	  gcc_assert (nargs == 2);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   NOP_EXPR, len);
	  gimple_set_location (g, loc);
	  gsi_insert_before (iter, g, GSI_SAME_STMT);
	  tree sz_arg = gimple_assign_lhs (g);
	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
	}
      gimple_set_location (g, loc);
      gsi_replace (iter, g, false);
      return false;
    }

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     // asan instrumentation code goes here.
	   }
	 // fallthrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
			     len,
			     build_int_cst (TREE_TYPE (len), 0),
			     NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
				  /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, /*before_p*/false,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge*/recover_p,
				  &then_bb,
				  &else_bb);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1, 2 and 4 byte accesses.  */
      /* Test (shadow != 0)
	 & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow).  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq seq = NULL;
      gimple_seq_add_stmt (&seq, shadow_test);
      /* Aligned (>= 8 bytes) accesses can test just
	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
	 to be 0.  */
      if (align < 8)
	{
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_addr, 7));
	  gimple_seq_add_stmt (&seq,
			       build_type_cast (shadow_type,
						gimple_seq_last (seq)));
	  if (real_size_in_bytes > 1)
	    gimple_seq_add_stmt (&seq,
				 build_assign (PLUS_EXPR,
					       gimple_seq_last (seq),
					       real_size_in_bytes - 1));
	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
	}
      else
	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
					       gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, loc);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

      /* For non-constant, misaligned or otherwise weird access sizes,
	 check first and last byte.  */
      if (size_in_bytes == -1)
	{
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   MINUS_EXPR, len,
				   build_int_cst (pointer_sized_int_node, 1));
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree last = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   PLUS_EXPR, base_addr, last);
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree base_end_addr = gimple_assign_lhs (g);

	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
						 shadow_ptr_type);
	  gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq seq = NULL;
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_end_addr, 7));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   shadow));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
						   gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, loc);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	}
    }

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}
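/* Worked example for the slow path above (illustrative, assuming the
   default 8:1 shadow mapping): consider a 4-byte load from address X
   where (X & 7) == 6, and the shadow byte k = *(char *)((X >> 3) + Offset)
   is 4, meaning only the first 4 bytes of the 8-byte granule are
   addressable.  The generated code computes

     shadow_test = (k != 0)               // 4 != 0 -> true
     last_byte   = (X & 7) + (4 - 1)      // 6 + 3 == 9
     bad         = (last_byte >= k)       // 9 >= 4 -> true
     fault       = shadow_test & bad      // true: report the error

   whereas the same load at (X & 7) == 0 gives 0 + 3 < 4 and passes
   without a report.  */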
/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  return 0;
}

static bool
gate_asan (void)
{
  return (flag_sanitize & SANITIZE_ADDRESS) != 0
	  && !lookup_attribute ("no_sanitize_address",
				DECL_ATTRIBUTES (current_function_decl));
}
namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}
namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}

#include "gt-asan.h"