/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2014 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hash-table.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "gimple-pretty-print.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "gimple-builder.h"
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
	 __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
	 if ((X & 7) + N - 1 >= ShadowValue)
	   __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted to the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   provides __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
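
   As an illustrative sketch (not part of the original comment), the check
   above for a 4-byte load is equivalent to this C, where kOffset stands
   for the target-specific shadow offset (targetm.asan_shadow_offset ()):

     char *shadow = (char *) (((uintptr_t) x >> 3) + kOffset);
     if (*shadow && ((uintptr_t) x & 7) + 3 >= *shadow)
       __asan_report_load4 ((uintptr_t) x);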
   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }
   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
	   the next slot be 32 bytes aligned; this one is called Partial
	   Redzone; this 32 bytes alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
	   'LEFT RedZone']
   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

   1/ The first 8 bytes contain the magical asan number
   ASAN_STACK_FRAME_MAGIC (see its use in the prologue code below).

   2/ The following 8 bytes contain a pointer to a string (to be
   parsed at runtime by the runtime asan library), whose format is
   the following:

    "<function-name> <space> <num-of-variables-on-the-stack>
    (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
    <length-of-var-in-bytes> ){n} "

   where '(...){n}' means the content inside the parenthesis occurs 'n'
   times, with 'n' being the number of variables on the stack.

   3/ The following 8 bytes contain the PC of the current function which
   will be used by the run-time library to print an error message.

   4/ The following 8 bytes are reserved for internal use by the run-time.
   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.
   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.
   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to do
   the populating of the relevant shadow memory regions at load time.

   So the global variables are laid out as to insert a red zone between
   them.  The size of the red zones is so that each variable starts on a
   32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       //   size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;
     };

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
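
/* For illustration only (not part of this file): the registration the
   emitted constructor performs is roughly equivalent to this hand-written
   C, where the descriptor values for a hypothetical global `g' are made
   up for the example:

     static struct __asan_global g_desc
       = { &g, sizeof (g), 64, "g", "module.c", 0 };

     __attribute__ ((constructor)) static void
     register_g (void)
     {
       __asan_register_globals (&g_desc, 1);
     }
*/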
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Number of instrumentations in current function so far.  */

static int asan_num_accesses;
/* Check whether we should replace inline instrumentation with calls.  */

static inline bool
use_calls_p ()
{
  return ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD < INT_MAX
	 && asan_num_accesses >= ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD;
}
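
/* Illustrative note (not in the original sources): once the number of
   instrumented accesses in a function reaches the threshold above, the
   remaining accesses are instrumented with __asan_loadN/__asan_storeN
   run-time calls instead of inline shadow checks, trading speed for
   smaller code.  */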
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};
static alloc_pool asan_mem_ref_alloc_pool;

/* This creates the alloc pool used to store the instances of
   asan_mem_ref that are stored in the hash table asan_mem_ref_ht.  */

static alloc_pool
asan_mem_ref_get_alloc_pool ()
{
  if (asan_mem_ref_alloc_pool == NULL)
    asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
						 sizeof (asan_mem_ref),
						 10);
  return asan_mem_ref_alloc_pool;
}
/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}
/* Allocates memory for an instance of asan_mem_ref into the memory
   pool returned by asan_mem_ref_get_alloc_pool and initialize it.
   START is the address of (or the expression pointing to) the
   beginning of memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref
    = (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
/* This builds and returns a pointer to the end of the memory region
   that starts at START and of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}
/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref value_type;
  typedef asan_mem_ref compare_type;

  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  inchash::hash hstate;
  inchash::add_expr (mem_ref->start, hstate);
  hstate.add_wide_int (mem_ref->access_size);
  return hstate.end ();
}
/* Compare two memory references.  We accept the length of either
   memory references to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return (m1->access_size == m2->access_size
	  && operand_equal_p (m1->start, m2->start, 0));
}
static hash_table <asan_mem_ref_hasher> *asan_mem_ref_ht;
/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table <asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table <asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}
/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}
/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  if (asan_mem_ref_alloc_pool)
    {
      free_alloc_pool (asan_mem_ref_alloc_pool);
      asan_mem_ref_alloc_pool = NULL;
    }
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  return (get_mem_ref_hash_table ()->find (&r) != NULL);
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}
/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  /* First let's see if the address of the beginning of REF has been
     instrumented.  */
  if (!has_mem_ref_been_instrumented (ref))
    return false;

  if (len != NULL_TREE)
    {
      /* Let's see if the end of the region has been instrumented.  */
      if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
					  ref->access_size))
	return false;
    }
  return true;
}
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gimple assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gimple call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;
      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      dest = gimple_call_arg (call, 0);
      /* DEST represents the address of a memory location.
	 instrument_derefs wants the memory location, so let's
	 dereference the address DEST before handing it to
	 instrument_derefs.  */
      if (TREE_CODE (dest) == ADDR_EXPR)
	dest = TREE_OPERAND (dest, 0);
      else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
	dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
		       dest, build_int_cst (TREE_TYPE (dest), 0));
      else
	gcc_unreachable ();

      access_size = int_size_in_bytes (TREE_TYPE (dest));
      break;
    default:
      /* The other memory access builtins are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }
  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
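
/* For illustration only (not part of the GCC sources): for a call
   memcpy (d, s, n), the function above sets DST to {d, access_size},
   SRC0 to {s, access_size}, *DST_LEN and *SRC0_LEN to n,
   *DST_IS_STORE to true and *SRC0_IS_STORE to false, and returns true.  */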
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false;
      if (get_mem_refs_of_builtin_call (stmt,
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  return false;
}
/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table <asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL)
    *slot = asan_mem_ref_new (ref, access_size);
}
/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}
/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}
/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}
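
/* For illustration only (not part of the GCC sources): on a little-endian
   target, shadow_bytes = { 0x00, 0x00, 0x00, 0xF1 } yields the SImode
   constant 0xF1000000, i.e. shadow_bytes[0] ends up in the least
   significant byte position.  */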
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't call a library call
   here though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx insn, insns, top_label, end, addr, tmp, jump;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
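
/* For illustration only (not part of the GCC sources): when clear_storage
   would have emitted a library call, the inline loop emitted above is
   equivalent to this C, clearing LEN shadow bytes four at a time:

     uint32_t *p = (uint32_t *) shadow, *end = (uint32_t *) (shadow + len);
     do
       *p++ = 0;
     while (p < end);
*/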
void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
}
/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, epilogue sequence returned.  BASE is the register holding the
   stack base, relative to which the OFFSETS array offsets are; the OFFSETS
   array contains pairs of offsets in reverse order, always the end offset
   of some gap that needs protection followed by starting offset,
   and DECLS is an array of representative decls for each var partition.
   LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
   elements long (OFFSETS include gap before the first variable as well
   as gaps after each stack variable).  PBASE is, if non-NULL, some pseudo
   register which stack vars DECL_RTLs are based on.  Either BASE should be
   assigned to PBASE, when not doing use after return protection, or
   corresponding address based on __asan_stack_malloc* return value.  */
rtx
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base, lab;
  char buf[30];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);
  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node),
				     addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     targetm.asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();
  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL_RTX;
  if (use_after_return_class != -1)
    {
      rtx lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }
  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  ret = get_insns ();
  end_sequence ();
  return ret;
}
/* Return true if DECL, a global var, might be overridden and needs
   therefore a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}
/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section)
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, HOST_WIDE_INT size_in_bytes, int *nargs)
{
  static enum built_in_function report[2][6]
    = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	  BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	  BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	{ BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	  BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	  BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[is_store][5]);
    }
  *nargs = 1;
  return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
}
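
/* For illustration only (not part of the GCC sources): for a 4-byte store,
   report_error_func returns the decl of __asan_report_store4 and sets
   *NARGS to 1; for a variable-sized store it returns __asan_report_store_n
   and sets *NARGS to 2 (address and size).  */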
/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, int size_in_bytes, int *nargs)
{
  static enum built_in_function check[2][6]
    = { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
	  BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
	  BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
	{ BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
	  BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
	  BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[is_store][5]);
    }
  *nargs = 1;
  return builtin_decl_implicit (check[is_store][exact_log2 (size_in_bytes)]);
}
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */
static gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
    ? PROB_VERY_UNLIKELY
    : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */
static void
insert_if_then_before_iter (gimple cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
			      /*before_p=*/true,
			      then_more_likely_p,
			      /*create_then_fallthru_edge=*/true,
			      then_bb,
			      fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}
/* Build
   (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset ().  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign_with_ops (RSHIFT_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
  g = gimple_build_assign_with_ops (PLUS_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign_with_ops (NOP_EXPR,
				    make_ssa_name (shadow_ptr_type, NULL),
				    gimple_assign_lhs (g), NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
	      build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign_with_ops (MEM_REF,
				    make_ssa_name (shadow_type, NULL),
				    t, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple g
    = gimple_build_assign_with_ops (TREE_CODE (base),
				    make_ssa_name (TREE_TYPE (base), NULL),
				    base, NULL_TREE);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
/* Instrument the memory access instruction using callbacks.
   Parameters are similar to BUILD_CHECK_STMT.  */

static void
build_check_stmt_with_calls (location_t loc, tree base, tree len,
			     HOST_WIDE_INT size_in_bytes,
			     gimple_stmt_iterator *iter,
			     bool before_p, bool is_store,
			     bool is_scalar_access)
{
  gimple_stmt_iterator gsi = *iter;
  tree base_ssa = maybe_create_ssa_name (loc, base, &gsi, before_p);

  gimple g
    = gimple_build_assign_with_ops (NOP_EXPR,
				    make_ssa_name (pointer_sized_int_node,
						   NULL),
				    base_ssa, NULL_TREE);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  else
    gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  int nargs;
  tree fun
    = check_func (is_store, is_scalar_access ? size_in_bytes : -1, &nargs);
  if (nargs == 1)
    g = gimple_build_call (fun, 1, base_addr);
  else
    {
      gcc_assert (nargs == 2);
      g = gimple_build_assign_with_ops (NOP_EXPR,
					make_ssa_name (pointer_sized_int_node,
						       NULL),
					len, NULL_TREE);
      gimple_set_location (g, loc);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      tree sz_arg = gimple_assign_lhs (g);
      g = gimple_build_call (fun, nargs, base_addr, sz_arg);
    }
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  if (!before_p)
    {
      gsi_next (&gsi);
      *iter = gsi;
    }
}
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for memory region access.
   NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
   length.  ALIGN tells alignment of accessed memory object.

   START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
   memory region have already been instrumented.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */
static void
build_check_stmt (location_t location, tree base, tree len,
		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
		  bool non_zero_len_p, bool before_p, bool is_store,
		  bool is_scalar_access, unsigned int align = 0,
		  bool start_instrumented = false,
		  bool end_instrumented = false)
{
  gimple_stmt_iterator gsi = *iter;
  gimple g;
  tree uintptr_type
    = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);

  gcc_assert (!(size_in_bytes > 0 && !non_zero_len_p));

  if (start_instrumented && end_instrumented)
    {
      if (!before_p)
	gsi_next (iter);
      return;
    }

  if (len)
    len = unshare_expr (len);
  else
    {
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }
  if (size_in_bytes > 1)
    {
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
	  || !is_scalar_access
	  || size_in_bytes > 16)
	size_in_bytes = -1;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
	{
	  /* On non-strict alignment targets, if
	     16-byte access is just 8-byte aligned,
	     this will result in misaligned shadow
	     memory 2 byte load, but otherwise can
	     be handled using one read.  */
	  if (size_in_bytes != 16
	      || STRICT_ALIGNMENT
	      || align < 8 * BITS_PER_UNIT)
	    size_in_bytes = -1;
	}
    }
  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  base = unshare_expr (base);
  if (use_calls_p ())
    {
      /* Instrument using callbacks.  */
      build_check_stmt_with_calls (location, base, len, size_in_bytes, iter,
				   before_p, is_store, is_scalar_access);
      return;
    }

  ++asan_num_accesses;
  if (!non_zero_len_p)
    {
      gcc_assert (before_p);

      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     // asan instrumentation code goes here.
	   }
	 // fallthrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
			     len,
			     build_int_cst (TREE_TYPE (len), 0),
			     NULL_TREE, NULL_TREE);
      gimple_set_location (g, location);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
      build_check_stmt (location, base, len, size_in_bytes, &gsi,
			/*non_zero_len_p*/true, /*before_p*/true, is_store,
			is_scalar_access, align,
			start_instrumented, end_instrumented);
      return;
    }
  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, before_p,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge=*/false,
				  &then_bb,
				  &else_bb);

  tree base_ssa = maybe_create_ssa_name (location, base, &gsi,
					 /*before_p*/false);

  g = gimple_build_assign_with_ops (NOP_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    base_ssa, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      tree shadow = build_shadow_mem_access (&gsi, location, base_addr,
					     shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1, 2 and 4 byte accesses.  */

      if (!start_instrumented)
	{
	  /* Test (shadow != 0)
	     & ((base_addr & 7) + (real_size_in_bytes - 1)) >= shadow).  */
	  tree shadow = build_shadow_mem_access (&gsi, location, base_addr,
						 shadow_ptr_type);
	  gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq seq = NULL;
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_addr, 7));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  if (real_size_in_bytes > 1)
	    gimple_seq_add_stmt (&seq,
				 build_assign (PLUS_EXPR,
					       gimple_seq_last (seq),
					       real_size_in_bytes - 1));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   shadow));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, location);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	}
      /* For non-constant, misaligned or otherwise weird access sizes,
	 check first and last byte.  */
      if (size_in_bytes == -1 && !end_instrumented)
	{
	  g = gimple_build_assign_with_ops (MINUS_EXPR,
					    make_ssa_name (uintptr_type, NULL),
					    len,
					    build_int_cst (uintptr_type, 1));
	  gimple_set_location (g, location);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree last = gimple_assign_lhs (g);
	  g = gimple_build_assign_with_ops (PLUS_EXPR,
					    make_ssa_name (uintptr_type, NULL),
					    base_addr,
					    last);
	  gimple_set_location (g, location);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree base_end_addr = gimple_assign_lhs (g);

	  tree shadow = build_shadow_mem_access (&gsi, location,
						 base_end_addr,
						 shadow_ptr_type);
	  gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq seq = NULL;
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_end_addr, 7));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   shadow));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  if (!start_instrumented)
	    gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
						     gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, location);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	}
    }
  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store,
				is_scalar_access ? size_in_bytes : -1,
				&nargs);
  if (nargs == 1)
    g = gimple_build_call (fun, 1, base_addr);
  else
    {
      gcc_assert (nargs == 2);
      g = gimple_build_assign_with_ops (NOP_EXPR,
					make_ssa_name (pointer_sized_int_node,
						       NULL),
					len, NULL_TREE);
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      tree sz_arg = gimple_assign_lhs (g);
      g = gimple_build_call (fun, nargs, base_addr, sz_arg);
    }
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  *iter = gsi_start_bb (else_bb);
}
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  if (is_store && !ASAN_INSTRUMENT_WRITES)
    return;
  if (!is_store && !ASAN_INSTRUMENT_READS)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
				    &mode, &unsignedp, &volatilep, false);
  if (((size_in_bytes & (size_in_bytes - 1)) == 0
       && (bitpos % (size_in_bytes * BITS_PER_UNIT)))
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    {
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
	{
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
	  instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
					   TREE_OPERAND (t, 0), repr,
					   NULL_TREE), location, is_store);
	}
      return;
    }

  if (bitpos % BITS_PER_UNIT)
    return;

  if (TREE_CODE (inner) == VAR_DECL
      && offset == NULL_TREE
      && bitpos >= 0
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
	return;
      if (!TREE_STATIC (inner))
	{
	  /* Automatic vars in the current function will be always
	     accessible.  */
	  if (decl_function_context (inner) == current_function_decl)
	    return;
	}
      /* Always instrument external vars, they might be dynamically
	 initialized.  */
      else if (!DECL_EXTERNAL (inner))
	{
	  /* For static vars if they are known not to be dynamically
	     initialized, they will be always accessible.  */
	  varpool_node *vnode = varpool_node::get (inner);
	  if (vnode && !vnode->dynamically_initialized)
	    return;
	}
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
			/*non_zero_len_p*/size_in_bytes > 0, /*before_p=*/true,
			is_store, /*is_scalar_access*/true, align);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}
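
/* An illustrative example of the filtering above (a sketch): given

     static int g;
     void f (void) { int l; l = g; }

   neither access needs a check: 'l' is an automatic variable of the
   current function and thus always accessible at this point, and 'g'
   is a static variable known not to be dynamically initialized.
   Accesses that survive the filtering go through build_check_stmt.  */
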
/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   the sizeof (*BASE) bytes).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

static void
instrument_mem_region_access (tree base, tree len,
			      gimple_stmt_iterator *iter,
			      location_t location, bool is_store)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  /* If the beginning of the memory region has already been
     instrumented, do not instrument it.  */
  bool start_instrumented = has_mem_ref_been_instrumented (base, 1);

  /* If the end of the memory region has already been instrumented, do
     not instrument it.  */
  tree end = asan_mem_ref_get_end (base, len);
  bool end_instrumented = has_mem_ref_been_instrumented (end, 1);

  HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  build_check_stmt (location, base, len, size_in_bytes, iter,
		    /*non_zero_len_p*/size_in_bytes > 0, /*before_p*/true,
		    is_store, /*is_scalar_access*/false, /*align*/0,
		    start_instrumented, end_instrumented);

  update_mem_ref_hash_table (base, 1);
  if (size_in_bytes != -1)
    update_mem_ref_hash_table (end, 1);

  *iter = gsi_for_stmt (gsi_stmt (*iter));
}
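
/* Illustrative example (a sketch): for a call like

     memcpy (dst, src, n);

   the caller (instrument_builtin_call) routes both regions here, and
   the checks emitted above cover the first and the last byte of
   src[0 .. n-1] and of dst[0 .. n-1], skipping any byte whose access
   is already recorded in the mem ref hash table.  */
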
/* Instrument the call (to the builtin strlen function) pointed to by
   ITER.

   This function instruments the access to the first byte of the
   argument, right before the call.  After the call it instruments the
   access to the last byte of the argument; it uses the result of the
   call to deduce the offset of that last byte.

   Upon completion, iff the call has actually been instrumented, this
   function returns TRUE and *ITER points to the statement logically
   following the built-in strlen function call *ITER was initially
   pointing to.  Otherwise, the function returns FALSE and *ITER
   remains unchanged.  */

static bool
instrument_strlen_call (gimple_stmt_iterator *iter)
{
  gimple call = gsi_stmt (*iter);
  gcc_assert (is_gimple_call (call));

  tree callee = gimple_call_fndecl (call);
  gcc_assert (is_builtin_fn (callee)
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);

  tree len = gimple_call_lhs (call);
  if (len == NULL)
    /* Some passes might clear the return value of the strlen call;
       bail out in that case.  Return FALSE as we are not advancing
       *ITER.  */
    return false;
  gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));

  location_t loc = gimple_location (call);
  tree str_arg = gimple_call_arg (call, 0);
  bool start_instrumented = has_mem_ref_been_instrumented (str_arg, 1);

  tree cptr_type = build_pointer_type (char_type_node);
  gimple str_arg_ssa =
    gimple_build_assign_with_ops (NOP_EXPR,
				  make_ssa_name (cptr_type, NULL),
				  str_arg, NULL);
  gimple_set_location (str_arg_ssa, loc);
  gsi_insert_before (iter, str_arg_ssa, GSI_SAME_STMT);

  build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), NULL_TREE, 1, iter,
		    /*non_zero_len_p*/true, /*before_p=*/true,
		    /*is_store=*/false, /*is_scalar_access*/true, /*align*/0,
		    start_instrumented, start_instrumented);

  gimple g =
    gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
				  make_ssa_name (cptr_type, NULL),
				  gimple_assign_lhs (str_arg_ssa),
				  len);
  gimple_set_location (g, loc);
  gsi_insert_after (iter, g, GSI_NEW_STMT);

  build_check_stmt (loc, gimple_assign_lhs (g), NULL_TREE, 1, iter,
		    /*non_zero_len_p*/true, /*before_p=*/false,
		    /*is_store=*/false, /*is_scalar_access*/true, /*align*/0);

  return true;
}
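
/* Illustrative example: a call

     n = strlen (s);

   is instrumented so that s[0] is checked right before the call and
   s[n], the terminating NUL byte that strlen must have read, is
   checked right after it, using the returned length to compute that
   address.  */
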
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  if (!ASAN_MEMINTRIN)
    return false;

  bool iter_advanced_p = false;
  gimple call = gsi_stmt (*iter);

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  location_t loc = gimple_location (call);

  if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
    iter_advanced_p = instrument_strlen_call (iter);
  else
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false;

      if (get_mem_refs_of_builtin_call (call,
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref))
	{
	  if (dest_is_deref)
	    {
	      instrument_derefs (iter, dest.start, loc, dest_is_store);
	      gsi_next (iter);
	      iter_advanced_p = true;
	    }
	  else if (src0_len || src1_len || dest_len)
	    {
	      if (src0.start != NULL_TREE)
		instrument_mem_region_access (src0.start, src0_len,
					      iter, loc, /*is_store=*/false);
	      if (src1.start != NULL_TREE)
		instrument_mem_region_access (src1.start, src1_len,
					      iter, loc, /*is_store=*/false);
	      if (dest.start != NULL_TREE)
		instrument_mem_region_access (dest.start, dest_len,
					      iter, loc, /*is_store=*/true);
	      *iter = gsi_for_stmt (call);
	      gsi_next (iter);
	      iter_advanced_p = true;
	    }
	}
    }
  return iter_advanced_p;
}
/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the relevant memory reference whose access has been
   instrumented is added to the memory references hash table.  */

static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      is_store = true;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      is_store = false;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (is_instrumented)
    gsi_next (iter);

  return is_instrumented;
}
/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
	{
	  tree callee = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_UNREACHABLE:
	    case BUILT_IN_TRAP:
	      /* Don't instrument these.  */
	      return false;
	    default:
	      break;
	    }
	}
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }
  return false;
}
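
/* Illustrative example: for a noreturn call such as

     exit (1);

   the code above inserts

     __asan_handle_no_return ();
     exit (1);

   giving the run-time library a chance to clean up per-thread state
   (e.g. unpoison the stack) for frames that will never return
   normally; BUILT_IN_UNREACHABLE and BUILT_IN_TRAP are deliberately
   left uninstrumented.  */
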
/* Walk each instruction of all basic blocks and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}
      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && maybe_instrument_assignment (&i))
	    /*  Nothing to do as maybe_instrument_assignment advanced
		the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /*  Nothing to do as maybe_instrument_call
		advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  */
	      if (is_gimple_call (s) && !nonfreeing_call_p (s))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}
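
/* Illustrative example of the extended-basic-block caching above: in

     *p = 1;
     *p = 2;

   the second store reuses the check emitted for the first one, since
   both statements share an extended basic block and the reference is
   found in the mem ref hash table.  A call to a possibly-freeing
   function between the two stores flushes the table and forces a
   fresh check.  */
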
/* Build
     __asan_before_dynamic_init (module_name)
   or
     __asan_after_dynamic_init ()
   call.  */

tree
asan_dynamic_init_call (bool after_p)
{
  tree fn = builtin_decl_implicit (after_p
				   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
				   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
  tree module_name_cst = NULL_TREE;
  if (!after_p)
    {
      pretty_printer module_name_pp;
      pp_string (&module_name_pp, main_input_filename);

      if (shadow_ptr_types[0] == NULL_TREE)
	asan_init_shadow_ptr_types ();
      module_name_cst = asan_pp_string (&module_name_pp);
      module_name_cst = fold_convert (const_ptr_type_node,
				      module_name_cst);
    }

  return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
}
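
/* Illustrative sketch of how the two calls are meant to be used by a
   caller emitting dynamic initializers (an assumption about the front
   end, not code in this file):

     __asan_before_dynamic_init ("<main_input_filename>");
     ... dynamic initializers of this module ...
     __asan_after_dynamic_init ();

   which lets the run-time library detect init-order problems, i.e.
   reads of globals from other modules that have not been initialized
   yet.  */
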
/* Build
     struct __asan_global
     {
       const void *__beg;
       uptr __size;
       uptr __size_with_redzone;
       const void *__name;
       const void *__module_name;
       uptr __has_dynamic_init;
     } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[6]
    = { "__beg", "__size", "__size_with_redzone",
	"__name", "__module_name", "__has_dynamic_init" };
  tree fields[6], ret;
  int i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < 6; i++)
    {
      fields[i]
	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
		      get_identifier (field_names[i]),
		      (i == 0 || i == 3) ? const_ptr_type_node
		      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = get_identifier ("__asan_global");
  layout_type (ret);
  return ret;
}
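
/* Illustrative example (field values depend on the target and on
   asan_red_zone_size): for a protected global

     int g;

   asan_add_global below appends a descriptor conceptually equal to

     { &g, 4, 4 + asan_red_zone_size (4), "g",
       "<main_input_filename>", 0 }

   following the field layout built above.  */
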
/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
					build_fold_addr_expr (refdecl)));
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, module_name_cst));
  varpool_node *vnode = varpool_node::get (decl);
  int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  build_int_cst (uptr, has_dynamic_init));
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */

void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						| TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
			       BUILT_IN_NORMAL, NAME, NULL_TREE); \
  set_call_expr_flags (decl, ATTRS); \
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

#undef DEF_SANITIZER_BUILTIN
}
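
/* Illustrative expansion (the real entries live in sanitizer.def;
   "__asan_init_vN" is a placeholder for the versioned name): an entry

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init_vN",
			    BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   expands through the macro above into

     decl = add_builtin_function ("__builtin___asan_init_vN",
				  BT_FN_VOID, BUILT_IN_ASAN_INIT,
				  BUILT_IN_NORMAL, "__asan_init_vN",
				  NULL_TREE);
     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);
     set_builtin_decl (BUILT_IN_ASAN_INIT, decl, true);  */
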
/* Called via htab_traverse.  Count number of emitted
   STRING_CSTs in the constant hash table.  */

static int
count_string_csts (void **slot, void *data)
{
  struct constant_descriptor_tree *desc
    = (struct constant_descriptor_tree *) *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*((unsigned HOST_WIDE_INT *) data);
  return 1;
}
/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};

/* Called via htab_traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

static int
add_string_csts (void **slot, void *data)
{
  struct constant_descriptor_tree *desc
    = (struct constant_descriptor_tree *) *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      struct asan_add_string_csts_data *aascd
	= (struct asan_add_string_csts_data *) data;
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
		       aascd->type, aascd->v);
    }
  return 1;
}
/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;

/* Module-level instrumentation.
   - Insert __asan_init_vN() into the list of CTORs.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
  append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  htab_t const_desc_htab = constant_pool_htab ();
  htab_traverse (const_desc_htab, count_string_csts, &gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      htab_traverse (const_desc_htab, add_string_csts, &aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements,
				 MAX_RESERVED_INIT_PRIORITY - 1);
    }
  cgraph_build_static_cdtor ('I', asan_ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
  flag_sanitize |= SANITIZE_ADDRESS;
}
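
/* Illustrative summary (names are placeholders; the actual symbols
   are chosen by ASM_GENERATE_INTERNAL_LABEL and
   cgraph_build_static_cdtor): for a module with protected globals the
   code above arranges for

     ctor:  __asan_init_vN ();
	    __asan_register_globals (&LASAN_array, gcount);
     dtor:  __asan_unregister_globals (&LASAN_array, gcount);

   where LASAN_array stands for the array of __asan_global descriptors
   built above, registered just below MAX_RESERVED_INIT_PRIORITY so it
   wraps user constructors and destructors.  */
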
/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  asan_num_accesses = 0;
  transform_statements ();
  return 0;
}

static bool
gate_asan (void)
{
  return (flag_sanitize & SANITIZE_ADDRESS) != 0
	  && !lookup_attribute ("no_sanitize_address",
				DECL_ATTRIBUTES (current_function_decl));
}
namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}
namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}
/* Perform optimization of sanitize functions.  */

namespace {

const pass_data pass_data_sanopt =
{
  GIMPLE_PASS, /* type */
  "sanopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sanopt : public gimple_opt_pass
{
public:
  pass_sanopt (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sanopt, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_sanitize; }
  virtual unsigned int execute (function *);

}; // class pass_sanopt

unsigned int
pass_sanopt::execute (function *fun)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool no_next = false;

	  if (!is_gimple_call (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  if (gimple_call_internal_p (stmt))
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_NULL:
		no_next = ubsan_expand_null_ifn (&gsi);
		break;
	      case IFN_UBSAN_BOUNDS:
		no_next = ubsan_expand_bounds_ifn (&gsi);
		break;
	      default:
		break;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Optimized\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  if (!no_next)
	    gsi_next (&gsi);
	}
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_sanopt (gcc::context *ctxt)
{
  return new pass_sanopt (ctxt);
}

#include "gt-asan.h"