/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2013 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hash-table.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "gimple-pretty-print.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "gimple-builder.h"
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
         __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
         if ((X & 7) + N - 1 >= ShadowValue)
           __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted into the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   and provides the __asan_report* and __asan_init_vN functions.

   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.
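
   As an illustration only (not part of GCC), the check above can be
   written as a self-contained C function; 0x7fff8000 is a typical
   32-bit shadow offset and every name below is invented for the
   example:

     static void
     check_access (uintptr_t addr, size_t n)  // n is 1, 2 or 4
     {
       signed char shadow = *(signed char *) ((addr >> 3) + 0x7fff8000);
       // Shadow 0 means the whole 8-byte granule is addressable; a
       // small positive value means only its first 'shadow' bytes are.
       if (shadow != 0
           && (signed char) ((addr & 7) + n - 1) >= shadow)
         report_error (addr);  // stands in for __asan_report_{load,store}N
     }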
   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[24] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, added to the space of 'a' to make the
           next slot 32-byte aligned; this one is called a Partial
           Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'MIDDLE RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
           'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

   1/ The first 8 bytes contain a magical asan number that is always
   0x41B58AB3.

   2/ The following 8 bytes contain a pointer to a string (to be
   parsed at runtime by the runtime asan library), whose format is
   the following:

     "<function-name> <space> <num-of-variables-on-the-stack>
     (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
     <length-of-var-in-bytes> ){n} "

   where '(...){n}' means the content inside the parenthesis occurs 'n'
   times, with 'n' being the number of variables on the stack.

   3/ The following 8 bytes contain the PC of the current function which
   will be used by the run-time library to print an error message.

   4/ The following 8 bytes are reserved for internal use by the run-time.
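
   For instance, with the foo example above, the description string
   could read

     "foo 2 96 24 32 8 "

   i.e. function foo with 2 stack variables: one of length 24 bytes at
   the 32-bytes-aligned offset 96 ('a'), and one of length 8 bytes at
   offset 32 ('b').  (The offsets here are illustrative.)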
   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32-byte aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' (3 shadow bytes of 8
       application bytes each) are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.
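
   As an illustration only, for the frame of foo the poisoning prologue
   is morally equivalent to the following C stores (little-endian
   target assumed; 'sw' points to the first shadow word of the frame,
   and the names are invented):

     uint32_t *sw = (uint32_t *) ((frame_base >> 3) + shadow_offset);
     sw[0] = 0xF1F1F1F1;  // slot 7: LEFT red zone (32 bytes)
     sw[1] = 0xF4F4F400;  // slots 6 and 5: 'b' + 24-byte partial red zone
     sw[2] = 0xF2F2F2F2;  // slot 4: MIDDLE red zone (32 bytes)
     sw[3] = 0xF4000000;  // slots 3 and 2: 'a' + 8-byte partial red zone
     sw[4] = 0xF3F3F3F3;  // slot 1: RIGHT red zone (32 bytes)

   and the epilogue clears these shadow words back to zero.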
   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to do
   the populating of the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone
   between them.  The size of the red zones is such that each variable
   starts on a multiple of 32 bytes.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;
     }

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
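
/* As an illustration only (not emitted verbatim by GCC), the generated
   constructor and destructor behave like the following C for a module
   with two protected globals; the descriptor initializers are elided
   and the function names are invented:

     static struct __asan_global descriptors[2] = { ... };

     static void module_ctor (void)
     {
       __asan_init_vN ();
       __asan_register_globals (descriptors, 2);
     }

     static void module_dtor (void)
     {
       __asan_unregister_globals (descriptors, 2);
     }
*/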
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access (can be 1, 2, 4, 8, 16 for now).  */
  char access_size;
};

static alloc_pool asan_mem_ref_alloc_pool;

/* This creates the alloc pool used to store the instances of
   asan_mem_ref that are stored in the hash table asan_mem_ref_ht.  */

static alloc_pool
asan_mem_ref_get_alloc_pool ()
{
  if (asan_mem_ref_alloc_pool == NULL)
    asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
                                                 sizeof (asan_mem_ref),
                                                 10);
  return asan_mem_ref_alloc_pool;
}
/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, char access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref in the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of the memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, char access_size)
{
  asan_mem_ref *ref =
    (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref value_type;
  typedef asan_mem_ref compare_type;

  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  hashval_t h = iterative_hash_expr (mem_ref->start, 0);
  h = iterative_hash_hashval_t (h, mem_ref->access_size);
  return h;
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
                            const asan_mem_ref *m2)
{
  return (m1->access_size == m2->access_size
          && operand_equal_p (m1->start, m2->start, 0));
}

static hash_table <asan_mem_ref_hasher> asan_mem_ref_ht;
/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table <asan_mem_ref_hasher> &
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht.is_created ())
    asan_mem_ref_ht.create (10);

  return asan_mem_ref_ht;
}

/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht.is_created ())
    asan_mem_ref_ht.empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  if (asan_mem_ref_ht.is_created ())
    asan_mem_ref_ht.dispose ();

  if (asan_mem_ref_alloc_pool)
    {
      free_alloc_pool (asan_mem_ref_alloc_pool);
      asan_mem_ref_alloc_pool = NULL;
    }
}
/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, char access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  return (get_mem_ref_hash_table ().find (&r) != NULL);
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to the memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  /* First let's see if the address of the beginning of REF has been
     instrumented.  */
  if (!has_mem_ref_been_instrumented (ref))
    return false;

  if (len != NULL_TREE)
    {
      /* Let's see if the end of the region has been instrumented.  */
      if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
                                          ref->access_size))
        return false;
    }
  return true;
}
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gimple assignment,
                           asan_mem_ref *ref,
                           bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gimple call,
                              asan_mem_ref *src0,
                              tree *src0_len,
                              bool *src0_is_store,
                              asan_mem_ref *src1,
                              tree *src1_len,
                              bool *src1_is_store,
                              asan_mem_ref *dst,
                              tree *dst_len,
                              bool *dst_is_store,
                              bool *dest_is_deref)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  char access_size = 1;

  switch (DECL_FUNCTION_CODE (callee))
    {
    /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

    /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

    /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

    /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

    /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;
    /* And now the __atomic* and __sync builtins.
       These are handled differently from the classical memory
       access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      dest = gimple_call_arg (call, 0);
      /* DEST represents the address of a memory location.
         instrument_derefs wants the memory location, so let's
         dereference the address DEST before handing it to
         instrument_derefs.  */
      if (TREE_CODE (dest) == ADDR_EXPR)
        dest = TREE_OPERAND (dest, 0);
      else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
        dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
                       dest, build_int_cst (TREE_TYPE (dest), 0));
      else
        gcc_unreachable ();

      access_size = int_size_in_bytes (TREE_TYPE (dest));
      break;

    default:
      /* The other built-ins' memory accesses are not instrumented in
         this function because they either don't have any length
         parameter, or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
        {
          src0->start = source0;
          src0->access_size = access_size;
          *src0_len = len;
          *src0_is_store = false;
        }

      if (source1 != NULL_TREE)
        {
          src1->start = source1;
          src1->access_size = access_size;
          *src1_len = len;
          *src1_is_store = false;
        }

      if (dest != NULL_TREE)
        {
          dst->start = dest;
          dst->access_size = access_size;
          *dst_len = len;
          *dst_is_store = true;
        }

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
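
/* Illustration: for a call  memcpy (d, s, n)  the function above sets,
   roughly,

     src0 = { s, 1 }, *src0_len = n, *src0_is_store = false,
     dst  = { d, 1 }, *dst_len  = n, *dst_is_store  = true,

   whereas for  __atomic_store_4 (p, v, order)  it dereferences P and
   sets

     dst = { *p, 4 }, *dst_len = NULL_TREE, *dest_is_deref = true.  */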
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
        return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
        dest_is_store = false, dest_is_deref = false;
      if (get_mem_refs_of_builtin_call (stmt,
                                        &src0, &src0_len, &src0_is_store,
                                        &src1, &src1_len, &src1_is_store,
                                        &dest, &dest_len, &dest_is_store,
                                        &dest_is_deref))
        {
          if (src0.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&src0, src0_len))
            return false;

          if (src1.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&src1, src1_len))
            return false;

          if (dest.start != NULL_TREE
              && !has_mem_ref_been_instrumented (&dest, dest_len))
            return false;

          return true;
        }
    }
  return false;
}
/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, char access_size)
{
  hash_table <asan_mem_ref_hasher> ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht.find_slot (&r, INSERT);
  if (*slot == NULL)
    *slot = asan_mem_ref_new (ref, access_size);
}
/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}
/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
                        build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}
/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
           << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}
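
/* For instance, on a little-endian target the shadow bytes
   {0x00, 0x00, 0x00, 0xF4} yield the SImode constant 0xF4000000,
   which matches the slots-3-and-2 example in the overview comment at
   the top of this file.  */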
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't call a library call here
   though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx insn, insns, top_label, end, addr, tmp, jump;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;

  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
/* Emit the LASANPC label at the start of the current function; its
   address is stored into the stack frame so that the run-time library
   can report the function's PC.  */

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
                          current_function_funcdef_no);
}
/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, the epilogue sequence returned.  BASE is the register holding the
   stack base, relative to which the OFFSETS array offsets are expressed; the
   OFFSETS array contains pairs of offsets in reverse order, always the end
   offset of some gap that needs protection followed by its starting offset,
   and DECLS is an array of representative decls for each var partition.
   LENGTH is the length of the OFFSETS array, and the DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS includes the gap before the first
   variable as well as gaps after each stack variable).  PBASE is, if
   non-NULL, some pseudo register which stack vars DECL_RTLs are based on.
   Either BASE should be assigned to PBASE, when not doing use-after-return
   protection, or the corresponding address based on the
   __asan_stack_malloc* return value.  */
rtx
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
                            HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base, lab;
  char buf[30];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
        {
          pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
          pp_space (&asan_pp);
          pp_tree_identifier (&asan_pp, DECL_NAME (decl));
        }
      else
        pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);
  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
         N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
                    ? (64U << use_after_return_class) : 4096U))
        use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
        base_align_bias = ((asan_frame_size + alignb - 1)
                           & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);
  base = expand_binop (Pmode, add_optab, base,
                       gen_int_mode (base_offset - base_align_bias, Pmode),
                       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
        {
          id = get_identifier ("__asan_option_detect_stack_use_after_return");
          decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
                             integer_type_node);
          SET_DECL_ASSEMBLER_NAME (decl, id);
          TREE_ADDRESSABLE (decl) = 1;
          DECL_ARTIFICIAL (decl) = 1;
          DECL_IGNORED_P (decl) = 1;
          DECL_EXTERNAL (decl) = 1;
          TREE_STATIC (decl) = 1;
          TREE_PUBLIC (decl) = 1;
          TREE_USED (decl) = 1;
          asan_detect_stack_use_after_return = decl;
        }
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
                               VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
                use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
                                     GEN_INT (asan_frame_size
                                              + base_align_bias),
                                     TYPE_MODE (pointer_sized_int_node),
                                     addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
                                           gen_int_mode (base_align_bias
                                                         - base_offset, Pmode),
                                           NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
                              GEN_INT (ASAN_SHADOW_SHIFT),
                              NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
                     targetm.asan_shadow_offset ()
                     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
              && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
        cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
        {
          int i;
          HOST_WIDE_INT aoff
            = base_offset + ((offset - base_offset)
                             & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (aoff - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = aoff;
          for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
            if (aoff < offset)
              {
                if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
                  shadow_bytes[i] = 0;
                else
                  shadow_bytes[i] = offset - aoff;
              }
            else
              shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
          emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
          offset = aoff;
        }
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
        {
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (offset - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = offset;
          memset (shadow_bytes, cur_shadow_byte, 4);
          emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
          offset += ASAN_RED_ZONE_SIZE;
        }
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();
  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL_RTX;
  if (use_after_return_class != -1)
    {
      rtx lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
                               VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
          && can_store_by_pieces (sz, builtin_memset_read_str, &c,
                                  BITS_PER_UNIT, true))
        store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
                         BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
               || !set_storage_via_setmem (shadow_mem,
                                           GEN_INT (sz),
                                           gen_int_mode (c, QImode),
                                           BITS_PER_UNIT, BITS_PER_UNIT,
                                           -1))
        {
          snprintf (buf, sizeof buf, "__asan_stack_free_%d",
                    use_after_return_class);
          ret = init_one_libfunc (buf);
          rtx addr = convert_memory_address (ptr_mode, base);
          rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
          emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
                             GEN_INT (asan_frame_size + base_align_bias),
                             TYPE_MODE (pointer_sized_int_node),
                             orig_addr, ptr_mode);
        }
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }
  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
                              & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
        {
          shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                       (last_offset - prev_offset)
                                       >> ASAN_SHADOW_SHIFT);
          prev_offset = last_offset;
          asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
          last_offset = offset;
          last_size = 0;
        }
      last_size += base_offset + ((offsets[l - 2] - base_offset)
                                  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
                   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
                                   (last_offset - prev_offset)
                                   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  ret = get_insns ();
  end_sequence ();
  return ret;
}
/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}
/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
         by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
          && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
          && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
        return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
         the var that is selected by the linker will have
         padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using a user section; often vars placed
         into a user section from multiple TUs are then assumed
         to be an array of such vars, and putting padding in there
         breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL_TREE
          && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
   IS_STORE is either 1 (for a store) or 0 (for a load).
   SIZE_IN_BYTES is one of 1, 2, 4, 8, 16.  */

static tree
report_error_func (bool is_store, int size_in_bytes)
{
  static enum built_in_function report[2][5]
    = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
          BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
          BUILT_IN_ASAN_REPORT_LOAD16 },
        { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
          BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
          BUILT_IN_ASAN_REPORT_STORE16 } };
  return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
}
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */
static gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
                          bool before_p,
                          bool then_more_likely_p,
                          bool create_then_fallthru_edge,
                          basic_block *then_block,
                          basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
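
/* Schematically, create_cond_insert_point transforms

     BB: stmt1; stmt2; ...                 (*ITER at stmt1, BEFORE_P true)

   into

     COND_BB:     ...                      (returned iterator points here)
     THEN_BB:     (empty)                  COND_BB --true--> THEN_BB
     FALLTHRU_BB: stmt1; stmt2; ...        COND_BB --false-> FALLTHRU_BB

   and the caller then inserts its GIMPLE_COND at the returned
   iterator.  */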
/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outcoming edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gimple cond,
                            gimple_stmt_iterator *iter,
                            bool then_more_likely_p,
                            basic_block *then_bb,
                            basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
                              /*before_p=*/true,
                              then_more_likely_p,
                              /*create_then_fallthru_edge=*/true,
                              then_bb,
                              fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  SIZE_IN_BYTES is one of
   1, 2, 4, 8, 16.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */
static void
build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
                  bool before_p, bool is_store, int size_in_bytes)
{
  gimple_stmt_iterator gsi;
  basic_block then_bb, else_bb;
  tree t, base_addr, shadow;
  gimple g;
  tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  tree uintptr_type
    = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
  tree base_ssa = base;

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  gsi = create_cond_insert_point (iter, before_p,
                                  /*then_more_likely_p=*/false,
                                  /*create_then_fallthru_edge=*/false,
                                  &then_bb,
                                  &else_bb);

  base = unshare_expr (base);

  /* BASE can already be an SSA_NAME; in that case, do not create a
     new SSA_NAME for it.  */
  if (TREE_CODE (base) != SSA_NAME)
    {
      g = gimple_build_assign_with_ops (TREE_CODE (base),
                                        make_ssa_name (TREE_TYPE (base), NULL),
                                        base, NULL_TREE);
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      base_ssa = gimple_assign_lhs (g);
    }

  g = gimple_build_assign_with_ops (NOP_EXPR,
                                    make_ssa_name (uintptr_type, NULL),
                                    base_ssa, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  base_addr = gimple_assign_lhs (g);

  /* Build
     (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset ().  */

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign_with_ops (RSHIFT_EXPR,
                                    make_ssa_name (uintptr_type, NULL),
                                    base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
  g = gimple_build_assign_with_ops (PLUS_EXPR,
                                    make_ssa_name (uintptr_type, NULL),
                                    gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign_with_ops (NOP_EXPR,
                                    make_ssa_name (shadow_ptr_type, NULL),
                                    gimple_assign_lhs (g), NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
              build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign_with_ops (MEM_REF,
                                    make_ssa_name (shadow_type, NULL),
                                    t, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  shadow = gimple_assign_lhs (g);

  if (size_in_bytes < 8)
    {
      /* Slow path for 1, 2 and 4 byte accesses.
         Test (shadow != 0
               && ((base_addr & 7) + (size_in_bytes - 1)) >= shadow).  */
      gimple_seq seq = NULL;
      gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq_add_stmt (&seq, shadow_test);
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, base_addr, 7));
      gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
                                                  gimple_seq_last (seq)));
      if (size_in_bytes > 1)
        gimple_seq_add_stmt (&seq,
                             build_assign (PLUS_EXPR, gimple_seq_last (seq),
                                           size_in_bytes - 1));
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, gimple_seq_last (seq),
                                               shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
                                               gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, location);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
    }
  else
    t = shadow;

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
                         NULL_TREE, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  g = gimple_build_call (report_error_func (is_store, size_in_bytes),
                         1, base_addr);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  *iter = gsi_start_bb (else_bb);
}
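
/* For reference, the GIMPLE emitted above for a 1-byte load looks
   roughly like this (SSA names invented):

     _1 = (uintptr_t) base;
     _2 = _1 >> ASAN_SHADOW_SHIFT;
     _3 = _2 + <targetm.asan_shadow_offset ()>;
     _4 = *(signed char *) _3;              // shadow byte
     _5 = _1 & 7;
     _6 = (signed char) _5;                 // plus size_in_bytes - 1 if > 1
     _7 = (_4 != 0) & (_6 >= _4);
     if (_7 != 0)
       __asan_report_load1 (_1);            // in the 'then block'  */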
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is the source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
                   location_t location, bool is_store)
{
  tree type, base;
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
      break;
      /* FALLTHRU */
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if ((size_in_bytes & (size_in_bytes - 1)) != 0
      || (unsigned HOST_WIDE_INT) size_in_bytes - 1 >= 16)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
                                    &mode, &unsignedp, &volatilep, false);
  if (bitpos % (size_in_bytes * BITS_PER_UNIT)
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    {
      if (TREE_CODE (t) == COMPONENT_REF
          && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
        {
          tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
          instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
                                           TREE_OPERAND (t, 0), repr,
                                           NULL_TREE), location, is_store);
        }
      return;
    }

  if (TREE_CODE (inner) == VAR_DECL
      && offset == NULL_TREE
      && bitpos >= 0
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
        return;
      if (!TREE_STATIC (inner))
        {
          /* Automatic vars in the current function will always be
             accessible.  */
          if (decl_function_context (inner) == current_function_decl)
            return;
        }
      /* Always instrument external vars, they might be dynamically
         initialized.  */
      else if (!DECL_EXTERNAL (inner))
        {
          /* For static vars, if they are known not to be dynamically
             initialized, they will always be accessible.  */
          varpool_node *vnode = varpool_get_node (inner);
          if (vnode && !vnode->dynamically_initialized)
            return;
        }
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      build_check_stmt (location, base, iter, /*before_p=*/true,
                        is_store, size_in_bytes);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}
/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   the sizeof (*BASE) bytes).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

static void
instrument_mem_region_access (tree base, tree len,
                              gimple_stmt_iterator *iter,
                              location_t location, bool is_store)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  gimple_stmt_iterator gsi = *iter;

  basic_block fallthrough_bb = NULL, then_bb = NULL;

  /* If the beginning of the memory region has already been
     instrumented, do not instrument it.  */
  bool start_instrumented = has_mem_ref_been_instrumented (base, 1);

  /* If the end of the memory region has already been instrumented, do
     not instrument it.  */
  tree end = asan_mem_ref_get_end (base, len);
  bool end_instrumented = has_mem_ref_been_instrumented (end, 1);

  if (start_instrumented && end_instrumented)
    return;

  if (!is_gimple_constant (len))
    {
      /* So, the length of the memory area to asan-protect is
         non-constant.  Let's guard the generated instrumentation code
         like:

         if (len != 0)
           {
             // asan instrumentation code goes here.
           }
         // fallthrough instructions, starting with *ITER.  */

      gimple g = gimple_build_cond (NE_EXPR,
                                    len,
                                    build_int_cst (TREE_TYPE (len), 0),
                                    NULL_TREE, NULL_TREE);
      gimple_set_location (g, location);
      insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
                                  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
         pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
         we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  if (!start_instrumented)
    {
      /* Instrument the beginning of the memory region to be accessed,
         and arrange for the rest of the instrumentation code to be
         inserted in the then block *after* the current gsi.  */
      build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);

      if (then_bb)
        /* We are in the case where the length of the region is not
           constant; so instrumentation code is being generated in the
           'then block' of the 'if (len != 0)' condition.  Let's arrange
           for the subsequent instrumentation statements to go in the
           'then block'.  */
        gsi = gsi_last_bb (then_bb);
      else
        {
          *iter = gsi;
          /* Don't remember this access as instrumented, if length
             is unknown.  It might be zero and not being actually
             instrumented, so we can't rely on it being instrumented.  */
          update_mem_ref_hash_table (base, 1);
        }
    }

  if (end_instrumented)
    return;

  /* We want to instrument the access at the end of the memory region,
     which is at (base + len - 1).  */

  /* offset = len - 1;  */
  len = unshare_expr (len);
  tree offset;
  gimple_seq seq = NULL;
  if (TREE_CODE (len) == INTEGER_CST)
    offset = fold_build2 (MINUS_EXPR, size_type_node,
                          fold_convert (size_type_node, len),
                          build_int_cst (size_type_node, 1));
  else
    {
      gimple g;
      tree t;

      if (TREE_CODE (len) != SSA_NAME)
        {
          t = make_ssa_name (TREE_TYPE (len), NULL);
          g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
          gimple_set_location (g, location);
          gimple_seq_add_stmt_without_update (&seq, g);
          len = t;
        }
      if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
        {
          t = make_ssa_name (size_type_node, NULL);
          g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
          gimple_set_location (g, location);
          gimple_seq_add_stmt_without_update (&seq, g);
          len = t;
        }

      t = make_ssa_name (size_type_node, NULL);
      g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
                                        build_int_cst (size_type_node, 1));
      gimple_set_location (g, location);
      gimple_seq_add_stmt_without_update (&seq, g);
      offset = gimple_assign_lhs (g);
    }

  /* _1 = base;  */
  base = unshare_expr (base);
  gimple region_end =
    gimple_build_assign_with_ops (TREE_CODE (base),
                                  make_ssa_name (TREE_TYPE (base), NULL),
                                  base, NULL);
  gimple_set_location (region_end, location);
  gimple_seq_add_stmt_without_update (&seq, region_end);

  /* _2 = _1 + offset;  */
  region_end =
    gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
                                  make_ssa_name (TREE_TYPE (base), NULL),
                                  gimple_assign_lhs (region_end),
                                  offset);
  gimple_set_location (region_end, location);
  gimple_seq_add_stmt_without_update (&seq, region_end);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  /* instrument access at _2;  */
  gsi = gsi_for_stmt (region_end);
  build_check_stmt (location, gimple_assign_lhs (region_end),
                    &gsi, /*before_p=*/false, is_store, 1);

  if (then_bb == NULL)
    update_mem_ref_hash_table (end, 1);

  *iter = gsi_for_stmt (gsi_stmt (*iter));
}
/* Instrument the call (to the builtin strlen function) pointed to by
   the iterator ITER.

   This function instruments the access to the first byte of the
   argument, right before the call.  After the call it instruments the
   access to the last byte of the argument; it uses the result of the
   call to deduce the offset of that last byte.

   Upon completion, iff the call has actually been instrumented, this
   function returns TRUE and *ITER points to the statement logically
   following the built-in strlen function call *ITER was initially
   pointing to.  Otherwise, the function returns FALSE and *ITER
   remains unchanged.  */

static bool
instrument_strlen_call (gimple_stmt_iterator *iter)
{
  gimple call = gsi_stmt (*iter);
  gcc_assert (is_gimple_call (call));

  tree callee = gimple_call_fndecl (call);
  gcc_assert (is_builtin_fn (callee)
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);

  tree len = gimple_call_lhs (call);
  if (len == NULL)
    /* Some passes might clear the return value of the strlen call;
       bail out in that case.  Return FALSE as we are not advancing
       *ITER.  */
    return false;
  gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));

  location_t loc = gimple_location (call);
  tree str_arg = gimple_call_arg (call, 0);

  /* Instrument the access to the first byte of str_arg, i.e.:

     _1 = str_arg; instrument (_1);  */
  tree cptr_type = build_pointer_type (char_type_node);
  gimple str_arg_ssa =
    gimple_build_assign_with_ops (NOP_EXPR,
				  make_ssa_name (cptr_type, NULL),
				  str_arg, NULL);
  gimple_set_location (str_arg_ssa, loc);
  gimple_stmt_iterator gsi = *iter;
  gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
  build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
		    /*before_p=*/false, /*is_store=*/false, 1);

  /* If we initially had an instruction like:

	 int n = strlen (str)

     we now want to instrument the access to str[n], after the
     instruction above.  */

  /* So let's build the access to str[n], that is, the access through
     the pointer_plus expr: (_1 + len).  */
  gimple stmt =
    gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
				  make_ssa_name (cptr_type, NULL),
				  gimple_assign_lhs (str_arg_ssa),
				  len);
  gimple_set_location (stmt, loc);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);

  build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
		    /*before_p=*/false, /*is_store=*/false, 1);

  /* Ensure that iter points to the statement logically following the
     one it was initially pointing to.  */
  *iter = gsi;
  /* As *ITER has been advanced to point to the next statement, let's
     return true to inform transform_statements that it shouldn't
     advance *ITER anymore; otherwise it will skip that next
     statement, which wouldn't be instrumented.  */
  return true;
}
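
/* For illustration only: given

       n_3 = strlen (s_1);

   the transformation above produces, roughly,

       _2 = (char *) s_1;
       <check 1-byte read at _2>
       n_3 = strlen (s_1);
       _4 = _2 + n_3;
       <check 1-byte read at _4>

   so that both s[0] and s[n] (the terminating NUL byte) are
   verified.  The SSA names are made up for the example.  */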
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  bool iter_advanced_p = false;
  gimple call = gsi_stmt (*iter);

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  location_t loc = gimple_location (call);

  if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
    iter_advanced_p = instrument_strlen_call (iter);
  else
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false;

      if (get_mem_refs_of_builtin_call (call,
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref))
	{
	  if (dest_is_deref)
	    {
	      instrument_derefs (iter, dest.start, loc, dest_is_store);
	      gsi_next (iter);
	      iter_advanced_p = true;
	    }
	  else if (src0_len || src1_len || dest_len)
	    {
	      if (src0.start != NULL_TREE)
		instrument_mem_region_access (src0.start, src0_len,
					      iter, loc, /*is_store=*/false);
	      if (src1.start != NULL_TREE)
		instrument_mem_region_access (src1.start, src1_len,
					      iter, loc, /*is_store=*/false);
	      if (dest.start != NULL_TREE)
		instrument_mem_region_access (dest.start, dest_len,
					      iter, loc, /*is_store=*/true);
	      *iter = gsi_for_stmt (call);
	      gsi_next (iter);
	      iter_advanced_p = true;
	    }
	}
    }
  return iter_advanced_p;
}
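
/* For illustration only: for a call such as memcpy (d, s, n), the
   dispatch above records an N-byte read at S and an N-byte write at
   D, so instrument_mem_region_access checks the first and last byte
   of each region, roughly:

       <check reads at s[0] and s[n-1]>
       <check writes at d[0] and d[n-1]>
       memcpy (d, s, n);

   This sketches the intent, not the literal GIMPLE output.  */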
/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the relevant memory reference whose access has been
   instrumented is added to the memory references hash table.  */

static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      is_store = true;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      is_store = false;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (is_instrumented)
    gsi_next (iter);

  return is_instrumented;
}
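
/* For illustration only: in

       a.x = b_2;    /+ gimple_store_p: the LHS reference is checked +/
       c_3 = *p_1;   /+ gimple_assign_load_p: the RHS is checked +/

   each instrumented reference gets one check; an aggregate copy that
   is both a load and a store is checked on both sides.  */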
/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
	{
	  tree callee = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_UNREACHABLE:
	    case BUILT_IN_TRAP:
	      /* Don't instrument these.  */
	      return false;
	    default:
	      break;
	    }
	}
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }
  return false;
}
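
/* For illustration only: before a noreturn call that is not on the
   exclusion list above, e.g. abort (), the code inserts

       __asan_handle_no_return ();
       abort ();

   giving the run-time a chance to unpoison the part of the stack
   this call will never return through.  */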
/* Walk each instruction of all basic blocks and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;

  FOR_EACH_BB (bb)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}
      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && maybe_instrument_assignment (&i))
	    /* Nothing to do as maybe_instrument_assignment advanced
	       the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /* Nothing to do as maybe_instrument_call
	       advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  */
	      if (is_gimple_call (s) && !nonfreeing_call_p (s))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}
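
/* For illustration only: if bb2's single predecessor is bb1 and bb1
   was processed immediately before it, checks emitted in bb1 still
   dominate bb2, so memory references already in the hash table are
   not re-instrumented there; any other incoming-edge shape empties
   the table first.  */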
/* Build
   __asan_before_dynamic_init (module_name)
   or
   __asan_after_dynamic_init ()
   call, depending on AFTER_P.  */

tree
asan_dynamic_init_call (bool after_p)
{
  tree fn = builtin_decl_implicit (after_p
				   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
				   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
  tree module_name_cst = NULL_TREE;
  if (!after_p)
    {
      pretty_printer module_name_pp;
      pp_string (&module_name_pp, main_input_filename);

      if (shadow_ptr_types[0] == NULL_TREE)
	asan_init_shadow_ptr_types ();
      module_name_cst = asan_pp_string (&module_name_pp);
      module_name_cst = fold_convert (const_ptr_type_node,
				      module_name_cst);
    }

  return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
}
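
/* For illustration only: the front end is expected to wrap a dynamic
   initializer roughly as

       __asan_before_dynamic_init ("file.cc");
       ... run the initializer of the global ...
       __asan_after_dynamic_init ();

   which is what lets the run-time detect initialization-order
   bugs.  */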
/* Build
   struct __asan_global
   {
     const void *__beg;
     uptr __size;
     uptr __size_with_redzone;
     const void *__name;
     const void *__module_name;
     uptr __has_dynamic_init;
   } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[6]
    = { "__beg", "__size", "__size_with_redzone",
	"__name", "__module_name", "__has_dynamic_init" };
  tree fields[6], ret;
  int i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < 6; i++)
    {
      fields[i]
	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
		      get_identifier (field_names[i]),
		      (i == 0 || i == 3) ? const_ptr_type_node
		      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = get_identifier ("__asan_global");
  layout_type (ret);
  return ret;
}
/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
					build_fold_addr_expr (refdecl)));
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, module_name_cst));
  varpool_node *vnode = varpool_get_node (decl);
  int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  build_int_cst (uptr, has_dynamic_init));
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
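
/* For illustration only: for a file t.c containing "int g;", the
   element appended above would look roughly like

       { &g (or its .LASAN alias), 4, 4 + <redzone>, "g", "t.c", 0 }

   with the two size fields emitted as pointer-sized integers; the
   concrete numbers are made up for the example.  */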
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */

void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						| TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS)		\
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,	\
			       BUILT_IN_NORMAL, NAME, NULL_TREE); \
  set_call_expr_flags (decl, ATTRS);				\
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

#undef DEF_SANITIZER_BUILTIN
}
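
/* For illustration only: a sanitizer.def entry of the shape

       DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init_vN",
			      BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   expands through the macro above into the add_builtin_function /
   set_call_expr_flags / set_builtin_decl triple that registers the
   corresponding __builtin___asan_* declaration.  The entry shown is
   indicative of the pattern, not quoted from sanitizer.def.  */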
/* Called via htab_traverse.  Count number of emitted
   STRING_CSTs in the constant hash table.  */

static int
count_string_csts (void **slot, void *data)
{
  struct constant_descriptor_tree *desc
    = (struct constant_descriptor_tree *) *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*((unsigned HOST_WIDE_INT *) data);
  return 1;
}
/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};

/* Called via htab_traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

static int
add_string_csts (void **slot, void *data)
{
  struct constant_descriptor_tree *desc
    = (struct constant_descriptor_tree *) *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      struct asan_add_string_csts_data *aascd
	= (struct asan_add_string_csts_data *) data;
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
		       aascd->type, aascd->v);
    }
  return 1;
}
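
/* For illustration only: the two callbacks above are meant to be
   used in tandem, as asan_finish_file does below:

       unsigned HOST_WIDE_INT gcount = 0;
       htab_traverse (constant_pool_htab (), count_string_csts, &gcount);
       ... allocate a vector with room for GCOUNT descriptors ...
       htab_traverse (constant_pool_htab (), add_string_csts, &aascd);

   so protected string literals get __asan_global descriptors just
   like named globals do.  */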
/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;

/* Module-level instrumentation.
   - Insert __asan_init_vN () into the list of CTORs.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
  append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  htab_t const_desc_htab = constant_pool_htab ();
  htab_traverse (const_desc_htab, count_string_csts, &gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      htab_traverse (const_desc_htab, add_string_csts, &aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_assemble_decl (varpool_node_for_decl (var));

      fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements,
				 MAX_RESERVED_INIT_PRIORITY - 1);
    }
  cgraph_build_static_cdtor ('I', asan_ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
  flag_sanitize |= SANITIZE_ADDRESS;
}
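
/* For illustration only: for a translation unit with instrumented
   globals, the net effect is a pair of static functions along the
   lines of

       static void <ctor> (void)	/+ MAX_RESERVED_INIT_PRIORITY - 1 +/
       {
	 __asan_init_vN ();
	 __asan_register_globals (&.LASAN0, <gcount>);
       }
       static void <dtor> (void)
       {
	 __asan_unregister_globals (&.LASAN0, <gcount>);
       }

   The names are schematic; the actual symbols are created by
   cgraph_build_static_cdtor.  */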
/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  return 0;
}

static bool
gate_asan (void)
{
  return (flag_sanitize & SANITIZE_ADDRESS) != 0
	  && !lookup_attribute ("no_sanitize_address",
				DECL_ATTRIBUTES (current_function_decl));
}
namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_flow | TODO_verify_stmts
    | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  bool gate () { return gate_asan (); }
  unsigned int execute () { return asan_instrument (); }

}; // class pass_asan

} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}
static bool
gate_asan_O0 (void)
{
  return !optimize && gate_asan ();
}

namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_flow | TODO_verify_stmts
    | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_asan_O0 (); }
  unsigned int execute () { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}
/* Perform optimization of sanitize functions.  */

static unsigned int
execute_sanopt (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (!is_gimple_call (stmt))
	    continue;

	  if (gimple_call_internal_p (stmt))
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_NULL:
		ubsan_expand_null_ifn (gsi);
		break;
	      default:
		break;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Optimized\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	}
    }
  return 0;
}

static bool
gate_sanopt (void)
{
  return flag_sanitize;
}
namespace {

const pass_data pass_data_sanopt =
{
  GIMPLE_PASS, /* type */
  "sanopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_verify_flow | TODO_verify_stmts
    | TODO_update_ssa ), /* todo_flags_finish */
};

class pass_sanopt : public gimple_opt_pass
{
public:
  pass_sanopt (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sanopt, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_sanopt (); }
  unsigned int execute () { return execute_sanopt (); }

}; // class pass_sanopt

} // anon namespace

gimple_opt_pass *
make_pass_sanopt (gcc::context *ctxt)
{
  return new pass_sanopt (ctxt);
}

#include "gt-asan.h"